From edebf70203f9973ef4b05d6b8bf2af31ee0fd0b7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 8 Aug 2024 17:06:11 -0400 Subject: [PATCH] feat: [google-cloud-translate] Adds AdaptiveMt HTML/Glossary support (#12981) BEGIN_COMMIT_OVERRIDE feat: Adds AdaptiveMt HTML/Glossary support feat: Adds protos for Custom Translation API (AutoML) feat: Adds protos for Transliteration in V3 Advanced translate text feat: Adds protos for Romanization APIs docs: Fixes typos in docs END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. feat: Adds protos for Custom Translation API (AutoML) feat: Adds protos for Transliteration in V3 Advanced translate text feat: Adds protos for Romanization APIs docs: Fixes typos in docs PiperOrigin-RevId: 659659171 Source-Link: https://github.com/googleapis/googleapis/commit/db79b1a514b9dcf5e824e0b79e0dcae0f55e6827 Source-Link: https://github.com/googleapis/googleapis-gen/commit/3288b61231bcc0ab6e4601d42fc21ea2a0eddc06 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXRyYW5zbGF0ZS8uT3dsQm90LnlhbWwiLCJoIjoiMzI4OGI2MTIzMWJjYzBhYjZlNDYwMWQ0MmZjMjFlYTJhMGVkZGMwNiJ9 --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google/cloud/translate/__init__.py | 84 + .../google/cloud/translate/gapic_version.py | 2 +- .../google/cloud/translate_v3/__init__.py | 90 +- .../cloud/translate_v3/gapic_metadata.json | 270 + .../cloud/translate_v3/gapic_version.py | 2 +- .../translation_service/async_client.py | 3272 +- .../services/translation_service/client.py | 3436 +- .../services/translation_service/pagers.py | 619 +- .../translation_service/transports/base.py | 340 +- .../translation_service/transports/grpc.py | 624 +- .../transports/grpc_asyncio.py | 737 +- .../translation_service/transports/rest.py | 4220 ++- .../cloud/translate_v3/types/__init__.py | 90 +- .../cloud/translate_v3/types/adaptive_mt.py | 137 +- .../translate_v3/types/automl_translation.py | 923 + .../google/cloud/translate_v3/types/common.py | 140 + .../translate_v3/types/translation_service.py | 381 +- .../cloud/translate_v3beta1/gapic_version.py | 2 +- ..._metadata_google.cloud.translation.v3.json | 4084 +- ...data_google.cloud.translation.v3beta1.json | 2 +- ...ranslation_service_create_dataset_async.py | 56 + ...translation_service_create_dataset_sync.py | 56 + ...ion_service_create_glossary_entry_async.py | 52 + ...tion_service_create_glossary_entry_sync.py | 52 + ..._translation_service_create_model_async.py | 56 + ...d_translation_service_create_model_sync.py | 56 + ...ranslation_service_delete_dataset_async.py | 56 + ...translation_service_delete_dataset_sync.py | 56 + ...ion_service_delete_glossary_entry_async.py | 50 + ...tion_service_delete_glossary_entry_sync.py | 50 + ..._translation_service_delete_model_async.py | 56 + ...d_translation_service_delete_model_sync.py | 56 + ...d_translation_service_export_data_async.py | 60 + ...ed_translation_service_export_data_sync.py | 60 + ...d_translation_service_get_dataset_async.py | 52 + ...ed_translation_service_get_dataset_sync.py | 52 + ...lation_service_get_glossary_entry_async.py | 52 + ...slation_service_get_glossary_entry_sync.py | 52 + ...ted_translation_service_get_model_async.py | 52 + ...ated_translation_service_get_model_sync.py | 52 + ...d_translation_service_import_data_async.py | 56 + ...ed_translation_service_import_data_sync.py | 56 + ...translation_service_list_datasets_async.py | 53 + ..._translation_service_list_datasets_sync.py | 53 + 
...translation_service_list_examples_async.py | 53 + ..._translation_service_list_examples_sync.py | 53 + ...ion_service_list_glossary_entries_async.py | 53 + ...tion_service_list_glossary_entries_sync.py | 53 + ...d_translation_service_list_models_async.py | 53 + ...ed_translation_service_list_models_sync.py | 53 + ...translation_service_romanize_text_async.py | 53 + ..._translation_service_romanize_text_sync.py | 53 + ...anslation_service_update_glossary_async.py | 59 + ...ion_service_update_glossary_entry_async.py | 51 + ...tion_service_update_glossary_entry_sync.py | 51 + ...ranslation_service_update_glossary_sync.py | 59 + .../scripts/fixup_translate_v3_keywords.py | 22 +- packages/google-cloud-translate/setup.py | 1 + .../testing/constraints-3.10.txt | 1 + .../testing/constraints-3.11.txt | 1 + .../testing/constraints-3.12.txt | 1 + .../testing/constraints-3.7.txt | 1 + .../testing/constraints-3.8.txt | 1 + .../testing/constraints-3.9.txt | 1 + .../translate_v3/test_translation_service.py | 31356 ++++++++++++---- 65 files changed, 42899 insertions(+), 9887 deletions(-) create mode 100644 packages/google-cloud-translate/google/cloud/translate_v3/types/automl_translation.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_dataset_async.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_dataset_sync.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_entry_async.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_entry_sync.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_model_async.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_model_sync.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_dataset_async.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_dataset_sync.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_glossary_entry_async.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_glossary_entry_sync.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_model_async.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_model_sync.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_export_data_async.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_export_data_sync.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_dataset_async.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_dataset_sync.py create mode 100644 
packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_entry_async.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_entry_sync.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_model_async.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_model_sync.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_data_async.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_data_sync.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_datasets_async.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_datasets_sync.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_examples_async.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_examples_sync.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossary_entries_async.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossary_entries_sync.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_models_async.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_models_sync.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_romanize_text_async.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_romanize_text_sync.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_async.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_entry_async.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_entry_sync.py create mode 100644 packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_sync.py diff --git a/packages/google-cloud-translate/google/cloud/translate/__init__.py b/packages/google-cloud-translate/google/cloud/translate/__init__.py index 05e54ebe0a0c..9a97aabb9e31 100644 --- a/packages/google-cloud-translate/google/cloud/translate/__init__.py +++ b/packages/google-cloud-translate/google/cloud/translate/__init__.py @@ -45,10 +45,41 @@ ListAdaptiveMtSentencesRequest, ListAdaptiveMtSentencesResponse, ) +from google.cloud.translate_v3.types.automl_translation import ( + BatchTransferResourcesResponse, + CreateDatasetMetadata, + CreateDatasetRequest, + CreateModelMetadata, + CreateModelRequest, + Dataset, + DatasetInputConfig, + DatasetOutputConfig, + 
DeleteDatasetMetadata, + DeleteDatasetRequest, + DeleteModelMetadata, + DeleteModelRequest, + Example, + ExportDataMetadata, + ExportDataRequest, + GetDatasetRequest, + GetModelRequest, + ImportDataMetadata, + ImportDataRequest, + ListDatasetsRequest, + ListDatasetsResponse, + ListExamplesRequest, + ListExamplesResponse, + ListModelsRequest, + ListModelsResponse, + Model, +) from google.cloud.translate_v3.types.common import ( FileInputSource, GcsInputSource, GcsOutputDestination, + GlossaryEntry, + GlossaryTerm, + OperationState, ) from google.cloud.translate_v3.types.translation_service import ( BatchDocumentInputConfig, @@ -59,8 +90,10 @@ BatchTranslateMetadata, BatchTranslateResponse, BatchTranslateTextRequest, + CreateGlossaryEntryRequest, CreateGlossaryMetadata, CreateGlossaryRequest, + DeleteGlossaryEntryRequest, DeleteGlossaryMetadata, DeleteGlossaryRequest, DeleteGlossaryResponse, @@ -72,6 +105,7 @@ DocumentTranslation, GcsDestination, GcsSource, + GetGlossaryEntryRequest, GetGlossaryRequest, GetSupportedLanguagesRequest, Glossary, @@ -79,7 +113,12 @@ InputConfig, ListGlossariesRequest, ListGlossariesResponse, + ListGlossaryEntriesRequest, + ListGlossaryEntriesResponse, OutputConfig, + Romanization, + RomanizeTextRequest, + RomanizeTextResponse, SupportedLanguage, SupportedLanguages, TranslateDocumentRequest, @@ -88,6 +127,10 @@ TranslateTextRequest, TranslateTextResponse, Translation, + TransliterationConfig, + UpdateGlossaryEntryRequest, + UpdateGlossaryMetadata, + UpdateGlossaryRequest, ) __all__ = ( @@ -112,9 +155,38 @@ "ListAdaptiveMtFilesResponse", "ListAdaptiveMtSentencesRequest", "ListAdaptiveMtSentencesResponse", + "BatchTransferResourcesResponse", + "CreateDatasetMetadata", + "CreateDatasetRequest", + "CreateModelMetadata", + "CreateModelRequest", + "Dataset", + "DatasetInputConfig", + "DatasetOutputConfig", + "DeleteDatasetMetadata", + "DeleteDatasetRequest", + "DeleteModelMetadata", + "DeleteModelRequest", + "Example", + "ExportDataMetadata", + "ExportDataRequest", + "GetDatasetRequest", + "GetModelRequest", + "ImportDataMetadata", + "ImportDataRequest", + "ListDatasetsRequest", + "ListDatasetsResponse", + "ListExamplesRequest", + "ListExamplesResponse", + "ListModelsRequest", + "ListModelsResponse", + "Model", "FileInputSource", "GcsInputSource", "GcsOutputDestination", + "GlossaryEntry", + "GlossaryTerm", + "OperationState", "BatchDocumentInputConfig", "BatchDocumentOutputConfig", "BatchTranslateDocumentMetadata", @@ -123,8 +195,10 @@ "BatchTranslateMetadata", "BatchTranslateResponse", "BatchTranslateTextRequest", + "CreateGlossaryEntryRequest", "CreateGlossaryMetadata", "CreateGlossaryRequest", + "DeleteGlossaryEntryRequest", "DeleteGlossaryMetadata", "DeleteGlossaryRequest", "DeleteGlossaryResponse", @@ -136,6 +210,7 @@ "DocumentTranslation", "GcsDestination", "GcsSource", + "GetGlossaryEntryRequest", "GetGlossaryRequest", "GetSupportedLanguagesRequest", "Glossary", @@ -143,7 +218,12 @@ "InputConfig", "ListGlossariesRequest", "ListGlossariesResponse", + "ListGlossaryEntriesRequest", + "ListGlossaryEntriesResponse", "OutputConfig", + "Romanization", + "RomanizeTextRequest", + "RomanizeTextResponse", "SupportedLanguage", "SupportedLanguages", "TranslateDocumentRequest", @@ -152,4 +232,8 @@ "TranslateTextRequest", "TranslateTextResponse", "Translation", + "TransliterationConfig", + "UpdateGlossaryEntryRequest", + "UpdateGlossaryMetadata", + "UpdateGlossaryRequest", ) diff --git a/packages/google-cloud-translate/google/cloud/translate/gapic_version.py 
b/packages/google-cloud-translate/google/cloud/translate/gapic_version.py index 71a07a06cd93..558c8aab67c5 100644 --- a/packages/google-cloud-translate/google/cloud/translate/gapic_version.py +++ b/packages/google-cloud-translate/google/cloud/translate/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/__init__.py b/packages/google-cloud-translate/google/cloud/translate_v3/__init__.py index 483a4a16e8b9..9230dfb1e1f5 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/__init__.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/__init__.py @@ -43,7 +43,42 @@ ListAdaptiveMtSentencesRequest, ListAdaptiveMtSentencesResponse, ) -from .types.common import FileInputSource, GcsInputSource, GcsOutputDestination +from .types.automl_translation import ( + BatchTransferResourcesResponse, + CreateDatasetMetadata, + CreateDatasetRequest, + CreateModelMetadata, + CreateModelRequest, + Dataset, + DatasetInputConfig, + DatasetOutputConfig, + DeleteDatasetMetadata, + DeleteDatasetRequest, + DeleteModelMetadata, + DeleteModelRequest, + Example, + ExportDataMetadata, + ExportDataRequest, + GetDatasetRequest, + GetModelRequest, + ImportDataMetadata, + ImportDataRequest, + ListDatasetsRequest, + ListDatasetsResponse, + ListExamplesRequest, + ListExamplesResponse, + ListModelsRequest, + ListModelsResponse, + Model, +) +from .types.common import ( + FileInputSource, + GcsInputSource, + GcsOutputDestination, + GlossaryEntry, + GlossaryTerm, + OperationState, +) from .types.translation_service import ( BatchDocumentInputConfig, BatchDocumentOutputConfig, @@ -53,8 +88,10 @@ BatchTranslateMetadata, BatchTranslateResponse, BatchTranslateTextRequest, + CreateGlossaryEntryRequest, CreateGlossaryMetadata, CreateGlossaryRequest, + DeleteGlossaryEntryRequest, DeleteGlossaryMetadata, DeleteGlossaryRequest, DeleteGlossaryResponse, @@ -66,6 +103,7 @@ DocumentTranslation, GcsDestination, GcsSource, + GetGlossaryEntryRequest, GetGlossaryRequest, GetSupportedLanguagesRequest, Glossary, @@ -73,7 +111,12 @@ InputConfig, ListGlossariesRequest, ListGlossariesResponse, + ListGlossaryEntriesRequest, + ListGlossaryEntriesResponse, OutputConfig, + Romanization, + RomanizeTextRequest, + RomanizeTextResponse, SupportedLanguage, SupportedLanguages, TranslateDocumentRequest, @@ -82,6 +125,10 @@ TranslateTextRequest, TranslateTextResponse, Translation, + TransliterationConfig, + UpdateGlossaryEntryRequest, + UpdateGlossaryMetadata, + UpdateGlossaryRequest, ) __all__ = ( @@ -94,6 +141,7 @@ "AdaptiveMtTranslation", "BatchDocumentInputConfig", "BatchDocumentOutputConfig", + "BatchTransferResourcesResponse", "BatchTranslateDocumentMetadata", "BatchTranslateDocumentRequest", "BatchTranslateDocumentResponse", @@ -101,19 +149,35 @@ "BatchTranslateResponse", "BatchTranslateTextRequest", "CreateAdaptiveMtDatasetRequest", + "CreateDatasetMetadata", + "CreateDatasetRequest", + "CreateGlossaryEntryRequest", "CreateGlossaryMetadata", "CreateGlossaryRequest", + "CreateModelMetadata", + "CreateModelRequest", + "Dataset", + "DatasetInputConfig", + "DatasetOutputConfig", "DeleteAdaptiveMtDatasetRequest", "DeleteAdaptiveMtFileRequest", + "DeleteDatasetMetadata", + "DeleteDatasetRequest", + "DeleteGlossaryEntryRequest", "DeleteGlossaryMetadata", 
"DeleteGlossaryRequest", "DeleteGlossaryResponse", + "DeleteModelMetadata", + "DeleteModelRequest", "DetectLanguageRequest", "DetectLanguageResponse", "DetectedLanguage", "DocumentInputConfig", "DocumentOutputConfig", "DocumentTranslation", + "Example", + "ExportDataMetadata", + "ExportDataRequest", "FileInputSource", "GcsDestination", "GcsInputSource", @@ -121,12 +185,19 @@ "GcsSource", "GetAdaptiveMtDatasetRequest", "GetAdaptiveMtFileRequest", + "GetDatasetRequest", + "GetGlossaryEntryRequest", "GetGlossaryRequest", + "GetModelRequest", "GetSupportedLanguagesRequest", "Glossary", + "GlossaryEntry", "GlossaryInputConfig", + "GlossaryTerm", "ImportAdaptiveMtFileRequest", "ImportAdaptiveMtFileResponse", + "ImportDataMetadata", + "ImportDataRequest", "InputConfig", "ListAdaptiveMtDatasetsRequest", "ListAdaptiveMtDatasetsResponse", @@ -134,9 +205,22 @@ "ListAdaptiveMtFilesResponse", "ListAdaptiveMtSentencesRequest", "ListAdaptiveMtSentencesResponse", + "ListDatasetsRequest", + "ListDatasetsResponse", + "ListExamplesRequest", + "ListExamplesResponse", "ListGlossariesRequest", "ListGlossariesResponse", + "ListGlossaryEntriesRequest", + "ListGlossaryEntriesResponse", + "ListModelsRequest", + "ListModelsResponse", + "Model", + "OperationState", "OutputConfig", + "Romanization", + "RomanizeTextRequest", + "RomanizeTextResponse", "SupportedLanguage", "SupportedLanguages", "TranslateDocumentRequest", @@ -146,4 +230,8 @@ "TranslateTextResponse", "Translation", "TranslationServiceClient", + "TransliterationConfig", + "UpdateGlossaryEntryRequest", + "UpdateGlossaryMetadata", + "UpdateGlossaryRequest", ) diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/gapic_metadata.json b/packages/google-cloud-translate/google/cloud/translate_v3/gapic_metadata.json index 4f341c6ab80c..c8fc723fdbcd 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/gapic_metadata.json +++ b/packages/google-cloud-translate/google/cloud/translate_v3/gapic_metadata.json @@ -30,11 +30,26 @@ "create_adaptive_mt_dataset" ] }, + "CreateDataset": { + "methods": [ + "create_dataset" + ] + }, "CreateGlossary": { "methods": [ "create_glossary" ] }, + "CreateGlossaryEntry": { + "methods": [ + "create_glossary_entry" + ] + }, + "CreateModel": { + "methods": [ + "create_model" + ] + }, "DeleteAdaptiveMtDataset": { "methods": [ "delete_adaptive_mt_dataset" @@ -45,16 +60,36 @@ "delete_adaptive_mt_file" ] }, + "DeleteDataset": { + "methods": [ + "delete_dataset" + ] + }, "DeleteGlossary": { "methods": [ "delete_glossary" ] }, + "DeleteGlossaryEntry": { + "methods": [ + "delete_glossary_entry" + ] + }, + "DeleteModel": { + "methods": [ + "delete_model" + ] + }, "DetectLanguage": { "methods": [ "detect_language" ] }, + "ExportData": { + "methods": [ + "export_data" + ] + }, "GetAdaptiveMtDataset": { "methods": [ "get_adaptive_mt_dataset" @@ -65,11 +100,26 @@ "get_adaptive_mt_file" ] }, + "GetDataset": { + "methods": [ + "get_dataset" + ] + }, "GetGlossary": { "methods": [ "get_glossary" ] }, + "GetGlossaryEntry": { + "methods": [ + "get_glossary_entry" + ] + }, + "GetModel": { + "methods": [ + "get_model" + ] + }, "GetSupportedLanguages": { "methods": [ "get_supported_languages" @@ -80,6 +130,11 @@ "import_adaptive_mt_file" ] }, + "ImportData": { + "methods": [ + "import_data" + ] + }, "ListAdaptiveMtDatasets": { "methods": [ "list_adaptive_mt_datasets" @@ -95,11 +150,36 @@ "list_adaptive_mt_sentences" ] }, + "ListDatasets": { + "methods": [ + "list_datasets" + ] + }, + "ListExamples": { + "methods": [ + 
"list_examples" + ] + }, "ListGlossaries": { "methods": [ "list_glossaries" ] }, + "ListGlossaryEntries": { + "methods": [ + "list_glossary_entries" + ] + }, + "ListModels": { + "methods": [ + "list_models" + ] + }, + "RomanizeText": { + "methods": [ + "romanize_text" + ] + }, "TranslateDocument": { "methods": [ "translate_document" @@ -109,6 +189,16 @@ "methods": [ "translate_text" ] + }, + "UpdateGlossary": { + "methods": [ + "update_glossary" + ] + }, + "UpdateGlossaryEntry": { + "methods": [ + "update_glossary_entry" + ] } } }, @@ -135,11 +225,26 @@ "create_adaptive_mt_dataset" ] }, + "CreateDataset": { + "methods": [ + "create_dataset" + ] + }, "CreateGlossary": { "methods": [ "create_glossary" ] }, + "CreateGlossaryEntry": { + "methods": [ + "create_glossary_entry" + ] + }, + "CreateModel": { + "methods": [ + "create_model" + ] + }, "DeleteAdaptiveMtDataset": { "methods": [ "delete_adaptive_mt_dataset" @@ -150,16 +255,36 @@ "delete_adaptive_mt_file" ] }, + "DeleteDataset": { + "methods": [ + "delete_dataset" + ] + }, "DeleteGlossary": { "methods": [ "delete_glossary" ] }, + "DeleteGlossaryEntry": { + "methods": [ + "delete_glossary_entry" + ] + }, + "DeleteModel": { + "methods": [ + "delete_model" + ] + }, "DetectLanguage": { "methods": [ "detect_language" ] }, + "ExportData": { + "methods": [ + "export_data" + ] + }, "GetAdaptiveMtDataset": { "methods": [ "get_adaptive_mt_dataset" @@ -170,11 +295,26 @@ "get_adaptive_mt_file" ] }, + "GetDataset": { + "methods": [ + "get_dataset" + ] + }, "GetGlossary": { "methods": [ "get_glossary" ] }, + "GetGlossaryEntry": { + "methods": [ + "get_glossary_entry" + ] + }, + "GetModel": { + "methods": [ + "get_model" + ] + }, "GetSupportedLanguages": { "methods": [ "get_supported_languages" @@ -185,6 +325,11 @@ "import_adaptive_mt_file" ] }, + "ImportData": { + "methods": [ + "import_data" + ] + }, "ListAdaptiveMtDatasets": { "methods": [ "list_adaptive_mt_datasets" @@ -200,11 +345,36 @@ "list_adaptive_mt_sentences" ] }, + "ListDatasets": { + "methods": [ + "list_datasets" + ] + }, + "ListExamples": { + "methods": [ + "list_examples" + ] + }, "ListGlossaries": { "methods": [ "list_glossaries" ] }, + "ListGlossaryEntries": { + "methods": [ + "list_glossary_entries" + ] + }, + "ListModels": { + "methods": [ + "list_models" + ] + }, + "RomanizeText": { + "methods": [ + "romanize_text" + ] + }, "TranslateDocument": { "methods": [ "translate_document" @@ -214,6 +384,16 @@ "methods": [ "translate_text" ] + }, + "UpdateGlossary": { + "methods": [ + "update_glossary" + ] + }, + "UpdateGlossaryEntry": { + "methods": [ + "update_glossary_entry" + ] } } }, @@ -240,11 +420,26 @@ "create_adaptive_mt_dataset" ] }, + "CreateDataset": { + "methods": [ + "create_dataset" + ] + }, "CreateGlossary": { "methods": [ "create_glossary" ] }, + "CreateGlossaryEntry": { + "methods": [ + "create_glossary_entry" + ] + }, + "CreateModel": { + "methods": [ + "create_model" + ] + }, "DeleteAdaptiveMtDataset": { "methods": [ "delete_adaptive_mt_dataset" @@ -255,16 +450,36 @@ "delete_adaptive_mt_file" ] }, + "DeleteDataset": { + "methods": [ + "delete_dataset" + ] + }, "DeleteGlossary": { "methods": [ "delete_glossary" ] }, + "DeleteGlossaryEntry": { + "methods": [ + "delete_glossary_entry" + ] + }, + "DeleteModel": { + "methods": [ + "delete_model" + ] + }, "DetectLanguage": { "methods": [ "detect_language" ] }, + "ExportData": { + "methods": [ + "export_data" + ] + }, "GetAdaptiveMtDataset": { "methods": [ "get_adaptive_mt_dataset" @@ -275,11 +490,26 @@ "get_adaptive_mt_file" ] }, 
+ "GetDataset": { + "methods": [ + "get_dataset" + ] + }, "GetGlossary": { "methods": [ "get_glossary" ] }, + "GetGlossaryEntry": { + "methods": [ + "get_glossary_entry" + ] + }, + "GetModel": { + "methods": [ + "get_model" + ] + }, "GetSupportedLanguages": { "methods": [ "get_supported_languages" @@ -290,6 +520,11 @@ "import_adaptive_mt_file" ] }, + "ImportData": { + "methods": [ + "import_data" + ] + }, "ListAdaptiveMtDatasets": { "methods": [ "list_adaptive_mt_datasets" @@ -305,11 +540,36 @@ "list_adaptive_mt_sentences" ] }, + "ListDatasets": { + "methods": [ + "list_datasets" + ] + }, + "ListExamples": { + "methods": [ + "list_examples" + ] + }, "ListGlossaries": { "methods": [ "list_glossaries" ] }, + "ListGlossaryEntries": { + "methods": [ + "list_glossary_entries" + ] + }, + "ListModels": { + "methods": [ + "list_models" + ] + }, + "RomanizeText": { + "methods": [ + "romanize_text" + ] + }, "TranslateDocument": { "methods": [ "translate_document" @@ -319,6 +579,16 @@ "methods": [ "translate_text" ] + }, + "UpdateGlossary": { + "methods": [ + "update_glossary" + ] + }, + "UpdateGlossaryEntry": { + "methods": [ + "update_glossary_entry" + ] } } } diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/gapic_version.py b/packages/google-cloud-translate/google/cloud/translate_v3/gapic_version.py index 71a07a06cd93..558c8aab67c5 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/gapic_version.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/async_client.py b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/async_client.py index 73762370d397..8de8df0a3b2a 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/async_client.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/async_client.py @@ -45,10 +45,21 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.translate_v3.services.translation_service import pagers -from google.cloud.translate_v3.types import adaptive_mt, translation_service +from google.cloud.translate_v3.types import ( + adaptive_mt, + automl_translation, + common, + translation_service, +) from .client import TranslationServiceClient from .transports.base import DEFAULT_CLIENT_INFO, TranslationServiceTransport @@ -83,8 +94,18 @@ class TranslationServiceAsyncClient: parse_adaptive_mt_sentence_path = staticmethod( TranslationServiceClient.parse_adaptive_mt_sentence_path ) + dataset_path = staticmethod(TranslationServiceClient.dataset_path) + parse_dataset_path = staticmethod(TranslationServiceClient.parse_dataset_path) + example_path = 
staticmethod(TranslationServiceClient.example_path) + parse_example_path = staticmethod(TranslationServiceClient.parse_example_path) glossary_path = staticmethod(TranslationServiceClient.glossary_path) parse_glossary_path = staticmethod(TranslationServiceClient.parse_glossary_path) + glossary_entry_path = staticmethod(TranslationServiceClient.glossary_entry_path) + parse_glossary_entry_path = staticmethod( + TranslationServiceClient.parse_glossary_entry_path + ) + model_path = staticmethod(TranslationServiceClient.model_path) + parse_model_path = staticmethod(TranslationServiceClient.parse_model_path) common_billing_account_path = staticmethod( TranslationServiceClient.common_billing_account_path ) @@ -382,6 +403,9 @@ async def sample_translate_text(): - General (built-in) models: ``projects/{project-number-or-id}/locations/{location-id}/models/general/nmt``, + - Translation LLM models: + ``projects/{project-number-or-id}/locations/{location-id}/models/general/translation-llm``, + For global (non-regionalized) requests, use ``location-id`` ``global``. For example, ``projects/{project-number-or-id}/locations/global/models/general/nmt``. @@ -489,6 +513,132 @@ async def sample_translate_text(): # Done; return the response. return response + async def romanize_text( + self, + request: Optional[Union[translation_service.RomanizeTextRequest, dict]] = None, + *, + parent: Optional[str] = None, + contents: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> translation_service.RomanizeTextResponse: + r"""Romanize input text written in non-Latin scripts to + Latin text. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_romanize_text(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.RomanizeTextRequest( + parent="parent_value", + contents=['contents_value1', 'contents_value2'], + ) + + # Make the request + response = await client.romanize_text(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.RomanizeTextRequest, dict]]): + The request object. The request message for synchronous + romanization. + parent (:class:`str`): + Required. Project or location to make a call. Must refer + to a caller's project. + + Format: + ``projects/{project-number-or-id}/locations/{location-id}`` + or ``projects/{project-number-or-id}``. + + For global calls, use + ``projects/{project-number-or-id}/locations/global`` or + ``projects/{project-number-or-id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + contents (:class:`MutableSequence[str]`): + Required. The content of the input in + string format. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.types.RomanizeTextResponse: + The response message for synchronous + romanization. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, contents]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, translation_service.RomanizeTextRequest): + request = translation_service.RomanizeTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if contents: + request.contents.extend(contents) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.romanize_text + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def detect_language( self, request: Optional[ @@ -1361,6 +1511,145 @@ async def sample_create_glossary(): # Done; return the response. return response + async def update_glossary( + self, + request: Optional[ + Union[translation_service.UpdateGlossaryRequest, dict] + ] = None, + *, + glossary: Optional[translation_service.Glossary] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a glossary. A LRO is used since the update + can be async if the glossary's entry file is updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_update_glossary(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + glossary = translate_v3.Glossary() + glossary.name = "name_value" + + request = translate_v3.UpdateGlossaryRequest( + glossary=glossary, + ) + + # Make the request + operation = client.update_glossary(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.UpdateGlossaryRequest, dict]]): + The request object. Request message for the update + glossary flow + glossary (:class:`google.cloud.translate_v3.types.Glossary`): + Required. The glossary entry to + update. + + This corresponds to the ``glossary`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The list of fields to be updated. Currently only + ``display_name`` and 'input_config' + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.translate_v3.types.Glossary` + Represents a glossary built from user-provided data. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([glossary, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, translation_service.UpdateGlossaryRequest): + request = translation_service.UpdateGlossaryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if glossary is not None: + request.glossary = glossary + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_glossary + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("glossary.name", request.glossary.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + translation_service.Glossary, + metadata_type=translation_service.UpdateGlossaryMetadata, + ) + + # Done; return the response. + return response + async def list_glossaries( self, request: Optional[ @@ -1715,19 +2004,18 @@ async def sample_delete_glossary(): # Done; return the response. return response - async def create_adaptive_mt_dataset( + async def get_glossary_entry( self, request: Optional[ - Union[adaptive_mt.CreateAdaptiveMtDatasetRequest, dict] + Union[translation_service.GetGlossaryEntryRequest, dict] ] = None, *, - parent: Optional[str] = None, - adaptive_mt_dataset: Optional[adaptive_mt.AdaptiveMtDataset] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> adaptive_mt.AdaptiveMtDataset: - r"""Creates an Adaptive MT dataset. + ) -> common.GlossaryEntry: + r"""Gets a single glossary entry by the given id. .. code-block:: python @@ -1740,41 +2028,30 @@ async def create_adaptive_mt_dataset( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import translate_v3 - async def sample_create_adaptive_mt_dataset(): + async def sample_get_glossary_entry(): # Create a client client = translate_v3.TranslationServiceAsyncClient() # Initialize request argument(s) - adaptive_mt_dataset = translate_v3.AdaptiveMtDataset() - adaptive_mt_dataset.name = "name_value" - - request = translate_v3.CreateAdaptiveMtDatasetRequest( - parent="parent_value", - adaptive_mt_dataset=adaptive_mt_dataset, + request = translate_v3.GetGlossaryEntryRequest( + name="name_value", ) # Make the request - response = await client.create_adaptive_mt_dataset(request=request) + response = await client.get_glossary_entry(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.cloud.translate_v3.types.CreateAdaptiveMtDatasetRequest, dict]]): - The request object. Request message for creating an - AdaptiveMtDataset. - parent (:class:`str`): - Required. Name of the parent project. In form of - ``projects/{project-number-or-id}/locations/{location-id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - adaptive_mt_dataset (:class:`google.cloud.translate_v3.types.AdaptiveMtDataset`): - Required. The AdaptiveMtDataset to be - created. + request (Optional[Union[google.cloud.translate_v3.types.GetGlossaryEntryRequest, dict]]): + The request object. Request message for the Get Glossary + Entry Api + name (:class:`str`): + Required. The resource name of the + glossary entry to get - This corresponds to the ``adaptive_mt_dataset`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -1784,13 +2061,15 @@ async def sample_create_adaptive_mt_dataset(): sent along with the request as metadata. Returns: - google.cloud.translate_v3.types.AdaptiveMtDataset: - An Adaptive MT Dataset. + google.cloud.translate_v3.types.GlossaryEntry: + Represents a single entry in a + glossary. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, adaptive_mt_dataset]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1799,26 +2078,24 @@ async def sample_create_adaptive_mt_dataset(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, adaptive_mt.CreateAdaptiveMtDatasetRequest): - request = adaptive_mt.CreateAdaptiveMtDatasetRequest(request) + if not isinstance(request, translation_service.GetGlossaryEntryRequest): + request = translation_service.GetGlossaryEntryRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent - if adaptive_mt_dataset is not None: - request.adaptive_mt_dataset = adaptive_mt_dataset + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_adaptive_mt_dataset + self._client._transport.get_glossary_entry ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1835,19 +2112,18 @@ async def sample_create_adaptive_mt_dataset(): # Done; return the response. return response - async def delete_adaptive_mt_dataset( + async def list_glossary_entries( self, request: Optional[ - Union[adaptive_mt.DeleteAdaptiveMtDatasetRequest, dict] + Union[translation_service.ListGlossaryEntriesRequest, dict] ] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an Adaptive MT dataset, including all its - entries and associated metadata. + ) -> pagers.ListGlossaryEntriesAsyncPager: + r"""List the entries for the glossary. .. code-block:: python @@ -1860,27 +2136,32 @@ async def delete_adaptive_mt_dataset( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import translate_v3 - async def sample_delete_adaptive_mt_dataset(): + async def sample_list_glossary_entries(): # Create a client client = translate_v3.TranslationServiceAsyncClient() # Initialize request argument(s) - request = translate_v3.DeleteAdaptiveMtDatasetRequest( - name="name_value", + request = translate_v3.ListGlossaryEntriesRequest( + parent="parent_value", ) # Make the request - await client.delete_adaptive_mt_dataset(request=request) + page_result = client.list_glossary_entries(request=request) + + # Handle the response + async for response in page_result: + print(response) Args: - request (Optional[Union[google.cloud.translate_v3.types.DeleteAdaptiveMtDatasetRequest, dict]]): - The request object. Request message for deleting an - AdaptiveMtDataset. - name (:class:`str`): - Required. Name of the dataset. In the form of - ``projects/{project-number-or-id}/locations/{location-id}/adaptiveMtDatasets/{adaptive-mt-dataset-id}`` + request (Optional[Union[google.cloud.translate_v3.types.ListGlossaryEntriesRequest, dict]]): + The request object. 
Request message for + ListGlossaryEntries + parent (:class:`str`): + Required. The parent glossary + resource name for listing the glossary's + entries. - This corresponds to the ``name`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -1888,11 +2169,20 @@ async def sample_delete_adaptive_mt_dataset(): timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.services.translation_service.pagers.ListGlossaryEntriesAsyncPager: + Response message for + ListGlossaryEntries + Iterating over this object will yield + results and resolve additional pages + automatically. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1901,47 +2191,64 @@ async def sample_delete_adaptive_mt_dataset(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, adaptive_mt.DeleteAdaptiveMtDatasetRequest): - request = adaptive_mt.DeleteAdaptiveMtDatasetRequest(request) + if not isinstance(request, translation_service.ListGlossaryEntriesRequest): + request = translation_service.ListGlossaryEntriesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_adaptive_mt_dataset + self._client._transport.list_glossary_entries ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - await rpc( + response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - async def get_adaptive_mt_dataset( + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListGlossaryEntriesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_glossary_entry( self, - request: Optional[Union[adaptive_mt.GetAdaptiveMtDatasetRequest, dict]] = None, + request: Optional[ + Union[translation_service.CreateGlossaryEntryRequest, dict] + ] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, + glossary_entry: Optional[common.GlossaryEntry] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> adaptive_mt.AdaptiveMtDataset: - r"""Gets the Adaptive MT dataset. 
+ ) -> common.GlossaryEntry: + r"""Creates a glossary entry. .. code-block:: python @@ -1954,30 +2261,37 @@ async def get_adaptive_mt_dataset( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import translate_v3 - async def sample_get_adaptive_mt_dataset(): + async def sample_create_glossary_entry(): # Create a client client = translate_v3.TranslationServiceAsyncClient() # Initialize request argument(s) - request = translate_v3.GetAdaptiveMtDatasetRequest( - name="name_value", + request = translate_v3.CreateGlossaryEntryRequest( + parent="parent_value", ) # Make the request - response = await client.get_adaptive_mt_dataset(request=request) + response = await client.create_glossary_entry(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.cloud.translate_v3.types.GetAdaptiveMtDatasetRequest, dict]]): - The request object. Request message for getting an - Adaptive MT dataset. - name (:class:`str`): - Required. Name of the dataset. In the form of - ``projects/{project-number-or-id}/locations/{location-id}/adaptiveMtDatasets/{adaptive-mt-dataset-id}`` + request (Optional[Union[google.cloud.translate_v3.types.CreateGlossaryEntryRequest, dict]]): + The request object. Request message for + CreateGlossaryEntry + parent (:class:`str`): + Required. The resource name of the + glossary to create the entry under. - This corresponds to the ``name`` field + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + glossary_entry (:class:`google.cloud.translate_v3.types.GlossaryEntry`): + Required. The glossary entry to + create + + This corresponds to the ``glossary_entry`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -1987,13 +2301,15 @@ async def sample_get_adaptive_mt_dataset(): sent along with the request as metadata. Returns: - google.cloud.translate_v3.types.AdaptiveMtDataset: - An Adaptive MT Dataset. + google.cloud.translate_v3.types.GlossaryEntry: + Represents a single entry in a + glossary. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([parent, glossary_entry]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2002,24 +2318,26 @@ async def sample_get_adaptive_mt_dataset(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, adaptive_mt.GetAdaptiveMtDatasetRequest): - request = adaptive_mt.GetAdaptiveMtDatasetRequest(request) + if not isinstance(request, translation_service.CreateGlossaryEntryRequest): + request = translation_service.CreateGlossaryEntryRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if parent is not None: + request.parent = parent + if glossary_entry is not None: + request.glossary_entry = glossary_entry # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_adaptive_mt_dataset + self._client._transport.create_glossary_entry ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2036,19 +2354,18 @@ async def sample_get_adaptive_mt_dataset(): # Done; return the response. return response - async def list_adaptive_mt_datasets( + async def update_glossary_entry( self, request: Optional[ - Union[adaptive_mt.ListAdaptiveMtDatasetsRequest, dict] + Union[translation_service.UpdateGlossaryEntryRequest, dict] ] = None, *, - parent: Optional[str] = None, + glossary_entry: Optional[common.GlossaryEntry] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAdaptiveMtDatasetsAsyncPager: - r"""Lists all Adaptive MT datasets for which the caller - has read permission. + ) -> common.GlossaryEntry: + r"""Updates a glossary entry. .. code-block:: python @@ -2061,33 +2378,29 @@ async def list_adaptive_mt_datasets( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import translate_v3 - async def sample_list_adaptive_mt_datasets(): + async def sample_update_glossary_entry(): # Create a client client = translate_v3.TranslationServiceAsyncClient() # Initialize request argument(s) - request = translate_v3.ListAdaptiveMtDatasetsRequest( - parent="parent_value", + request = translate_v3.UpdateGlossaryEntryRequest( ) # Make the request - page_result = client.list_adaptive_mt_datasets(request=request) + response = await client.update_glossary_entry(request=request) # Handle the response - async for response in page_result: - print(response) + print(response) Args: - request (Optional[Union[google.cloud.translate_v3.types.ListAdaptiveMtDatasetsRequest, dict]]): - The request object. Request message for listing all - Adaptive MT datasets that the requestor - has access to. - parent (:class:`str`): - Required. The resource name of the project from which to - list the Adaptive MT datasets. - ``projects/{project-number-or-id}/locations/{location-id}`` - - This corresponds to the ``parent`` field + request (Optional[Union[google.cloud.translate_v3.types.UpdateGlossaryEntryRequest, dict]]): + The request object. Request message for + UpdateGlossaryEntry + glossary_entry (:class:`google.cloud.translate_v3.types.GlossaryEntry`): + Required. The glossary entry to + update. + + This corresponds to the ``glossary_entry`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -2097,18 +2410,15 @@ async def sample_list_adaptive_mt_datasets(): sent along with the request as metadata. Returns: - google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtDatasetsAsyncPager: - A list of AdaptiveMtDatasets. - - Iterating over this object will yield - results and resolve additional pages - automatically. + google.cloud.translate_v3.types.GlossaryEntry: + Represents a single entry in a + glossary. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + has_flattened_params = any([glossary_entry]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2117,24 +2427,26 @@ async def sample_list_adaptive_mt_datasets(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, adaptive_mt.ListAdaptiveMtDatasetsRequest): - request = adaptive_mt.ListAdaptiveMtDatasetsRequest(request) + if not isinstance(request, translation_service.UpdateGlossaryEntryRequest): + request = translation_service.UpdateGlossaryEntryRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if glossary_entry is not None: + request.glossary_entry = glossary_entry # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_adaptive_mt_datasets + self._client._transport.update_glossary_entry ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata( + (("glossary_entry.name", request.glossary_entry.name),) + ), ) # Validate the universe domain. @@ -2148,31 +2460,21 @@ async def sample_list_adaptive_mt_datasets(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListAdaptiveMtDatasetsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - # Done; return the response. return response - async def adaptive_mt_translate( + async def delete_glossary_entry( self, - request: Optional[Union[adaptive_mt.AdaptiveMtTranslateRequest, dict]] = None, + request: Optional[ + Union[translation_service.DeleteGlossaryEntryRequest, dict] + ] = None, *, - parent: Optional[str] = None, - content: Optional[MutableSequence[str]] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> adaptive_mt.AdaptiveMtTranslateResponse: - r"""Translate text using Adaptive MT. + ) -> None: + r"""Deletes a single entry from the glossary .. code-block:: python @@ -2185,42 +2487,27 @@ async def adaptive_mt_translate( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import translate_v3 - async def sample_adaptive_mt_translate(): + async def sample_delete_glossary_entry(): # Create a client client = translate_v3.TranslationServiceAsyncClient() # Initialize request argument(s) - request = translate_v3.AdaptiveMtTranslateRequest( - parent="parent_value", - dataset="dataset_value", - content=['content_value1', 'content_value2'], + request = translate_v3.DeleteGlossaryEntryRequest( + name="name_value", ) # Make the request - response = await client.adaptive_mt_translate(request=request) - - # Handle the response - print(response) + await client.delete_glossary_entry(request=request) Args: - request (Optional[Union[google.cloud.translate_v3.types.AdaptiveMtTranslateRequest, dict]]): - The request object. The request for sending an AdaptiveMt - translation query. 
- parent (:class:`str`): - Required. Location to make a regional call. - - Format: - ``projects/{project-number-or-id}/locations/{location-id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - content (:class:`MutableSequence[str]`): - Required. The content of the input in - string format. For now only one sentence - per request is supported. + request (Optional[Union[google.cloud.translate_v3.types.DeleteGlossaryEntryRequest, dict]]): + The request object. Request message for Delete Glossary + Entry + name (:class:`str`): + Required. The resource name of the + glossary entry to delete - This corresponds to the ``content`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -2228,15 +2515,11 @@ async def sample_adaptive_mt_translate(): timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - google.cloud.translate_v3.types.AdaptiveMtTranslateResponse: - An AdaptiveMtTranslate response. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, content]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2245,52 +2528,48 @@ async def sample_adaptive_mt_translate(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, adaptive_mt.AdaptiveMtTranslateRequest): - request = adaptive_mt.AdaptiveMtTranslateRequest(request) + if not isinstance(request, translation_service.DeleteGlossaryEntryRequest): + request = translation_service.DeleteGlossaryEntryRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent - if content: - request.content.extend(content) + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.adaptive_mt_translate + self._client._transport.delete_glossary_entry ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - response = await rpc( + await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - # Done; return the response. 
- return response - - async def get_adaptive_mt_file( + async def create_dataset( self, - request: Optional[Union[adaptive_mt.GetAdaptiveMtFileRequest, dict]] = None, + request: Optional[Union[automl_translation.CreateDatasetRequest, dict]] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, + dataset: Optional[automl_translation.Dataset] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> adaptive_mt.AdaptiveMtFile: - r"""Gets and AdaptiveMtFile + ) -> operation_async.AsyncOperation: + r"""Creates a Dataset. .. code-block:: python @@ -2303,30 +2582,36 @@ async def get_adaptive_mt_file( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import translate_v3 - async def sample_get_adaptive_mt_file(): + async def sample_create_dataset(): # Create a client client = translate_v3.TranslationServiceAsyncClient() # Initialize request argument(s) - request = translate_v3.GetAdaptiveMtFileRequest( - name="name_value", + request = translate_v3.CreateDatasetRequest( + parent="parent_value", ) # Make the request - response = await client.get_adaptive_mt_file(request=request) + operation = client.create_dataset(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response print(response) Args: - request (Optional[Union[google.cloud.translate_v3.types.GetAdaptiveMtFileRequest, dict]]): - The request object. The request for getting an - AdaptiveMtFile. - name (:class:`str`): - Required. The resource name of the file, in form of - ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}`` - - This corresponds to the ``name`` field + request (Optional[Union[google.cloud.translate_v3.types.CreateDatasetRequest, dict]]): + The request object. Request message for CreateDataset. + parent (:class:`str`): + Required. The project name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + dataset (:class:`google.cloud.translate_v3.types.Dataset`): + Required. The Dataset to create. + This corresponds to the ``dataset`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -2336,13 +2621,17 @@ async def sample_get_adaptive_mt_file(): sent along with the request as metadata. Returns: - google.cloud.translate_v3.types.AdaptiveMtFile: - An AdaptiveMtFile. + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.translate_v3.types.Dataset` A dataset that hosts the examples (sentence pairs) used for translation + models. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([parent, dataset]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2351,24 +2640,26 @@ async def sample_get_adaptive_mt_file(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, adaptive_mt.GetAdaptiveMtFileRequest): - request = adaptive_mt.GetAdaptiveMtFileRequest(request) + if not isinstance(request, automl_translation.CreateDatasetRequest): + request = automl_translation.CreateDatasetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if parent is not None: + request.parent = parent + if dataset is not None: + request.dataset = dataset # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_adaptive_mt_file + self._client._transport.create_dataset ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2382,19 +2673,27 @@ async def sample_get_adaptive_mt_file(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + automl_translation.Dataset, + metadata_type=automl_translation.CreateDatasetMetadata, + ) + # Done; return the response. return response - async def delete_adaptive_mt_file( + async def get_dataset( self, - request: Optional[Union[adaptive_mt.DeleteAdaptiveMtFileRequest, dict]] = None, + request: Optional[Union[automl_translation.GetDatasetRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an AdaptiveMtFile along with its sentences. + ) -> automl_translation.Dataset: + r"""Gets a Dataset. .. code-block:: python @@ -2407,26 +2706,27 @@ async def delete_adaptive_mt_file( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import translate_v3 - async def sample_delete_adaptive_mt_file(): + async def sample_get_dataset(): # Create a client client = translate_v3.TranslationServiceAsyncClient() # Initialize request argument(s) - request = translate_v3.DeleteAdaptiveMtFileRequest( + request = translate_v3.GetDatasetRequest( name="name_value", ) # Make the request - await client.delete_adaptive_mt_file(request=request) + response = await client.get_dataset(request=request) + + # Handle the response + print(response) Args: - request (Optional[Union[google.cloud.translate_v3.types.DeleteAdaptiveMtFileRequest, dict]]): - The request object. The request for deleting an - AdaptiveMt file. + request (Optional[Union[google.cloud.translate_v3.types.GetDatasetRequest, dict]]): + The request object. Request message for GetDataset. name (:class:`str`): - Required. The resource name of the file to delete, in - form of - ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}`` + Required. The resource name of the + dataset to retrieve. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2436,6 +2736,13 @@ async def sample_delete_adaptive_mt_file(): timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
+ + Returns: + google.cloud.translate_v3.types.Dataset: + A dataset that hosts the examples + (sentence pairs) used for translation + models. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -2449,8 +2756,8 @@ async def sample_delete_adaptive_mt_file(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, adaptive_mt.DeleteAdaptiveMtFileRequest): - request = adaptive_mt.DeleteAdaptiveMtFileRequest(request) + if not isinstance(request, automl_translation.GetDatasetRequest): + request = automl_translation.GetDatasetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2460,7 +2767,7 @@ async def sample_delete_adaptive_mt_file(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_adaptive_mt_file + self._client._transport.get_dataset ] # Certain fields should be provided within the metadata header; @@ -2473,24 +2780,26 @@ async def sample_delete_adaptive_mt_file(): self._client._validate_universe_domain() # Send the request. - await rpc( + response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - async def import_adaptive_mt_file( + # Done; return the response. + return response + + async def list_datasets( self, - request: Optional[Union[adaptive_mt.ImportAdaptiveMtFileRequest, dict]] = None, + request: Optional[Union[automl_translation.ListDatasetsRequest, dict]] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> adaptive_mt.ImportAdaptiveMtFileResponse: - r"""Imports an AdaptiveMtFile and adds all of its - sentences into the AdaptiveMtDataset. + ) -> pagers.ListDatasetsAsyncPager: + r"""Lists datasets. .. code-block:: python @@ -2503,35 +2812,28 @@ async def import_adaptive_mt_file( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import translate_v3 - async def sample_import_adaptive_mt_file(): + async def sample_list_datasets(): # Create a client client = translate_v3.TranslationServiceAsyncClient() # Initialize request argument(s) - file_input_source = translate_v3.FileInputSource() - file_input_source.mime_type = "mime_type_value" - file_input_source.content = b'content_blob' - file_input_source.display_name = "display_name_value" - - request = translate_v3.ImportAdaptiveMtFileRequest( - file_input_source=file_input_source, + request = translate_v3.ListDatasetsRequest( parent="parent_value", ) # Make the request - response = await client.import_adaptive_mt_file(request=request) + page_result = client.list_datasets(request=request) # Handle the response - print(response) + async for response in page_result: + print(response) Args: - request (Optional[Union[google.cloud.translate_v3.types.ImportAdaptiveMtFileRequest, dict]]): - The request object. The request for importing an - AdaptiveMt file along with its - sentences. + request (Optional[Union[google.cloud.translate_v3.types.ListDatasetsRequest, dict]]): + The request object. Request message for ListDatasets. parent (:class:`str`): - Required. 
The resource name of the file, in form of - ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}`` + Required. Name of the parent project. In form of + ``projects/{project-number-or-id}/locations/{location-id}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -2543,9 +2845,12 @@ async def sample_import_adaptive_mt_file(): sent along with the request as metadata. Returns: - google.cloud.translate_v3.types.ImportAdaptiveMtFileResponse: - The response for importing an - AdaptiveMtFile + google.cloud.translate_v3.services.translation_service.pagers.ListDatasetsAsyncPager: + Response message for ListDatasets. + + Iterating over this object will yield + results and resolve additional pages + automatically. """ # Create or coerce a protobuf request object. @@ -2560,8 +2865,8 @@ async def sample_import_adaptive_mt_file(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, adaptive_mt.ImportAdaptiveMtFileRequest): - request = adaptive_mt.ImportAdaptiveMtFileRequest(request) + if not isinstance(request, automl_translation.ListDatasetsRequest): + request = automl_translation.ListDatasetsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2571,7 +2876,7 @@ async def sample_import_adaptive_mt_file(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.import_adaptive_mt_file + self._client._transport.list_datasets ] # Certain fields should be provided within the metadata header; @@ -2591,20 +2896,30 @@ async def sample_import_adaptive_mt_file(): metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDatasetsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + # Done; return the response. return response - async def list_adaptive_mt_files( + async def delete_dataset( self, - request: Optional[Union[adaptive_mt.ListAdaptiveMtFilesRequest, dict]] = None, + request: Optional[Union[automl_translation.DeleteDatasetRequest, dict]] = None, *, - parent: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAdaptiveMtFilesAsyncPager: - r"""Lists all AdaptiveMtFiles associated to an - AdaptiveMtDataset. + ) -> operation_async.AsyncOperation: + r"""Deletes a dataset and all of its contents. .. 
code-block:: python @@ -2617,32 +2932,33 @@ async def list_adaptive_mt_files( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import translate_v3 - async def sample_list_adaptive_mt_files(): + async def sample_delete_dataset(): # Create a client client = translate_v3.TranslationServiceAsyncClient() # Initialize request argument(s) - request = translate_v3.ListAdaptiveMtFilesRequest( - parent="parent_value", + request = translate_v3.DeleteDatasetRequest( + name="name_value", ) # Make the request - page_result = client.list_adaptive_mt_files(request=request) + operation = client.delete_dataset(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response - async for response in page_result: - print(response) + print(response) Args: - request (Optional[Union[google.cloud.translate_v3.types.ListAdaptiveMtFilesRequest, dict]]): - The request object. The request to list all AdaptiveMt - files under a given dataset. - parent (:class:`str`): - Required. The resource name of the project from which to - list the Adaptive MT files. - ``projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}`` + request (Optional[Union[google.cloud.translate_v3.types.DeleteDatasetRequest, dict]]): + The request object. Request message for DeleteDataset. + name (:class:`str`): + Required. The name of the dataset to + delete. - This corresponds to the ``parent`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -2652,18 +2968,25 @@ async def sample_list_adaptive_mt_files(): sent along with the request as metadata. Returns: - google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtFilesAsyncPager: - The response for listing all - AdaptiveMt files under a given dataset. - Iterating over this object will yield - results and resolve additional pages - automatically. + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2672,24 +2995,24 @@ async def sample_list_adaptive_mt_files(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, adaptive_mt.ListAdaptiveMtFilesRequest): - request = adaptive_mt.ListAdaptiveMtFilesRequest(request) + if not isinstance(request, automl_translation.DeleteDatasetRequest): + request = automl_translation.DeleteDatasetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: - request.parent = parent + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_adaptive_mt_files + self._client._transport.delete_dataset ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2703,33 +3026,30 @@ async def sample_list_adaptive_mt_files(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListAdaptiveMtFilesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=automl_translation.DeleteDatasetMetadata, ) # Done; return the response. return response - async def list_adaptive_mt_sentences( + async def create_adaptive_mt_dataset( self, request: Optional[ - Union[adaptive_mt.ListAdaptiveMtSentencesRequest, dict] + Union[adaptive_mt.CreateAdaptiveMtDatasetRequest, dict] ] = None, *, parent: Optional[str] = None, + adaptive_mt_dataset: Optional[adaptive_mt.AdaptiveMtDataset] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAdaptiveMtSentencesAsyncPager: - r"""Lists all AdaptiveMtSentences under a given - file/dataset. + ) -> adaptive_mt.AdaptiveMtDataset: + r"""Creates an Adaptive MT dataset. .. code-block:: python @@ -2742,38 +3062,43 @@ async def list_adaptive_mt_sentences( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import translate_v3 - async def sample_list_adaptive_mt_sentences(): + async def sample_create_adaptive_mt_dataset(): # Create a client client = translate_v3.TranslationServiceAsyncClient() # Initialize request argument(s) - request = translate_v3.ListAdaptiveMtSentencesRequest( + adaptive_mt_dataset = translate_v3.AdaptiveMtDataset() + adaptive_mt_dataset.name = "name_value" + + request = translate_v3.CreateAdaptiveMtDatasetRequest( parent="parent_value", + adaptive_mt_dataset=adaptive_mt_dataset, ) # Make the request - page_result = client.list_adaptive_mt_sentences(request=request) + response = await client.create_adaptive_mt_dataset(request=request) # Handle the response - async for response in page_result: - print(response) + print(response) Args: - request (Optional[Union[google.cloud.translate_v3.types.ListAdaptiveMtSentencesRequest, dict]]): - The request object. The request for listing Adaptive MT - sentences from a Dataset/File. + request (Optional[Union[google.cloud.translate_v3.types.CreateAdaptiveMtDatasetRequest, dict]]): + The request object. Request message for creating an + AdaptiveMtDataset. parent (:class:`str`): - Required. The resource name of the project from which to - list the Adaptive MT files. The following format lists - all sentences under a file. 
- ``projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}`` - The following format lists all sentences within a - dataset. - ``projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}`` + Required. Name of the parent project. In form of + ``projects/{project-number-or-id}/locations/{location-id}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + adaptive_mt_dataset (:class:`google.cloud.translate_v3.types.AdaptiveMtDataset`): + Required. The AdaptiveMtDataset to be + created. + + This corresponds to the ``adaptive_mt_dataset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2781,18 +3106,13 @@ async def sample_list_adaptive_mt_sentences(): sent along with the request as metadata. Returns: - google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtSentencesAsyncPager: - List AdaptiveMt sentences response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - + google.cloud.translate_v3.types.AdaptiveMtDataset: + An Adaptive MT Dataset. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([parent, adaptive_mt_dataset]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2801,18 +3121,20 @@ async def sample_list_adaptive_mt_sentences(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, adaptive_mt.ListAdaptiveMtSentencesRequest): - request = adaptive_mt.ListAdaptiveMtSentencesRequest(request) + if not isinstance(request, adaptive_mt.CreateAdaptiveMtDatasetRequest): + request = adaptive_mt.CreateAdaptiveMtDatasetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent + if adaptive_mt_dataset is not None: + request.adaptive_mt_dataset = adaptive_mt_dataset # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_adaptive_mt_sentences + self._client._transport.create_adaptive_mt_dataset ] # Certain fields should be provided within the metadata header; @@ -2832,12 +3154,2294 @@ async def sample_list_adaptive_mt_sentences(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListAdaptiveMtSentencesAsyncPager( - method=rpc, - request=request, - response=response, + # Done; return the response. + return response + + async def delete_adaptive_mt_dataset( + self, + request: Optional[ + Union[adaptive_mt.DeleteAdaptiveMtDatasetRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an Adaptive MT dataset, including all its + entries and associated metadata. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_delete_adaptive_mt_dataset(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.DeleteAdaptiveMtDatasetRequest( + name="name_value", + ) + + # Make the request + await client.delete_adaptive_mt_dataset(request=request) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.DeleteAdaptiveMtDatasetRequest, dict]]): + The request object. Request message for deleting an + AdaptiveMtDataset. + name (:class:`str`): + Required. Name of the dataset. In the form of + ``projects/{project-number-or-id}/locations/{location-id}/adaptiveMtDatasets/{adaptive-mt-dataset-id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, adaptive_mt.DeleteAdaptiveMtDatasetRequest): + request = adaptive_mt.DeleteAdaptiveMtDatasetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_adaptive_mt_dataset + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_adaptive_mt_dataset( + self, + request: Optional[Union[adaptive_mt.GetAdaptiveMtDatasetRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.AdaptiveMtDataset: + r"""Gets the Adaptive MT dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_get_adaptive_mt_dataset(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.GetAdaptiveMtDatasetRequest( + name="name_value", + ) + + # Make the request + response = await client.get_adaptive_mt_dataset(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.GetAdaptiveMtDatasetRequest, dict]]): + The request object. Request message for getting an + Adaptive MT dataset. + name (:class:`str`): + Required. Name of the dataset. In the form of + ``projects/{project-number-or-id}/locations/{location-id}/adaptiveMtDatasets/{adaptive-mt-dataset-id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.types.AdaptiveMtDataset: + An Adaptive MT Dataset. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, adaptive_mt.GetAdaptiveMtDatasetRequest): + request = adaptive_mt.GetAdaptiveMtDatasetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_adaptive_mt_dataset + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_adaptive_mt_datasets( + self, + request: Optional[ + Union[adaptive_mt.ListAdaptiveMtDatasetsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAdaptiveMtDatasetsAsyncPager: + r"""Lists all Adaptive MT datasets for which the caller + has read permission. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_list_adaptive_mt_datasets(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.ListAdaptiveMtDatasetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_adaptive_mt_datasets(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.ListAdaptiveMtDatasetsRequest, dict]]): + The request object. Request message for listing all + Adaptive MT datasets that the requestor + has access to. + parent (:class:`str`): + Required. The resource name of the project from which to + list the Adaptive MT datasets. + ``projects/{project-number-or-id}/locations/{location-id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtDatasetsAsyncPager: + A list of AdaptiveMtDatasets. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, adaptive_mt.ListAdaptiveMtDatasetsRequest): + request = adaptive_mt.ListAdaptiveMtDatasetsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_adaptive_mt_datasets + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListAdaptiveMtDatasetsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def adaptive_mt_translate( + self, + request: Optional[Union[adaptive_mt.AdaptiveMtTranslateRequest, dict]] = None, + *, + parent: Optional[str] = None, + content: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.AdaptiveMtTranslateResponse: + r"""Translate text using Adaptive MT. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_adaptive_mt_translate(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.AdaptiveMtTranslateRequest( + parent="parent_value", + dataset="dataset_value", + content=['content_value1', 'content_value2'], + ) + + # Make the request + response = await client.adaptive_mt_translate(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.AdaptiveMtTranslateRequest, dict]]): + The request object. The request for sending an AdaptiveMt + translation query. + parent (:class:`str`): + Required. Location to make a regional call. + + Format: + ``projects/{project-number-or-id}/locations/{location-id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + content (:class:`MutableSequence[str]`): + Required. The content of the input in + string format. + + This corresponds to the ``content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.types.AdaptiveMtTranslateResponse: + An AdaptiveMtTranslate response. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, content]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, adaptive_mt.AdaptiveMtTranslateRequest): + request = adaptive_mt.AdaptiveMtTranslateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if content: + request.content.extend(content) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.adaptive_mt_translate + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_adaptive_mt_file( + self, + request: Optional[Union[adaptive_mt.GetAdaptiveMtFileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.AdaptiveMtFile: + r"""Gets and AdaptiveMtFile + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_get_adaptive_mt_file(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.GetAdaptiveMtFileRequest( + name="name_value", + ) + + # Make the request + response = await client.get_adaptive_mt_file(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.GetAdaptiveMtFileRequest, dict]]): + The request object. The request for getting an + AdaptiveMtFile. + name (:class:`str`): + Required. The resource name of the file, in form of + ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.types.AdaptiveMtFile: + An AdaptiveMtFile. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, adaptive_mt.GetAdaptiveMtFileRequest): + request = adaptive_mt.GetAdaptiveMtFileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_adaptive_mt_file + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_adaptive_mt_file( + self, + request: Optional[Union[adaptive_mt.DeleteAdaptiveMtFileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an AdaptiveMtFile along with its sentences. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_delete_adaptive_mt_file(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.DeleteAdaptiveMtFileRequest( + name="name_value", + ) + + # Make the request + await client.delete_adaptive_mt_file(request=request) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.DeleteAdaptiveMtFileRequest, dict]]): + The request object. The request for deleting an + AdaptiveMt file. + name (:class:`str`): + Required. The resource name of the file to delete, in + form of + ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, adaptive_mt.DeleteAdaptiveMtFileRequest): + request = adaptive_mt.DeleteAdaptiveMtFileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_adaptive_mt_file + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def import_adaptive_mt_file( + self, + request: Optional[Union[adaptive_mt.ImportAdaptiveMtFileRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.ImportAdaptiveMtFileResponse: + r"""Imports an AdaptiveMtFile and adds all of its + sentences into the AdaptiveMtDataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_import_adaptive_mt_file(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + file_input_source = translate_v3.FileInputSource() + file_input_source.mime_type = "mime_type_value" + file_input_source.content = b'content_blob' + file_input_source.display_name = "display_name_value" + + request = translate_v3.ImportAdaptiveMtFileRequest( + file_input_source=file_input_source, + parent="parent_value", + ) + + # Make the request + response = await client.import_adaptive_mt_file(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.ImportAdaptiveMtFileRequest, dict]]): + The request object. The request for importing an + AdaptiveMt file along with its + sentences. + parent (:class:`str`): + Required. The resource name of the file, in form of + ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.types.ImportAdaptiveMtFileResponse: + The response for importing an + AdaptiveMtFile + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, adaptive_mt.ImportAdaptiveMtFileRequest): + request = adaptive_mt.ImportAdaptiveMtFileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.import_adaptive_mt_file + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_adaptive_mt_files( + self, + request: Optional[Union[adaptive_mt.ListAdaptiveMtFilesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAdaptiveMtFilesAsyncPager: + r"""Lists all AdaptiveMtFiles associated to an + AdaptiveMtDataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_list_adaptive_mt_files(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.ListAdaptiveMtFilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_adaptive_mt_files(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.ListAdaptiveMtFilesRequest, dict]]): + The request object. The request to list all AdaptiveMt + files under a given dataset. + parent (:class:`str`): + Required. The resource name of the project from which to + list the Adaptive MT files. + ``projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtFilesAsyncPager: + The response for listing all + AdaptiveMt files under a given dataset. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, adaptive_mt.ListAdaptiveMtFilesRequest): + request = adaptive_mt.ListAdaptiveMtFilesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_adaptive_mt_files + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAdaptiveMtFilesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_adaptive_mt_sentences( + self, + request: Optional[ + Union[adaptive_mt.ListAdaptiveMtSentencesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAdaptiveMtSentencesAsyncPager: + r"""Lists all AdaptiveMtSentences under a given + file/dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_list_adaptive_mt_sentences(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.ListAdaptiveMtSentencesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_adaptive_mt_sentences(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.ListAdaptiveMtSentencesRequest, dict]]): + The request object. The request for listing Adaptive MT + sentences from a Dataset/File. + parent (:class:`str`): + Required. The resource name of the project from which to + list the Adaptive MT files. The following format lists + all sentences under a file. + ``projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}`` + The following format lists all sentences within a + dataset. 
+ ``projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtSentencesAsyncPager: + List AdaptiveMt sentences response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, adaptive_mt.ListAdaptiveMtSentencesRequest): + request = adaptive_mt.ListAdaptiveMtSentencesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_adaptive_mt_sentences + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAdaptiveMtSentencesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def import_data( + self, + request: Optional[Union[automl_translation.ImportDataRequest, dict]] = None, + *, + dataset: Optional[str] = None, + input_config: Optional[automl_translation.DatasetInputConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Import sentence pairs into translation Dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_import_data(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.ImportDataRequest( + dataset="dataset_value", + ) + + # Make the request + operation = client.import_data(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.ImportDataRequest, dict]]): + The request object. Request message for ImportData. + dataset (:class:`str`): + Required. Name of the dataset. In form of + ``projects/{project-number-or-id}/locations/{location-id}/datasets/{dataset-id}`` + + This corresponds to the ``dataset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + input_config (:class:`google.cloud.translate_v3.types.DatasetInputConfig`): + Required. The config for the input + content. + + This corresponds to the ``input_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([dataset, input_config]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, automl_translation.ImportDataRequest): + request = automl_translation.ImportDataRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if dataset is not None: + request.dataset = dataset + if input_config is not None: + request.input_config = input_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.import_data + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("dataset", request.dataset),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
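+        # The raw operation returned by the RPC is wrapped just below into an
+        # ``AsyncOperation`` whose result resolves to ``Empty`` once the import
+        # finishes.  Minimal caller-side sketch (hypothetical resource name;
+        # ``input_config`` assumed to be a ``translate_v3.DatasetInputConfig``
+        # built elsewhere), mirroring the generated sample above:
+        #
+        #     operation = client.import_data(
+        #         dataset="projects/my-project/locations/us-central1/datasets/my-dataset",
+        #         input_config=input_config,
+        #     )
+        #     response = (await operation).result()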
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=automl_translation.ImportDataMetadata, + ) + + # Done; return the response. + return response + + async def export_data( + self, + request: Optional[Union[automl_translation.ExportDataRequest, dict]] = None, + *, + dataset: Optional[str] = None, + output_config: Optional[automl_translation.DatasetOutputConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Exports dataset's data to the provided output + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_export_data(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + output_config = translate_v3.DatasetOutputConfig() + output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" + + request = translate_v3.ExportDataRequest( + dataset="dataset_value", + output_config=output_config, + ) + + # Make the request + operation = client.export_data(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.ExportDataRequest, dict]]): + The request object. Request message for ExportData. + dataset (:class:`str`): + Required. Name of the dataset. In form of + ``projects/{project-number-or-id}/locations/{location-id}/datasets/{dataset-id}`` + + This corresponds to the ``dataset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + output_config (:class:`google.cloud.translate_v3.types.DatasetOutputConfig`): + Required. The config for the output + content. + + This corresponds to the ``output_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([dataset, output_config]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, automl_translation.ExportDataRequest): + request = automl_translation.ExportDataRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if dataset is not None: + request.dataset = dataset + if output_config is not None: + request.output_config = output_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.export_data + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("dataset", request.dataset),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=automl_translation.ExportDataMetadata, + ) + + # Done; return the response. + return response + + async def list_examples( + self, + request: Optional[Union[automl_translation.ListExamplesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListExamplesAsyncPager: + r"""Lists sentence pairs in the dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_list_examples(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.ListExamplesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_examples(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.ListExamplesRequest, dict]]): + The request object. Request message for ListExamples. + parent (:class:`str`): + Required. Name of the parent dataset. In form of + ``projects/{project-number-or-id}/locations/{location-id}/datasets/{dataset-id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.services.translation_service.pagers.ListExamplesAsyncPager: + Response message for ListExamples. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, automl_translation.ListExamplesRequest): + request = automl_translation.ListExamplesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_examples + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListExamplesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_model( + self, + request: Optional[Union[automl_translation.CreateModelRequest, dict]] = None, + *, + parent: Optional[str] = None, + model: Optional[automl_translation.Model] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a Model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_create_model(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.CreateModelRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.CreateModelRequest, dict]]): + The request object. Request message for CreateModel. 
+ parent (:class:`str`): + Required. The project name, in form of + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + model (:class:`google.cloud.translate_v3.types.Model`): + Required. The Model to create. + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.translate_v3.types.Model` A trained + translation model. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, model]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, automl_translation.CreateModelRequest): + request = automl_translation.CreateModelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if model is not None: + request.model = model + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_model + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + automl_translation.Model, + metadata_type=automl_translation.CreateModelMetadata, + ) + + # Done; return the response. + return response + + async def list_models( + self, + request: Optional[Union[automl_translation.ListModelsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListModelsAsyncPager: + r"""Lists models. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_list_models(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.ListModelsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_models(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.ListModelsRequest, dict]]): + The request object. Request message for ListModels. + parent (:class:`str`): + Required. Name of the parent project. In form of + ``projects/{project-number-or-id}/locations/{location-id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.services.translation_service.pagers.ListModelsAsyncPager: + Response message for ListModels. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, automl_translation.ListModelsRequest): + request = automl_translation.ListModelsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_models + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListModelsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
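+        # Caller-side sketch (hypothetical parent) for the pager returned
+        # below; iterating it fetches further pages lazily, and ``pager.pages``
+        # exposes whole pages when page-level access is needed:
+        #
+        #     pager = await client.list_models(
+        #         parent="projects/my-project/locations/us-central1",
+        #     )
+        #     async for model in pager:
+        #         print(model.name)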
+ return response + + async def get_model( + self, + request: Optional[Union[automl_translation.GetModelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> automl_translation.Model: + r"""Gets a model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_get_model(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.GetModelRequest( + name="name_value", + ) + + # Make the request + response = await client.get_model(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.GetModelRequest, dict]]): + The request object. Request message for GetModel. + name (:class:`str`): + Required. The resource name of the + model to retrieve. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.types.Model: + A trained translation model. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, automl_translation.GetModelRequest): + request = automl_translation.GetModelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_model + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
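+        # Unlike the paged and long-running methods above, this call resolves
+        # directly to the ``Model`` message.  Sketch (hypothetical model name):
+        #
+        #     model = await client.get_model(
+        #         name="projects/my-project/locations/us-central1/models/my-model",
+        #     )
+        #     print(model.name)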
+ return response + + async def delete_model( + self, + request: Optional[Union[automl_translation.DeleteModelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_delete_model(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.DeleteModelRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.DeleteModelRequest, dict]]): + The request object. Request message for DeleteModel. + name (:class:`str`): + Required. The name of the model to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, automl_translation.DeleteModelRequest): + request = automl_translation.DeleteModelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_model + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
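+        # ``gapic_v1.routing_header.to_grpc_metadata`` folds the request's
+        # ``name`` into the ``x-goog-request-params`` metadata entry so the
+        # backend can route the call to the right regional resource, roughly
+        # (value shown unencoded for readability):
+        #
+        #     ("x-goog-request-params",
+        #      "name=projects/my-project/locations/us-central1/models/my-model")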
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=automl_translation.DeleteModelMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
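+        # Unlike the proto-plus request types above, which also accept a
+        # mapping directly, ``operations_pb2`` messages are plain protobuf
+        # classes, so a dict is expanded into keyword arguments instead.
+        # Illustrative call (hypothetical operation id):
+        #
+        #     op = await client.get_operation(
+        #         request={"name": "projects/my-project/locations/us-central1/operations/1234"}
+        #     )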
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def wait_operation( + self, + request: Optional[operations_pb2.WaitOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Waits until the specified long-running operation is done or reaches at most + a specified timeout, returning the latest state. + + If the operation is already done, the latest state is immediately returned. + If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC + timeout is used. If the server does not support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.WaitOperationRequest`): + The request object. Request message for + `WaitOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.WaitOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.wait_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. 
Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/client.py b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/client.py index df77bbdf9805..94c80ddde495 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/client.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/client.py @@ -50,10 +50,21 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.translate_v3.services.translation_service import pagers -from google.cloud.translate_v3.types import adaptive_mt, translation_service +from google.cloud.translate_v3.types import ( + adaptive_mt, + automl_translation, + common, + translation_service, +) from .transports.base import DEFAULT_CLIENT_INFO, TranslationServiceTransport from .transports.grpc import TranslationServiceGrpcTransport @@ -259,6 +270,52 @@ def parse_adaptive_mt_sentence_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def dataset_path( + project: str, + location: str, + dataset: str, + ) -> str: + """Returns a fully-qualified dataset string.""" + return "projects/{project}/locations/{location}/datasets/{dataset}".format( + project=project, + location=location, + dataset=dataset, + ) + + @staticmethod + def parse_dataset_path(path: str) -> Dict[str, str]: + """Parses a dataset path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/datasets/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def example_path( + project: str, + location: str, + dataset: str, + example: str, + ) -> str: + """Returns a fully-qualified example string.""" + return "projects/{project}/locations/{location}/datasets/{dataset}/examples/{example}".format( + project=project, + location=location, + dataset=dataset, + example=example, + ) + + @staticmethod + def parse_example_path(path: str) -> Dict[str, str]: + """Parses a example path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/datasets/(?P.+?)/examples/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def glossary_path( project: str, @@ -281,6 +338,52 @@ def parse_glossary_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def glossary_entry_path( + project: str, + location: str, + glossary: str, + glossary_entry: str, + ) -> str: + """Returns a fully-qualified glossary_entry string.""" + return "projects/{project}/locations/{location}/glossaries/{glossary}/glossaryEntries/{glossary_entry}".format( + project=project, + location=location, + glossary=glossary, + glossary_entry=glossary_entry, + ) + + @staticmethod + def parse_glossary_entry_path(path: str) -> Dict[str, str]: + """Parses a glossary_entry path into its component segments.""" + m = re.match( + 
r"^projects/(?P.+?)/locations/(?P.+?)/glossaries/(?P.+?)/glossaryEntries/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def model_path( + project: str, + location: str, + model: str, + ) -> str: + """Returns a fully-qualified model string.""" + return "projects/{project}/locations/{location}/models/{model}".format( + project=project, + location=location, + model=model, + ) + + @staticmethod + def parse_model_path(path: str) -> Dict[str, str]: + """Parses a model path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/models/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, @@ -858,6 +961,9 @@ def sample_translate_text(): - General (built-in) models: ``projects/{project-number-or-id}/locations/{location-id}/models/general/nmt``, + - Translation LLM models: + ``projects/{project-number-or-id}/locations/{location-id}/models/general/translation-llm``, + For global (non-regionalized) requests, use ``location-id`` ``global``. For example, ``projects/{project-number-or-id}/locations/global/models/general/nmt``. @@ -962,6 +1068,129 @@ def sample_translate_text(): # Done; return the response. return response + def romanize_text( + self, + request: Optional[Union[translation_service.RomanizeTextRequest, dict]] = None, + *, + parent: Optional[str] = None, + contents: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> translation_service.RomanizeTextResponse: + r"""Romanize input text written in non-Latin scripts to + Latin text. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_romanize_text(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.RomanizeTextRequest( + parent="parent_value", + contents=['contents_value1', 'contents_value2'], + ) + + # Make the request + response = client.romanize_text(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.RomanizeTextRequest, dict]): + The request object. The request message for synchronous + romanization. + parent (str): + Required. Project or location to make a call. Must refer + to a caller's project. + + Format: + ``projects/{project-number-or-id}/locations/{location-id}`` + or ``projects/{project-number-or-id}``. + + For global calls, use + ``projects/{project-number-or-id}/locations/global`` or + ``projects/{project-number-or-id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + contents (MutableSequence[str]): + Required. The content of the input in + string format. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.types.RomanizeTextResponse: + The response message for synchronous + romanization. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, contents]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, translation_service.RomanizeTextRequest): + request = translation_service.RomanizeTextRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if contents is not None: + request.contents = contents + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.romanize_text] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def detect_language( self, request: Optional[ @@ -1818,19 +2047,20 @@ def sample_create_glossary(): # Done; return the response. return response - def list_glossaries( + def update_glossary( self, request: Optional[ - Union[translation_service.ListGlossariesRequest, dict] + Union[translation_service.UpdateGlossaryRequest, dict] ] = None, *, - parent: Optional[str] = None, + glossary: Optional[translation_service.Glossary] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListGlossariesPager: - r"""Lists glossaries in a project. Returns NOT_FOUND, if the project - doesn't exist. + ) -> operation.Operation: + r"""Updates a glossary. A LRO is used since the update + can be async if the glossary's entry file is updated. .. 
code-block:: python @@ -1843,31 +2073,44 @@ def list_glossaries( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import translate_v3 - def sample_list_glossaries(): + def sample_update_glossary(): # Create a client client = translate_v3.TranslationServiceClient() # Initialize request argument(s) - request = translate_v3.ListGlossariesRequest( - parent="parent_value", + glossary = translate_v3.Glossary() + glossary.name = "name_value" + + request = translate_v3.UpdateGlossaryRequest( + glossary=glossary, ) # Make the request - page_result = client.list_glossaries(request=request) + operation = client.update_glossary(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response - for response in page_result: - print(response) + print(response) Args: - request (Union[google.cloud.translate_v3.types.ListGlossariesRequest, dict]): - The request object. Request message for ListGlossaries. - parent (str): - Required. The name of the project - from which to list all of the - glossaries. + request (Union[google.cloud.translate_v3.types.UpdateGlossaryRequest, dict]): + The request object. Request message for the update + glossary flow + glossary (google.cloud.translate_v3.types.Glossary): + Required. The glossary entry to + update. - This corresponds to the ``parent`` field + This corresponds to the ``glossary`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to be updated. Currently only + ``display_name`` and 'input_config' + + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1877,18 +2120,18 @@ def sample_list_glossaries(): sent along with the request as metadata. Returns: - google.cloud.translate_v3.services.translation_service.pagers.ListGlossariesPager: - Response message for ListGlossaries. + google.api_core.operation.Operation: + An object representing a long-running operation. - Iterating over this object will yield - results and resolve additional pages - automatically. + The result type for the operation will be + :class:`google.cloud.translate_v3.types.Glossary` + Represents a glossary built from user-provided data. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([glossary, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1897,21 +2140,25 @@ def sample_list_glossaries(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, translation_service.ListGlossariesRequest): - request = translation_service.ListGlossariesRequest(request) + if not isinstance(request, translation_service.UpdateGlossaryRequest): + request = translation_service.UpdateGlossaryRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: - request.parent = parent + if glossary is not None: + request.glossary = glossary + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_glossaries] + rpc = self._transport._wrapped_methods[self._transport.update_glossary] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata( + (("glossary.name", request.glossary.name),) + ), ) # Validate the universe domain. @@ -1925,31 +2172,30 @@ def sample_list_glossaries(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListGlossariesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + translation_service.Glossary, + metadata_type=translation_service.UpdateGlossaryMetadata, ) # Done; return the response. return response - def get_glossary( + def list_glossaries( self, - request: Optional[Union[translation_service.GetGlossaryRequest, dict]] = None, + request: Optional[ + Union[translation_service.ListGlossariesRequest, dict] + ] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> translation_service.Glossary: - r"""Gets a glossary. Returns NOT_FOUND, if the glossary doesn't - exist. + ) -> pagers.ListGlossariesPager: + r"""Lists glossaries in a project. Returns NOT_FOUND, if the project + doesn't exist. .. code-block:: python @@ -1962,14 +2208,133 @@ def get_glossary( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import translate_v3 - def sample_get_glossary(): + def sample_list_glossaries(): # Create a client client = translate_v3.TranslationServiceClient() # Initialize request argument(s) - request = translate_v3.GetGlossaryRequest( - name="name_value", - ) + request = translate_v3.ListGlossariesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossaries(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.ListGlossariesRequest, dict]): + The request object. Request message for ListGlossaries. + parent (str): + Required. The name of the project + from which to list all of the + glossaries. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.services.translation_service.pagers.ListGlossariesPager: + Response message for ListGlossaries. + + Iterating over this object will yield + results and resolve additional pages + automatically. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, translation_service.ListGlossariesRequest): + request = translation_service.ListGlossariesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_glossaries] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListGlossariesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_glossary( + self, + request: Optional[Union[translation_service.GetGlossaryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> translation_service.Glossary: + r"""Gets a glossary. Returns NOT_FOUND, if the glossary doesn't + exist. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_get_glossary(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.GetGlossaryRequest( + name="name_value", + ) # Make the request response = client.get_glossary(request=request) @@ -2163,19 +2528,18 @@ def sample_delete_glossary(): # Done; return the response. return response - def create_adaptive_mt_dataset( + def get_glossary_entry( self, request: Optional[ - Union[adaptive_mt.CreateAdaptiveMtDatasetRequest, dict] + Union[translation_service.GetGlossaryEntryRequest, dict] ] = None, *, - parent: Optional[str] = None, - adaptive_mt_dataset: Optional[adaptive_mt.AdaptiveMtDataset] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> adaptive_mt.AdaptiveMtDataset: - r"""Creates an Adaptive MT dataset. 
+ ) -> common.GlossaryEntry: + r"""Gets a single glossary entry by the given id. .. code-block:: python @@ -2188,41 +2552,30 @@ def create_adaptive_mt_dataset( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import translate_v3 - def sample_create_adaptive_mt_dataset(): + def sample_get_glossary_entry(): # Create a client client = translate_v3.TranslationServiceClient() # Initialize request argument(s) - adaptive_mt_dataset = translate_v3.AdaptiveMtDataset() - adaptive_mt_dataset.name = "name_value" - - request = translate_v3.CreateAdaptiveMtDatasetRequest( - parent="parent_value", - adaptive_mt_dataset=adaptive_mt_dataset, + request = translate_v3.GetGlossaryEntryRequest( + name="name_value", ) # Make the request - response = client.create_adaptive_mt_dataset(request=request) + response = client.get_glossary_entry(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.translate_v3.types.CreateAdaptiveMtDatasetRequest, dict]): - The request object. Request message for creating an - AdaptiveMtDataset. - parent (str): - Required. Name of the parent project. In form of - ``projects/{project-number-or-id}/locations/{location-id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - adaptive_mt_dataset (google.cloud.translate_v3.types.AdaptiveMtDataset): - Required. The AdaptiveMtDataset to be - created. + request (Union[google.cloud.translate_v3.types.GetGlossaryEntryRequest, dict]): + The request object. Request message for the Get Glossary + Entry Api + name (str): + Required. The resource name of the + glossary entry to get - This corresponds to the ``adaptive_mt_dataset`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2232,13 +2585,15 @@ def sample_create_adaptive_mt_dataset(): sent along with the request as metadata. Returns: - google.cloud.translate_v3.types.AdaptiveMtDataset: - An Adaptive MT Dataset. + google.cloud.translate_v3.types.GlossaryEntry: + Represents a single entry in a + glossary. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, adaptive_mt_dataset]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2247,25 +2602,21 @@ def sample_create_adaptive_mt_dataset(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, adaptive_mt.CreateAdaptiveMtDatasetRequest): - request = adaptive_mt.CreateAdaptiveMtDatasetRequest(request) + if not isinstance(request, translation_service.GetGlossaryEntryRequest): + request = translation_service.GetGlossaryEntryRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent - if adaptive_mt_dataset is not None: - request.adaptive_mt_dataset = adaptive_mt_dataset + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
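# For reference, the same lookup can be made with the flattened ``name`` argument
# documented above; this is a minimal sketch and the glossary-entry resource name
# below is a placeholder, not a confirmed format.
from google.cloud import translate_v3

client = translate_v3.TranslationServiceClient()
entry = client.get_glossary_entry(
    name="projects/my-project/locations/us-central1/glossaries/my-glossary/glossaryEntries/my-entry"
)
print(entry)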
- rpc = self._transport._wrapped_methods[ - self._transport.create_adaptive_mt_dataset - ] + rpc = self._transport._wrapped_methods[self._transport.get_glossary_entry] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2282,19 +2633,18 @@ def sample_create_adaptive_mt_dataset(): # Done; return the response. return response - def delete_adaptive_mt_dataset( + def list_glossary_entries( self, request: Optional[ - Union[adaptive_mt.DeleteAdaptiveMtDatasetRequest, dict] + Union[translation_service.ListGlossaryEntriesRequest, dict] ] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an Adaptive MT dataset, including all its - entries and associated metadata. + ) -> pagers.ListGlossaryEntriesPager: + r"""List the entries for the glossary. .. code-block:: python @@ -2307,27 +2657,32 @@ def delete_adaptive_mt_dataset( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import translate_v3 - def sample_delete_adaptive_mt_dataset(): + def sample_list_glossary_entries(): # Create a client client = translate_v3.TranslationServiceClient() # Initialize request argument(s) - request = translate_v3.DeleteAdaptiveMtDatasetRequest( - name="name_value", + request = translate_v3.ListGlossaryEntriesRequest( + parent="parent_value", ) # Make the request - client.delete_adaptive_mt_dataset(request=request) + page_result = client.list_glossary_entries(request=request) + + # Handle the response + for response in page_result: + print(response) Args: - request (Union[google.cloud.translate_v3.types.DeleteAdaptiveMtDatasetRequest, dict]): - The request object. Request message for deleting an - AdaptiveMtDataset. - name (str): - Required. Name of the dataset. In the form of - ``projects/{project-number-or-id}/locations/{location-id}/adaptiveMtDatasets/{adaptive-mt-dataset-id}`` + request (Union[google.cloud.translate_v3.types.ListGlossaryEntriesRequest, dict]): + The request object. Request message for + ListGlossaryEntries + parent (str): + Required. The parent glossary + resource name for listing the glossary's + entries. - This corresponds to the ``name`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2335,11 +2690,20 @@ def sample_delete_adaptive_mt_dataset(): timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.services.translation_service.pagers.ListGlossaryEntriesPager: + Response message for + ListGlossaryEntries + Iterating over this object will yield + results and resolve additional pages + automatically. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
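# A short sketch of paging through the new ListGlossaryEntries API with the
# flattened ``parent`` argument; the returned pager resolves additional pages
# automatically as it is iterated. The parent glossary name is a placeholder.
from google.cloud import translate_v3

client = translate_v3.TranslationServiceClient()
for entry in client.list_glossary_entries(
    parent="projects/my-project/locations/us-central1/glossaries/my-glossary"
):
    print(entry.name)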
- has_flattened_params = any([name]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2348,46 +2712,61 @@ def sample_delete_adaptive_mt_dataset(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, adaptive_mt.DeleteAdaptiveMtDatasetRequest): - request = adaptive_mt.DeleteAdaptiveMtDatasetRequest(request) + if not isinstance(request, translation_service.ListGlossaryEntriesRequest): + request = translation_service.ListGlossaryEntriesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.delete_adaptive_mt_dataset - ] + rpc = self._transport._wrapped_methods[self._transport.list_glossary_entries] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc( + response = rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - def get_adaptive_mt_dataset( + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListGlossaryEntriesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_glossary_entry( self, - request: Optional[Union[adaptive_mt.GetAdaptiveMtDatasetRequest, dict]] = None, + request: Optional[ + Union[translation_service.CreateGlossaryEntryRequest, dict] + ] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, + glossary_entry: Optional[common.GlossaryEntry] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> adaptive_mt.AdaptiveMtDataset: - r"""Gets the Adaptive MT dataset. + ) -> common.GlossaryEntry: + r"""Creates a glossary entry. .. code-block:: python @@ -2400,30 +2779,37 @@ def get_adaptive_mt_dataset( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import translate_v3 - def sample_get_adaptive_mt_dataset(): + def sample_create_glossary_entry(): # Create a client client = translate_v3.TranslationServiceClient() # Initialize request argument(s) - request = translate_v3.GetAdaptiveMtDatasetRequest( - name="name_value", + request = translate_v3.CreateGlossaryEntryRequest( + parent="parent_value", ) # Make the request - response = client.get_adaptive_mt_dataset(request=request) + response = client.create_glossary_entry(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.translate_v3.types.GetAdaptiveMtDatasetRequest, dict]): - The request object. Request message for getting an - Adaptive MT dataset. - name (str): - Required. Name of the dataset. 
In the form of - ``projects/{project-number-or-id}/locations/{location-id}/adaptiveMtDatasets/{adaptive-mt-dataset-id}`` + request (Union[google.cloud.translate_v3.types.CreateGlossaryEntryRequest, dict]): + The request object. Request message for + CreateGlossaryEntry + parent (str): + Required. The resource name of the + glossary to create the entry under. - This corresponds to the ``name`` field + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + glossary_entry (google.cloud.translate_v3.types.GlossaryEntry): + Required. The glossary entry to + create + + This corresponds to the ``glossary_entry`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2433,13 +2819,15 @@ def sample_get_adaptive_mt_dataset(): sent along with the request as metadata. Returns: - google.cloud.translate_v3.types.AdaptiveMtDataset: - An Adaptive MT Dataset. + google.cloud.translate_v3.types.GlossaryEntry: + Represents a single entry in a + glossary. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([parent, glossary_entry]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2448,21 +2836,23 @@ def sample_get_adaptive_mt_dataset(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, adaptive_mt.GetAdaptiveMtDatasetRequest): - request = adaptive_mt.GetAdaptiveMtDatasetRequest(request) + if not isinstance(request, translation_service.CreateGlossaryEntryRequest): + request = translation_service.CreateGlossaryEntryRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if parent is not None: + request.parent = parent + if glossary_entry is not None: + request.glossary_entry = glossary_entry # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_adaptive_mt_dataset] + rpc = self._transport._wrapped_methods[self._transport.create_glossary_entry] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2479,19 +2869,18 @@ def sample_get_adaptive_mt_dataset(): # Done; return the response. return response - def list_adaptive_mt_datasets( + def update_glossary_entry( self, request: Optional[ - Union[adaptive_mt.ListAdaptiveMtDatasetsRequest, dict] + Union[translation_service.UpdateGlossaryEntryRequest, dict] ] = None, *, - parent: Optional[str] = None, + glossary_entry: Optional[common.GlossaryEntry] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAdaptiveMtDatasetsPager: - r"""Lists all Adaptive MT datasets for which the caller - has read permission. + ) -> common.GlossaryEntry: + r"""Updates a glossary entry. .. 
code-block:: python @@ -2504,33 +2893,29 @@ def list_adaptive_mt_datasets( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import translate_v3 - def sample_list_adaptive_mt_datasets(): + def sample_update_glossary_entry(): # Create a client client = translate_v3.TranslationServiceClient() # Initialize request argument(s) - request = translate_v3.ListAdaptiveMtDatasetsRequest( - parent="parent_value", + request = translate_v3.UpdateGlossaryEntryRequest( ) # Make the request - page_result = client.list_adaptive_mt_datasets(request=request) + response = client.update_glossary_entry(request=request) # Handle the response - for response in page_result: - print(response) + print(response) Args: - request (Union[google.cloud.translate_v3.types.ListAdaptiveMtDatasetsRequest, dict]): - The request object. Request message for listing all - Adaptive MT datasets that the requestor - has access to. - parent (str): - Required. The resource name of the project from which to - list the Adaptive MT datasets. - ``projects/{project-number-or-id}/locations/{location-id}`` - - This corresponds to the ``parent`` field + request (Union[google.cloud.translate_v3.types.UpdateGlossaryEntryRequest, dict]): + The request object. Request message for + UpdateGlossaryEntry + glossary_entry (google.cloud.translate_v3.types.GlossaryEntry): + Required. The glossary entry to + update. + + This corresponds to the ``glossary_entry`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2540,18 +2925,15 @@ def sample_list_adaptive_mt_datasets(): sent along with the request as metadata. Returns: - google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtDatasetsPager: - A list of AdaptiveMtDatasets. - - Iterating over this object will yield - results and resolve additional pages - automatically. + google.cloud.translate_v3.types.GlossaryEntry: + Represents a single entry in a + glossary. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([glossary_entry]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2560,23 +2942,23 @@ def sample_list_adaptive_mt_datasets(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, adaptive_mt.ListAdaptiveMtDatasetsRequest): - request = adaptive_mt.ListAdaptiveMtDatasetsRequest(request) + if not isinstance(request, translation_service.UpdateGlossaryEntryRequest): + request = translation_service.UpdateGlossaryEntryRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if glossary_entry is not None: + request.glossary_entry = glossary_entry # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.list_adaptive_mt_datasets - ] + rpc = self._transport._wrapped_methods[self._transport.update_glossary_entry] # Certain fields should be provided within the metadata header; # add these here. 
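# A minimal sketch of creating and then updating a glossary entry with the
# flattened arguments documented above. The resource names are placeholders, and
# the ``terms_pair`` / ``description`` fields are assumptions about GlossaryEntry's
# shape; only ``name`` is confirmed by the docstrings in this change.
from google.cloud import translate_v3

client = translate_v3.TranslationServiceClient()
parent = "projects/my-project/locations/us-central1/glossaries/my-glossary"

entry = translate_v3.GlossaryEntry()
entry.terms_pair.source_term.language_code = "en"   # assumed field layout
entry.terms_pair.source_term.text = "account"
entry.terms_pair.target_term.language_code = "es"
entry.terms_pair.target_term.text = "cuenta"

created = client.create_glossary_entry(parent=parent, glossary_entry=entry)

created.description = "Finance term"                 # assumed field
# update_glossary_entry routes on glossary_entry.name, which the created entry carries.
updated = client.update_glossary_entry(glossary_entry=created)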
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata( + (("glossary_entry.name", request.glossary_entry.name),) + ), ) # Validate the universe domain. @@ -2590,31 +2972,21 @@ def sample_list_adaptive_mt_datasets(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListAdaptiveMtDatasetsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - # Done; return the response. return response - def adaptive_mt_translate( + def delete_glossary_entry( self, - request: Optional[Union[adaptive_mt.AdaptiveMtTranslateRequest, dict]] = None, + request: Optional[ + Union[translation_service.DeleteGlossaryEntryRequest, dict] + ] = None, *, - parent: Optional[str] = None, - content: Optional[MutableSequence[str]] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> adaptive_mt.AdaptiveMtTranslateResponse: - r"""Translate text using Adaptive MT. + ) -> None: + r"""Deletes a single entry from the glossary .. code-block:: python @@ -2627,42 +2999,27 @@ def adaptive_mt_translate( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import translate_v3 - def sample_adaptive_mt_translate(): + def sample_delete_glossary_entry(): # Create a client client = translate_v3.TranslationServiceClient() # Initialize request argument(s) - request = translate_v3.AdaptiveMtTranslateRequest( - parent="parent_value", - dataset="dataset_value", - content=['content_value1', 'content_value2'], + request = translate_v3.DeleteGlossaryEntryRequest( + name="name_value", ) # Make the request - response = client.adaptive_mt_translate(request=request) - - # Handle the response - print(response) + client.delete_glossary_entry(request=request) Args: - request (Union[google.cloud.translate_v3.types.AdaptiveMtTranslateRequest, dict]): - The request object. The request for sending an AdaptiveMt - translation query. - parent (str): - Required. Location to make a regional call. - - Format: - ``projects/{project-number-or-id}/locations/{location-id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - content (MutableSequence[str]): - Required. The content of the input in - string format. For now only one sentence - per request is supported. + request (Union[google.cloud.translate_v3.types.DeleteGlossaryEntryRequest, dict]): + The request object. Request message for Delete Glossary + Entry + name (str): + Required. The resource name of the + glossary entry to delete - This corresponds to the ``content`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2670,15 +3027,11 @@ def sample_adaptive_mt_translate(): timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - google.cloud.translate_v3.types.AdaptiveMtTranslateResponse: - An AdaptiveMtTranslate response. """ # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, content]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2687,49 +3040,45 @@ def sample_adaptive_mt_translate(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, adaptive_mt.AdaptiveMtTranslateRequest): - request = adaptive_mt.AdaptiveMtTranslateRequest(request) + if not isinstance(request, translation_service.DeleteGlossaryEntryRequest): + request = translation_service.DeleteGlossaryEntryRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent - if content is not None: - request.content = content + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.adaptive_mt_translate] + rpc = self._transport._wrapped_methods[self._transport.delete_glossary_entry] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - response = rpc( + rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - # Done; return the response. - return response - - def get_adaptive_mt_file( + def create_dataset( self, - request: Optional[Union[adaptive_mt.GetAdaptiveMtFileRequest, dict]] = None, + request: Optional[Union[automl_translation.CreateDatasetRequest, dict]] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, + dataset: Optional[automl_translation.Dataset] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> adaptive_mt.AdaptiveMtFile: - r"""Gets and AdaptiveMtFile + ) -> operation.Operation: + r"""Creates a Dataset. .. code-block:: python @@ -2742,30 +3091,36 @@ def get_adaptive_mt_file( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import translate_v3 - def sample_get_adaptive_mt_file(): + def sample_create_dataset(): # Create a client client = translate_v3.TranslationServiceClient() # Initialize request argument(s) - request = translate_v3.GetAdaptiveMtFileRequest( - name="name_value", + request = translate_v3.CreateDatasetRequest( + parent="parent_value", ) # Make the request - response = client.get_adaptive_mt_file(request=request) + operation = client.create_dataset(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response print(response) Args: - request (Union[google.cloud.translate_v3.types.GetAdaptiveMtFileRequest, dict]): - The request object. The request for getting an - AdaptiveMtFile. - name (str): - Required. 
The resource name of the file, in form of - ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}`` - - This corresponds to the ``name`` field + request (Union[google.cloud.translate_v3.types.CreateDatasetRequest, dict]): + The request object. Request message for CreateDataset. + parent (str): + Required. The project name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + dataset (google.cloud.translate_v3.types.Dataset): + Required. The Dataset to create. + This corresponds to the ``dataset`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2775,13 +3130,17 @@ def sample_get_adaptive_mt_file(): sent along with the request as metadata. Returns: - google.cloud.translate_v3.types.AdaptiveMtFile: - An AdaptiveMtFile. + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.translate_v3.types.Dataset` A dataset that hosts the examples (sentence pairs) used for translation + models. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([parent, dataset]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2790,21 +3149,23 @@ def sample_get_adaptive_mt_file(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, adaptive_mt.GetAdaptiveMtFileRequest): - request = adaptive_mt.GetAdaptiveMtFileRequest(request) + if not isinstance(request, automl_translation.CreateDatasetRequest): + request = automl_translation.CreateDatasetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if parent is not None: + request.parent = parent + if dataset is not None: + request.dataset = dataset # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_adaptive_mt_file] + rpc = self._transport._wrapped_methods[self._transport.create_dataset] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2818,19 +3179,27 @@ def sample_get_adaptive_mt_file(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + automl_translation.Dataset, + metadata_type=automl_translation.CreateDatasetMetadata, + ) + # Done; return the response. 
return response - def delete_adaptive_mt_file( + def get_dataset( self, - request: Optional[Union[adaptive_mt.DeleteAdaptiveMtFileRequest, dict]] = None, + request: Optional[Union[automl_translation.GetDatasetRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an AdaptiveMtFile along with its sentences. + ) -> automl_translation.Dataset: + r"""Gets a Dataset. .. code-block:: python @@ -2843,26 +3212,27 @@ def delete_adaptive_mt_file( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import translate_v3 - def sample_delete_adaptive_mt_file(): + def sample_get_dataset(): # Create a client client = translate_v3.TranslationServiceClient() # Initialize request argument(s) - request = translate_v3.DeleteAdaptiveMtFileRequest( + request = translate_v3.GetDatasetRequest( name="name_value", ) # Make the request - client.delete_adaptive_mt_file(request=request) + response = client.get_dataset(request=request) + + # Handle the response + print(response) Args: - request (Union[google.cloud.translate_v3.types.DeleteAdaptiveMtFileRequest, dict]): - The request object. The request for deleting an - AdaptiveMt file. + request (Union[google.cloud.translate_v3.types.GetDatasetRequest, dict]): + The request object. Request message for GetDataset. name (str): - Required. The resource name of the file to delete, in - form of - ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}`` + Required. The resource name of the + dataset to retrieve. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2872,6 +3242,13 @@ def sample_delete_adaptive_mt_file(): timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.types.Dataset: + A dataset that hosts the examples + (sentence pairs) used for translation + models. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -2885,8 +3262,8 @@ def sample_delete_adaptive_mt_file(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, adaptive_mt.DeleteAdaptiveMtFileRequest): - request = adaptive_mt.DeleteAdaptiveMtFileRequest(request) + if not isinstance(request, automl_translation.GetDatasetRequest): + request = automl_translation.GetDatasetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -2894,7 +3271,7 @@ def sample_delete_adaptive_mt_file(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_adaptive_mt_file] + rpc = self._transport._wrapped_methods[self._transport.get_dataset] # Certain fields should be provided within the metadata header; # add these here. @@ -2906,24 +3283,26 @@ def sample_delete_adaptive_mt_file(): self._validate_universe_domain() # Send the request. - rpc( + response = rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - def import_adaptive_mt_file( + # Done; return the response. 
+ return response + + def list_datasets( self, - request: Optional[Union[adaptive_mt.ImportAdaptiveMtFileRequest, dict]] = None, + request: Optional[Union[automl_translation.ListDatasetsRequest, dict]] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> adaptive_mt.ImportAdaptiveMtFileResponse: - r"""Imports an AdaptiveMtFile and adds all of its - sentences into the AdaptiveMtDataset. + ) -> pagers.ListDatasetsPager: + r"""Lists datasets. .. code-block:: python @@ -2936,35 +3315,28 @@ def import_adaptive_mt_file( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import translate_v3 - def sample_import_adaptive_mt_file(): + def sample_list_datasets(): # Create a client client = translate_v3.TranslationServiceClient() # Initialize request argument(s) - file_input_source = translate_v3.FileInputSource() - file_input_source.mime_type = "mime_type_value" - file_input_source.content = b'content_blob' - file_input_source.display_name = "display_name_value" - - request = translate_v3.ImportAdaptiveMtFileRequest( - file_input_source=file_input_source, + request = translate_v3.ListDatasetsRequest( parent="parent_value", ) # Make the request - response = client.import_adaptive_mt_file(request=request) + page_result = client.list_datasets(request=request) # Handle the response - print(response) + for response in page_result: + print(response) Args: - request (Union[google.cloud.translate_v3.types.ImportAdaptiveMtFileRequest, dict]): - The request object. The request for importing an - AdaptiveMt file along with its - sentences. + request (Union[google.cloud.translate_v3.types.ListDatasetsRequest, dict]): + The request object. Request message for ListDatasets. parent (str): - Required. The resource name of the file, in form of - ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}`` + Required. Name of the parent project. In form of + ``projects/{project-number-or-id}/locations/{location-id}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -2976,9 +3348,12 @@ def sample_import_adaptive_mt_file(): sent along with the request as metadata. Returns: - google.cloud.translate_v3.types.ImportAdaptiveMtFileResponse: - The response for importing an - AdaptiveMtFile + google.cloud.translate_v3.services.translation_service.pagers.ListDatasetsPager: + Response message for ListDatasets. + + Iterating over this object will yield + results and resolve additional pages + automatically. """ # Create or coerce a protobuf request object. @@ -2993,8 +3368,8 @@ def sample_import_adaptive_mt_file(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, adaptive_mt.ImportAdaptiveMtFileRequest): - request = adaptive_mt.ImportAdaptiveMtFileRequest(request) + if not isinstance(request, automl_translation.ListDatasetsRequest): + request = automl_translation.ListDatasetsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: @@ -3002,7 +3377,7 @@ def sample_import_adaptive_mt_file(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
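# A sketch of the new dataset lifecycle documented above: create_dataset returns a
# long-running operation that resolves to a Dataset, and list_datasets returns a
# pager. The parent path is a placeholder, and the Dataset fields set below
# (display_name and the language codes) are assumptions about the new Dataset type;
# only the flattened ``parent`` / ``dataset`` / ``name`` arguments are confirmed.
from google.cloud import translate_v3

client = translate_v3.TranslationServiceClient()
parent = "projects/my-project/locations/us-central1"

dataset = translate_v3.Dataset()
dataset.display_name = "my_dataset"          # assumed field
dataset.source_language_code = "en"          # assumed field
dataset.target_language_code = "es"          # assumed field

operation = client.create_dataset(parent=parent, dataset=dataset)
created = operation.result()                 # blocks until the LRO finishes

for ds in client.list_datasets(parent=parent):
    print(ds.name)

fetched = client.get_dataset(name=created.name)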
- rpc = self._transport._wrapped_methods[self._transport.import_adaptive_mt_file] + rpc = self._transport._wrapped_methods[self._transport.list_datasets] # Certain fields should be provided within the metadata header; # add these here. @@ -3021,20 +3396,30 @@ def sample_import_adaptive_mt_file(): metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDatasetsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + # Done; return the response. return response - def list_adaptive_mt_files( + def delete_dataset( self, - request: Optional[Union[adaptive_mt.ListAdaptiveMtFilesRequest, dict]] = None, + request: Optional[Union[automl_translation.DeleteDatasetRequest, dict]] = None, *, - parent: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAdaptiveMtFilesPager: - r"""Lists all AdaptiveMtFiles associated to an - AdaptiveMtDataset. + ) -> operation.Operation: + r"""Deletes a dataset and all of its contents. .. code-block:: python @@ -3047,32 +3432,33 @@ def list_adaptive_mt_files( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import translate_v3 - def sample_list_adaptive_mt_files(): + def sample_delete_dataset(): # Create a client client = translate_v3.TranslationServiceClient() # Initialize request argument(s) - request = translate_v3.ListAdaptiveMtFilesRequest( - parent="parent_value", + request = translate_v3.DeleteDatasetRequest( + name="name_value", ) # Make the request - page_result = client.list_adaptive_mt_files(request=request) + operation = client.delete_dataset(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response - for response in page_result: - print(response) + print(response) Args: - request (Union[google.cloud.translate_v3.types.ListAdaptiveMtFilesRequest, dict]): - The request object. The request to list all AdaptiveMt - files under a given dataset. - parent (str): - Required. The resource name of the project from which to - list the Adaptive MT files. - ``projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}`` + request (Union[google.cloud.translate_v3.types.DeleteDatasetRequest, dict]): + The request object. Request message for DeleteDataset. + name (str): + Required. The name of the dataset to + delete. - This corresponds to the ``parent`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -3082,18 +3468,25 @@ def sample_list_adaptive_mt_files(): sent along with the request as metadata. Returns: - google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtFilesPager: - The response for listing all - AdaptiveMt files under a given dataset. - Iterating over this object will yield - results and resolve additional pages - automatically. + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. 
A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3102,21 +3495,21 @@ def sample_list_adaptive_mt_files(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, adaptive_mt.ListAdaptiveMtFilesRequest): - request = adaptive_mt.ListAdaptiveMtFilesRequest(request) + if not isinstance(request, automl_translation.DeleteDatasetRequest): + request = automl_translation.DeleteDatasetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_adaptive_mt_files] + rpc = self._transport._wrapped_methods[self._transport.delete_dataset] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3130,33 +3523,30 @@ def sample_list_adaptive_mt_files(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListAdaptiveMtFilesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=automl_translation.DeleteDatasetMetadata, ) # Done; return the response. return response - def list_adaptive_mt_sentences( + def create_adaptive_mt_dataset( self, request: Optional[ - Union[adaptive_mt.ListAdaptiveMtSentencesRequest, dict] + Union[adaptive_mt.CreateAdaptiveMtDatasetRequest, dict] ] = None, *, parent: Optional[str] = None, + adaptive_mt_dataset: Optional[adaptive_mt.AdaptiveMtDataset] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAdaptiveMtSentencesPager: - r"""Lists all AdaptiveMtSentences under a given - file/dataset. + ) -> adaptive_mt.AdaptiveMtDataset: + r"""Creates an Adaptive MT dataset. .. 
code-block:: python @@ -3169,38 +3559,43 @@ def list_adaptive_mt_sentences( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import translate_v3 - def sample_list_adaptive_mt_sentences(): + def sample_create_adaptive_mt_dataset(): # Create a client client = translate_v3.TranslationServiceClient() # Initialize request argument(s) - request = translate_v3.ListAdaptiveMtSentencesRequest( + adaptive_mt_dataset = translate_v3.AdaptiveMtDataset() + adaptive_mt_dataset.name = "name_value" + + request = translate_v3.CreateAdaptiveMtDatasetRequest( parent="parent_value", + adaptive_mt_dataset=adaptive_mt_dataset, ) # Make the request - page_result = client.list_adaptive_mt_sentences(request=request) + response = client.create_adaptive_mt_dataset(request=request) # Handle the response - for response in page_result: - print(response) + print(response) Args: - request (Union[google.cloud.translate_v3.types.ListAdaptiveMtSentencesRequest, dict]): - The request object. The request for listing Adaptive MT - sentences from a Dataset/File. + request (Union[google.cloud.translate_v3.types.CreateAdaptiveMtDatasetRequest, dict]): + The request object. Request message for creating an + AdaptiveMtDataset. parent (str): - Required. The resource name of the project from which to - list the Adaptive MT files. The following format lists - all sentences under a file. - ``projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}`` - The following format lists all sentences within a - dataset. - ``projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}`` + Required. Name of the parent project. In form of + ``projects/{project-number-or-id}/locations/{location-id}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + adaptive_mt_dataset (google.cloud.translate_v3.types.AdaptiveMtDataset): + Required. The AdaptiveMtDataset to be + created. + + This corresponds to the ``adaptive_mt_dataset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3208,18 +3603,13 @@ def sample_list_adaptive_mt_sentences(): sent along with the request as metadata. Returns: - google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtSentencesPager: - List AdaptiveMt sentences response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - + google.cloud.translate_v3.types.AdaptiveMtDataset: + An Adaptive MT Dataset. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([parent, adaptive_mt_dataset]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3228,17 +3618,19 @@ def sample_list_adaptive_mt_sentences(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
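# Rounding out the dataset flow above: delete_dataset is also a long-running
# operation whose result is Empty, so waiting on it simply confirms the deletion.
# The dataset resource name below is a placeholder (a minimal sketch only).
from google.cloud import translate_v3

client = translate_v3.TranslationServiceClient()
operation = client.delete_dataset(
    name="projects/my-project/locations/us-central1/datasets/my-dataset"
)
operation.result()  # returns google.protobuf.empty_pb2.Empty on success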
- if not isinstance(request, adaptive_mt.ListAdaptiveMtSentencesRequest): - request = adaptive_mt.ListAdaptiveMtSentencesRequest(request) + if not isinstance(request, adaptive_mt.CreateAdaptiveMtDatasetRequest): + request = adaptive_mt.CreateAdaptiveMtDatasetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent + if adaptive_mt_dataset is not None: + request.adaptive_mt_dataset = adaptive_mt_dataset # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[ - self._transport.list_adaptive_mt_sentences + self._transport.create_adaptive_mt_dataset ] # Certain fields should be provided within the metadata header; @@ -3258,32 +3650,2272 @@ def sample_list_adaptive_mt_sentences(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListAdaptiveMtSentencesPager( - method=rpc, - request=request, - response=response, + # Done; return the response. + return response + + def delete_adaptive_mt_dataset( + self, + request: Optional[ + Union[adaptive_mt.DeleteAdaptiveMtDatasetRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an Adaptive MT dataset, including all its + entries and associated metadata. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_delete_adaptive_mt_dataset(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.DeleteAdaptiveMtDatasetRequest( + name="name_value", + ) + + # Make the request + client.delete_adaptive_mt_dataset(request=request) + + Args: + request (Union[google.cloud.translate_v3.types.DeleteAdaptiveMtDatasetRequest, dict]): + The request object. Request message for deleting an + AdaptiveMtDataset. + name (str): + Required. Name of the dataset. In the form of + ``projects/{project-number-or-id}/locations/{location-id}/adaptiveMtDatasets/{adaptive-mt-dataset-id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, adaptive_mt.DeleteAdaptiveMtDatasetRequest): + request = adaptive_mt.DeleteAdaptiveMtDatasetRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_adaptive_mt_dataset + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, ) - # Done; return the response. - return response + def get_adaptive_mt_dataset( + self, + request: Optional[Union[adaptive_mt.GetAdaptiveMtDatasetRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.AdaptiveMtDataset: + r"""Gets the Adaptive MT dataset. - def __enter__(self) -> "TranslationServiceClient": - return self + .. code-block:: python - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() + def sample_get_adaptive_mt_dataset(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.GetAdaptiveMtDatasetRequest( + name="name_value", + ) + + # Make the request + response = client.get_adaptive_mt_dataset(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.GetAdaptiveMtDatasetRequest, dict]): + The request object. Request message for getting an + Adaptive MT dataset. + name (str): + Required. Name of the dataset. In the form of + ``projects/{project-number-or-id}/locations/{location-id}/adaptiveMtDatasets/{adaptive-mt-dataset-id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.types.AdaptiveMtDataset: + An Adaptive MT Dataset. + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, adaptive_mt.GetAdaptiveMtDatasetRequest): + request = adaptive_mt.GetAdaptiveMtDatasetRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_adaptive_mt_dataset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_adaptive_mt_datasets( + self, + request: Optional[ + Union[adaptive_mt.ListAdaptiveMtDatasetsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAdaptiveMtDatasetsPager: + r"""Lists all Adaptive MT datasets for which the caller + has read permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_list_adaptive_mt_datasets(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.ListAdaptiveMtDatasetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_adaptive_mt_datasets(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.ListAdaptiveMtDatasetsRequest, dict]): + The request object. Request message for listing all + Adaptive MT datasets that the requestor + has access to. + parent (str): + Required. The resource name of the project from which to + list the Adaptive MT datasets. + ``projects/{project-number-or-id}/locations/{location-id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtDatasetsPager: + A list of AdaptiveMtDatasets. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, adaptive_mt.ListAdaptiveMtDatasetsRequest): + request = adaptive_mt.ListAdaptiveMtDatasetsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_adaptive_mt_datasets + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAdaptiveMtDatasetsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def adaptive_mt_translate( + self, + request: Optional[Union[adaptive_mt.AdaptiveMtTranslateRequest, dict]] = None, + *, + parent: Optional[str] = None, + content: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.AdaptiveMtTranslateResponse: + r"""Translate text using Adaptive MT. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_adaptive_mt_translate(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.AdaptiveMtTranslateRequest( + parent="parent_value", + dataset="dataset_value", + content=['content_value1', 'content_value2'], + ) + + # Make the request + response = client.adaptive_mt_translate(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.AdaptiveMtTranslateRequest, dict]): + The request object. The request for sending an AdaptiveMt + translation query. + parent (str): + Required. Location to make a regional call. 
+
+                Format:
+                ``projects/{project-number-or-id}/locations/{location-id}``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            content (MutableSequence[str]):
+                Required. The content of the input in
+                string format.
+
+                This corresponds to the ``content`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.translate_v3.types.AdaptiveMtTranslateResponse:
+                An AdaptiveMtTranslate response.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, content])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, adaptive_mt.AdaptiveMtTranslateRequest):
+            request = adaptive_mt.AdaptiveMtTranslateRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+            if content is not None:
+                request.content = content
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.adaptive_mt_translate]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def get_adaptive_mt_file(
+        self,
+        request: Optional[Union[adaptive_mt.GetAdaptiveMtFileRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> adaptive_mt.AdaptiveMtFile:
+        r"""Gets an AdaptiveMtFile.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_get_adaptive_mt_file(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.GetAdaptiveMtFileRequest( + name="name_value", + ) + + # Make the request + response = client.get_adaptive_mt_file(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.GetAdaptiveMtFileRequest, dict]): + The request object. The request for getting an + AdaptiveMtFile. + name (str): + Required. The resource name of the file, in form of + ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.types.AdaptiveMtFile: + An AdaptiveMtFile. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, adaptive_mt.GetAdaptiveMtFileRequest): + request = adaptive_mt.GetAdaptiveMtFileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_adaptive_mt_file] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_adaptive_mt_file( + self, + request: Optional[Union[adaptive_mt.DeleteAdaptiveMtFileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an AdaptiveMtFile along with its sentences. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_delete_adaptive_mt_file(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.DeleteAdaptiveMtFileRequest( + name="name_value", + ) + + # Make the request + client.delete_adaptive_mt_file(request=request) + + Args: + request (Union[google.cloud.translate_v3.types.DeleteAdaptiveMtFileRequest, dict]): + The request object. The request for deleting an + AdaptiveMt file. + name (str): + Required. The resource name of the file to delete, in + form of + ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, adaptive_mt.DeleteAdaptiveMtFileRequest): + request = adaptive_mt.DeleteAdaptiveMtFileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_adaptive_mt_file] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def import_adaptive_mt_file( + self, + request: Optional[Union[adaptive_mt.ImportAdaptiveMtFileRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.ImportAdaptiveMtFileResponse: + r"""Imports an AdaptiveMtFile and adds all of its + sentences into the AdaptiveMtDataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_import_adaptive_mt_file(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + file_input_source = translate_v3.FileInputSource() + file_input_source.mime_type = "mime_type_value" + file_input_source.content = b'content_blob' + file_input_source.display_name = "display_name_value" + + request = translate_v3.ImportAdaptiveMtFileRequest( + file_input_source=file_input_source, + parent="parent_value", + ) + + # Make the request + response = client.import_adaptive_mt_file(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.ImportAdaptiveMtFileRequest, dict]): + The request object. The request for importing an + AdaptiveMt file along with its + sentences. + parent (str): + Required. The resource name of the file, in form of + ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.types.ImportAdaptiveMtFileResponse: + The response for importing an + AdaptiveMtFile + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, adaptive_mt.ImportAdaptiveMtFileRequest): + request = adaptive_mt.ImportAdaptiveMtFileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_adaptive_mt_file] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_adaptive_mt_files( + self, + request: Optional[Union[adaptive_mt.ListAdaptiveMtFilesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAdaptiveMtFilesPager: + r"""Lists all AdaptiveMtFiles associated to an + AdaptiveMtDataset. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_list_adaptive_mt_files(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.ListAdaptiveMtFilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_adaptive_mt_files(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.ListAdaptiveMtFilesRequest, dict]): + The request object. The request to list all AdaptiveMt + files under a given dataset. + parent (str): + Required. The resource name of the project from which to + list the Adaptive MT files. + ``projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtFilesPager: + The response for listing all + AdaptiveMt files under a given dataset. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, adaptive_mt.ListAdaptiveMtFilesRequest): + request = adaptive_mt.ListAdaptiveMtFilesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_adaptive_mt_files] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAdaptiveMtFilesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_adaptive_mt_sentences( + self, + request: Optional[ + Union[adaptive_mt.ListAdaptiveMtSentencesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAdaptiveMtSentencesPager: + r"""Lists all AdaptiveMtSentences under a given + file/dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_list_adaptive_mt_sentences(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.ListAdaptiveMtSentencesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_adaptive_mt_sentences(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.ListAdaptiveMtSentencesRequest, dict]): + The request object. The request for listing Adaptive MT + sentences from a Dataset/File. + parent (str): + Required. The resource name of the project from which to + list the Adaptive MT files. The following format lists + all sentences under a file. + ``projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}`` + The following format lists all sentences within a + dataset. + ``projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtSentencesPager: + List AdaptiveMt sentences response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, adaptive_mt.ListAdaptiveMtSentencesRequest): + request = adaptive_mt.ListAdaptiveMtSentencesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
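+        # The pager returned at the end of this method fetches follow-up pages
+        # lazily. A usage sketch (``parent`` is an illustrative resource name):
+        #
+        #     parent = "projects/my-project/locations/us-central1/adaptiveMtDatasets/my-dataset"
+        #     for sentence in client.list_adaptive_mt_sentences(parent=parent):
+        #         print(sentence)
+        #
+        #     # or resolve page by page:
+        #     for page in client.list_adaptive_mt_sentences(parent=parent).pages:
+        #         print(page)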
+ rpc = self._transport._wrapped_methods[ + self._transport.list_adaptive_mt_sentences + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAdaptiveMtSentencesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def import_data( + self, + request: Optional[Union[automl_translation.ImportDataRequest, dict]] = None, + *, + dataset: Optional[str] = None, + input_config: Optional[automl_translation.DatasetInputConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Import sentence pairs into translation Dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_import_data(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.ImportDataRequest( + dataset="dataset_value", + ) + + # Make the request + operation = client.import_data(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.ImportDataRequest, dict]): + The request object. Request message for ImportData. + dataset (str): + Required. Name of the dataset. In form of + ``projects/{project-number-or-id}/locations/{location-id}/datasets/{dataset-id}`` + + This corresponds to the ``dataset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + input_config (google.cloud.translate_v3.types.DatasetInputConfig): + Required. The config for the input + content. + + This corresponds to the ``input_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([dataset, input_config]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, automl_translation.ImportDataRequest): + request = automl_translation.ImportDataRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if dataset is not None: + request.dataset = dataset + if input_config is not None: + request.input_config = input_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_data] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("dataset", request.dataset),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=automl_translation.ImportDataMetadata, + ) + + # Done; return the response. + return response + + def export_data( + self, + request: Optional[Union[automl_translation.ExportDataRequest, dict]] = None, + *, + dataset: Optional[str] = None, + output_config: Optional[automl_translation.DatasetOutputConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Exports dataset's data to the provided output + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_export_data(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + output_config = translate_v3.DatasetOutputConfig() + output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" + + request = translate_v3.ExportDataRequest( + dataset="dataset_value", + output_config=output_config, + ) + + # Make the request + operation = client.export_data(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.ExportDataRequest, dict]): + The request object. Request message for ExportData. + dataset (str): + Required. Name of the dataset. 
In form of + ``projects/{project-number-or-id}/locations/{location-id}/datasets/{dataset-id}`` + + This corresponds to the ``dataset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + output_config (google.cloud.translate_v3.types.DatasetOutputConfig): + Required. The config for the output + content. + + This corresponds to the ``output_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([dataset, output_config]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, automl_translation.ExportDataRequest): + request = automl_translation.ExportDataRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if dataset is not None: + request.dataset = dataset + if output_config is not None: + request.output_config = output_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_data] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("dataset", request.dataset),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=automl_translation.ExportDataMetadata, + ) + + # Done; return the response. + return response + + def list_examples( + self, + request: Optional[Union[automl_translation.ListExamplesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListExamplesPager: + r"""Lists sentence pairs in the dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_list_examples(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.ListExamplesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_examples(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.ListExamplesRequest, dict]): + The request object. Request message for ListExamples. + parent (str): + Required. Name of the parent dataset. In form of + ``projects/{project-number-or-id}/locations/{location-id}/datasets/{dataset-id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.services.translation_service.pagers.ListExamplesPager: + Response message for ListExamples. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, automl_translation.ListExamplesRequest): + request = automl_translation.ListExamplesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_examples] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListExamplesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_model( + self, + request: Optional[Union[automl_translation.CreateModelRequest, dict]] = None, + *, + parent: Optional[str] = None, + model: Optional[automl_translation.Model] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a Model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_create_model(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.CreateModelRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.CreateModelRequest, dict]): + The request object. Request message for CreateModel. + parent (str): + Required. The project name, in form of + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + model (google.cloud.translate_v3.types.Model): + Required. The Model to create. + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.translate_v3.types.Model` A trained + translation model. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, model]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, automl_translation.CreateModelRequest): + request = automl_translation.CreateModelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if model is not None: + request.model = model + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_model] + + # Certain fields should be provided within the metadata header; + # add these here. 
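+        # The routing header assembled below is sent as ``x-goog-request-params`` so
+        # the service can route the call by project/location; roughly (illustrative
+        # value only):
+        #
+        #     ("x-goog-request-params", "parent=projects/my-project/locations/us-central1")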
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + automl_translation.Model, + metadata_type=automl_translation.CreateModelMetadata, + ) + + # Done; return the response. + return response + + def list_models( + self, + request: Optional[Union[automl_translation.ListModelsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListModelsPager: + r"""Lists models. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_list_models(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.ListModelsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_models(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.ListModelsRequest, dict]): + The request object. Request message for ListModels. + parent (str): + Required. Name of the parent project. In form of + ``projects/{project-number-or-id}/locations/{location-id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.services.translation_service.pagers.ListModelsPager: + Response message for ListModels. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, automl_translation.ListModelsRequest): + request = automl_translation.ListModelsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.list_models] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListModelsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_model( + self, + request: Optional[Union[automl_translation.GetModelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> automl_translation.Model: + r"""Gets a model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_get_model(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.GetModelRequest( + name="name_value", + ) + + # Make the request + response = client.get_model(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.GetModelRequest, dict]): + The request object. Request message for GetModel. + name (str): + Required. The resource name of the + model to retrieve. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.types.Model: + A trained translation model. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, automl_translation.GetModelRequest): + request = automl_translation.GetModelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
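+        # The wrapped method applies the default retry and timeout configured on the
+        # transport; values passed per call take precedence. A sketch (``model_name``
+        # and the numbers are illustrative, not library defaults):
+        #
+        #     from google.api_core import retry as retries
+        #
+        #     model = client.get_model(
+        #         name=model_name,
+        #         retry=retries.Retry(initial=1.0, maximum=10.0, multiplier=2.0),
+        #         timeout=60.0,
+        #     )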
+ rpc = self._transport._wrapped_methods[self._transport.get_model] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_model( + self, + request: Optional[Union[automl_translation.DeleteModelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_delete_model(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.DeleteModelRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.DeleteModelRequest, dict]): + The request object. Request message for DeleteModel. + name (str): + Required. The name of the model to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, automl_translation.DeleteModelRequest): + request = automl_translation.DeleteModelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_model] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=automl_translation.DeleteModelMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "TranslationServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. 
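+
+        For example (a minimal sketch; the operation name below is a placeholder):
+
+        .. code-block:: python
+
+            from google.cloud import translate_v3
+            from google.longrunning import operations_pb2
+
+            client = translate_v3.TranslationServiceClient()
+            client.cancel_operation(
+                operations_pb2.CancelOperationRequest(
+                    name="projects/my-project/locations/us-central1/operations/operation-id",
+                )
+            )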
+ + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def wait_operation( + self, + request: Optional[operations_pb2.WaitOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Waits until the specified long-running operation is done or reaches at most + a specified timeout, returning the latest state. + + If the operation is already done, the latest state is immediately returned. + If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC + timeout is used. If the server does not support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.WaitOperationRequest`): + The request object. Request message for + `WaitOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.WaitOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.wait_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
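The operations and locations helpers added above (list_operations, get_operation, delete_operation, cancel_operation, wait_operation, get_location, list_locations) are thin pass-throughs over the google.longrunning and google.cloud.location mixins. The following is a minimal usage sketch, not part of the generated patch itself; the resource name is a placeholder:

from google.cloud import translate_v3
from google.longrunning import operations_pb2

client = translate_v3.TranslationServiceClient()

# List long-running operations under a location (resource name is a placeholder).
response = client.list_operations(
    request=operations_pb2.ListOperationsRequest(
        name="projects/my-project/locations/us-central1"
    )
)
for operation in response.operations:
    # Each entry is a google.longrunning.Operation; fetch its latest state by name.
    latest = client.get_operation(
        request=operations_pb2.GetOperationRequest(name=operation.name)
    )
    print(latest.name, latest.done)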
+ return response DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/pagers.py b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/pagers.py index 3ce48fec9e31..dffa95ae680c 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/pagers.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/pagers.py @@ -38,7 +38,12 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.cloud.translate_v3.types import adaptive_mt, translation_service +from google.cloud.translate_v3.types import ( + adaptive_mt, + automl_translation, + common, + translation_service, +) class ListGlossariesPager: @@ -193,6 +198,314 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class ListGlossaryEntriesPager: + """A pager for iterating through ``list_glossary_entries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.translate_v3.types.ListGlossaryEntriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``glossary_entries`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListGlossaryEntries`` requests and continue to iterate + through the ``glossary_entries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.translate_v3.types.ListGlossaryEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., translation_service.ListGlossaryEntriesResponse], + request: translation_service.ListGlossaryEntriesRequest, + response: translation_service.ListGlossaryEntriesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.translate_v3.types.ListGlossaryEntriesRequest): + The initial request object. + response (google.cloud.translate_v3.types.ListGlossaryEntriesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = translation_service.ListGlossaryEntriesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[translation_service.ListGlossaryEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[common.GlossaryEntry]: + for page in self.pages: + yield from page.glossary_entries + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListGlossaryEntriesAsyncPager: + """A pager for iterating through ``list_glossary_entries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.translate_v3.types.ListGlossaryEntriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``glossary_entries`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListGlossaryEntries`` requests and continue to iterate + through the ``glossary_entries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.translate_v3.types.ListGlossaryEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[translation_service.ListGlossaryEntriesResponse] + ], + request: translation_service.ListGlossaryEntriesRequest, + response: translation_service.ListGlossaryEntriesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.translate_v3.types.ListGlossaryEntriesRequest): + The initial request object. + response (google.cloud.translate_v3.types.ListGlossaryEntriesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = translation_service.ListGlossaryEntriesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[translation_service.ListGlossaryEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[common.GlossaryEntry]: + async def async_generator(): + async for page in self.pages: + for response in page.glossary_entries: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDatasetsPager: + """A pager for iterating through ``list_datasets`` requests. + + This class thinly wraps an initial + :class:`google.cloud.translate_v3.types.ListDatasetsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``datasets`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDatasets`` requests and continue to iterate + through the ``datasets`` field on the + corresponding responses. + + All the usual :class:`google.cloud.translate_v3.types.ListDatasetsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., automl_translation.ListDatasetsResponse], + request: automl_translation.ListDatasetsRequest, + response: automl_translation.ListDatasetsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.translate_v3.types.ListDatasetsRequest): + The initial request object. + response (google.cloud.translate_v3.types.ListDatasetsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = automl_translation.ListDatasetsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[automl_translation.ListDatasetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[automl_translation.Dataset]: + for page in self.pages: + yield from page.datasets + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDatasetsAsyncPager: + """A pager for iterating through ``list_datasets`` requests. + + This class thinly wraps an initial + :class:`google.cloud.translate_v3.types.ListDatasetsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``datasets`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDatasets`` requests and continue to iterate + through the ``datasets`` field on the + corresponding responses. + + All the usual :class:`google.cloud.translate_v3.types.ListDatasetsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[automl_translation.ListDatasetsResponse]], + request: automl_translation.ListDatasetsRequest, + response: automl_translation.ListDatasetsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.translate_v3.types.ListDatasetsRequest): + The initial request object. + response (google.cloud.translate_v3.types.ListDatasetsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
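Like the glossary-entry pagers above, the dataset pagers hide the page_token bookkeeping: iterating the pager yields individual Dataset messages, while the pages property yields whole ListDatasetsResponse objects. A short illustrative sketch follows, assuming the corresponding list_datasets client method generated elsewhere in this change and a placeholder parent path:

from google.cloud import translate_v3

client = translate_v3.TranslationServiceClient()
parent = "projects/my-project/locations/us-central1"  # placeholder

# Iterating the pager issues additional ListDatasets requests lazily as needed.
for dataset in client.list_datasets(parent=parent):
    print(dataset.name, dataset.display_name)

# Alternatively, walk page by page to inspect each raw ListDatasetsResponse.
for page in client.list_datasets(parent=parent).pages:
    print(len(page.datasets), "datasets on this page")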
+ """ + self._method = method + self._request = automl_translation.ListDatasetsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[automl_translation.ListDatasetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[automl_translation.Dataset]: + async def async_generator(): + async for page in self.pages: + for response in page.datasets: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListAdaptiveMtDatasetsPager: """A pager for iterating through ``list_adaptive_mt_datasets`` requests. @@ -647,3 +960,307 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListExamplesPager: + """A pager for iterating through ``list_examples`` requests. + + This class thinly wraps an initial + :class:`google.cloud.translate_v3.types.ListExamplesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``examples`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListExamples`` requests and continue to iterate + through the ``examples`` field on the + corresponding responses. + + All the usual :class:`google.cloud.translate_v3.types.ListExamplesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., automl_translation.ListExamplesResponse], + request: automl_translation.ListExamplesRequest, + response: automl_translation.ListExamplesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.translate_v3.types.ListExamplesRequest): + The initial request object. + response (google.cloud.translate_v3.types.ListExamplesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = automl_translation.ListExamplesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[automl_translation.ListExamplesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[automl_translation.Example]: + for page in self.pages: + yield from page.examples + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListExamplesAsyncPager: + """A pager for iterating through ``list_examples`` requests. + + This class thinly wraps an initial + :class:`google.cloud.translate_v3.types.ListExamplesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``examples`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListExamples`` requests and continue to iterate + through the ``examples`` field on the + corresponding responses. + + All the usual :class:`google.cloud.translate_v3.types.ListExamplesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[automl_translation.ListExamplesResponse]], + request: automl_translation.ListExamplesRequest, + response: automl_translation.ListExamplesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.translate_v3.types.ListExamplesRequest): + The initial request object. + response (google.cloud.translate_v3.types.ListExamplesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = automl_translation.ListExamplesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[automl_translation.ListExamplesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[automl_translation.Example]: + async def async_generator(): + async for page in self.pages: + for response in page.examples: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListModelsPager: + """A pager for iterating through ``list_models`` requests. + + This class thinly wraps an initial + :class:`google.cloud.translate_v3.types.ListModelsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``models`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListModels`` requests and continue to iterate + through the ``models`` field on the + corresponding responses. + + All the usual :class:`google.cloud.translate_v3.types.ListModelsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., automl_translation.ListModelsResponse], + request: automl_translation.ListModelsRequest, + response: automl_translation.ListModelsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.translate_v3.types.ListModelsRequest): + The initial request object. + response (google.cloud.translate_v3.types.ListModelsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = automl_translation.ListModelsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[automl_translation.ListModelsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[automl_translation.Model]: + for page in self.pages: + yield from page.models + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListModelsAsyncPager: + """A pager for iterating through ``list_models`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.translate_v3.types.ListModelsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``models`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListModels`` requests and continue to iterate + through the ``models`` field on the + corresponding responses. + + All the usual :class:`google.cloud.translate_v3.types.ListModelsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[automl_translation.ListModelsResponse]], + request: automl_translation.ListModelsRequest, + response: automl_translation.ListModelsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.translate_v3.types.ListModelsRequest): + The initial request object. + response (google.cloud.translate_v3.types.ListModelsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = automl_translation.ListModelsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[automl_translation.ListModelsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[automl_translation.Model]: + async def async_generator(): + async for page in self.pages: + for response in page.models: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/base.py b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/base.py index 271e8d25503e..6322cf923b05 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/base.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/base.py @@ -22,12 +22,20 @@ from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore from google.protobuf import empty_pb2 # type: ignore from 
google.cloud.translate_v3 import gapic_version as package_version -from google.cloud.translate_v3.types import adaptive_mt, translation_service +from google.cloud.translate_v3.types import ( + adaptive_mt, + automl_translation, + common, + translation_service, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -138,6 +146,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.romanize_text: gapic_v1.method.wrap_method( + self.romanize_text, + default_timeout=None, + client_info=client_info, + ), self.detect_language: gapic_v1.method.wrap_method( self.detect_language, default_timeout=600.0, @@ -178,6 +191,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.update_glossary: gapic_v1.method.wrap_method( + self.update_glossary, + default_timeout=None, + client_info=client_info, + ), self.list_glossaries: gapic_v1.method.wrap_method( self.list_glossaries, default_retry=retries.Retry( @@ -223,6 +241,51 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.get_glossary_entry: gapic_v1.method.wrap_method( + self.get_glossary_entry, + default_timeout=None, + client_info=client_info, + ), + self.list_glossary_entries: gapic_v1.method.wrap_method( + self.list_glossary_entries, + default_timeout=None, + client_info=client_info, + ), + self.create_glossary_entry: gapic_v1.method.wrap_method( + self.create_glossary_entry, + default_timeout=None, + client_info=client_info, + ), + self.update_glossary_entry: gapic_v1.method.wrap_method( + self.update_glossary_entry, + default_timeout=None, + client_info=client_info, + ), + self.delete_glossary_entry: gapic_v1.method.wrap_method( + self.delete_glossary_entry, + default_timeout=None, + client_info=client_info, + ), + self.create_dataset: gapic_v1.method.wrap_method( + self.create_dataset, + default_timeout=None, + client_info=client_info, + ), + self.get_dataset: gapic_v1.method.wrap_method( + self.get_dataset, + default_timeout=None, + client_info=client_info, + ), + self.list_datasets: gapic_v1.method.wrap_method( + self.list_datasets, + default_timeout=None, + client_info=client_info, + ), + self.delete_dataset: gapic_v1.method.wrap_method( + self.delete_dataset, + default_timeout=None, + client_info=client_info, + ), self.create_adaptive_mt_dataset: gapic_v1.method.wrap_method( self.create_adaptive_mt_dataset, default_timeout=None, @@ -273,6 +336,41 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.import_data: gapic_v1.method.wrap_method( + self.import_data, + default_timeout=None, + client_info=client_info, + ), + self.export_data: gapic_v1.method.wrap_method( + self.export_data, + default_timeout=None, + client_info=client_info, + ), + self.list_examples: gapic_v1.method.wrap_method( + self.list_examples, + default_timeout=None, + client_info=client_info, + ), + self.create_model: gapic_v1.method.wrap_method( + self.create_model, + default_timeout=None, + client_info=client_info, + ), + self.list_models: gapic_v1.method.wrap_method( + self.list_models, + default_timeout=None, + client_info=client_info, + ), + self.get_model: gapic_v1.method.wrap_method( + self.get_model, + default_timeout=None, + client_info=client_info, + ), + self.delete_model: gapic_v1.method.wrap_method( + self.delete_model, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ 
-301,6 +399,18 @@ def translate_text( ]: raise NotImplementedError() + @property + def romanize_text( + self, + ) -> Callable[ + [translation_service.RomanizeTextRequest], + Union[ + translation_service.RomanizeTextResponse, + Awaitable[translation_service.RomanizeTextResponse], + ], + ]: + raise NotImplementedError() + @property def detect_language( self, @@ -364,6 +474,15 @@ def create_glossary( ]: raise NotImplementedError() + @property + def update_glossary( + self, + ) -> Callable[ + [translation_service.UpdateGlossaryRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_glossaries( self, @@ -394,6 +513,93 @@ def delete_glossary( ]: raise NotImplementedError() + @property + def get_glossary_entry( + self, + ) -> Callable[ + [translation_service.GetGlossaryEntryRequest], + Union[common.GlossaryEntry, Awaitable[common.GlossaryEntry]], + ]: + raise NotImplementedError() + + @property + def list_glossary_entries( + self, + ) -> Callable[ + [translation_service.ListGlossaryEntriesRequest], + Union[ + translation_service.ListGlossaryEntriesResponse, + Awaitable[translation_service.ListGlossaryEntriesResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_glossary_entry( + self, + ) -> Callable[ + [translation_service.CreateGlossaryEntryRequest], + Union[common.GlossaryEntry, Awaitable[common.GlossaryEntry]], + ]: + raise NotImplementedError() + + @property + def update_glossary_entry( + self, + ) -> Callable[ + [translation_service.UpdateGlossaryEntryRequest], + Union[common.GlossaryEntry, Awaitable[common.GlossaryEntry]], + ]: + raise NotImplementedError() + + @property + def delete_glossary_entry( + self, + ) -> Callable[ + [translation_service.DeleteGlossaryEntryRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def create_dataset( + self, + ) -> Callable[ + [automl_translation.CreateDatasetRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_dataset( + self, + ) -> Callable[ + [automl_translation.GetDatasetRequest], + Union[automl_translation.Dataset, Awaitable[automl_translation.Dataset]], + ]: + raise NotImplementedError() + + @property + def list_datasets( + self, + ) -> Callable[ + [automl_translation.ListDatasetsRequest], + Union[ + automl_translation.ListDatasetsResponse, + Awaitable[automl_translation.ListDatasetsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_dataset( + self, + ) -> Callable[ + [automl_translation.DeleteDatasetRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def create_adaptive_mt_dataset( self, @@ -499,6 +705,138 @@ def list_adaptive_mt_sentences( ]: raise NotImplementedError() + @property + def import_data( + self, + ) -> Callable[ + [automl_translation.ImportDataRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def export_data( + self, + ) -> Callable[ + [automl_translation.ExportDataRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_examples( + self, + ) -> Callable[ + [automl_translation.ListExamplesRequest], + Union[ + automl_translation.ListExamplesResponse, + Awaitable[automl_translation.ListExamplesResponse], + ], + ]: + 
raise NotImplementedError() + + @property + def create_model( + self, + ) -> Callable[ + [automl_translation.CreateModelRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_models( + self, + ) -> Callable[ + [automl_translation.ListModelsRequest], + Union[ + automl_translation.ListModelsResponse, + Awaitable[automl_translation.ListModelsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_model( + self, + ) -> Callable[ + [automl_translation.GetModelRequest], + Union[automl_translation.Model, Awaitable[automl_translation.Model]], + ]: + raise NotImplementedError() + + @property + def delete_model( + self, + ) -> Callable[ + [automl_translation.DeleteModelRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def wait_operation( + self, + ) -> Callable[ + [operations_pb2.WaitOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/grpc.py b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/grpc.py index 2091e4d9d9fa..aa50816608cf 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/grpc.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/grpc.py @@ -20,11 +20,19 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore -from google.cloud.translate_v3.types import adaptive_mt, translation_service +from google.cloud.translate_v3.types import ( + adaptive_mt, + automl_translation, + common, + translation_service, +) from .base import 
DEFAULT_CLIENT_INFO, TranslationServiceTransport @@ -280,6 +288,36 @@ def translate_text( ) return self._stubs["translate_text"] + @property + def romanize_text( + self, + ) -> Callable[ + [translation_service.RomanizeTextRequest], + translation_service.RomanizeTextResponse, + ]: + r"""Return a callable for the romanize text method over gRPC. + + Romanize input text written in non-Latin scripts to + Latin text. + + Returns: + Callable[[~.RomanizeTextRequest], + ~.RomanizeTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "romanize_text" not in self._stubs: + self._stubs["romanize_text"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/RomanizeText", + request_serializer=translation_service.RomanizeTextRequest.serialize, + response_deserializer=translation_service.RomanizeTextResponse.deserialize, + ) + return self._stubs["romanize_text"] + @property def detect_language( self, @@ -471,6 +509,35 @@ def create_glossary( ) return self._stubs["create_glossary"] + @property + def update_glossary( + self, + ) -> Callable[ + [translation_service.UpdateGlossaryRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update glossary method over gRPC. + + Updates a glossary. A LRO is used since the update + can be async if the glossary's entry file is updated. + + Returns: + Callable[[~.UpdateGlossaryRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_glossary" not in self._stubs: + self._stubs["update_glossary"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/UpdateGlossary", + request_serializer=translation_service.UpdateGlossaryRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_glossary"] + @property def list_glossaries( self, @@ -560,6 +627,250 @@ def delete_glossary( ) return self._stubs["delete_glossary"] + @property + def get_glossary_entry( + self, + ) -> Callable[[translation_service.GetGlossaryEntryRequest], common.GlossaryEntry]: + r"""Return a callable for the get glossary entry method over gRPC. + + Gets a single glossary entry by the given id. + + Returns: + Callable[[~.GetGlossaryEntryRequest], + ~.GlossaryEntry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
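Each transport property in this file repeats the same lazy idiom: build the gRPC stub on first access, memoize it in self._stubs, and return the cached callable afterwards. A stripped-down sketch of that idiom with purely illustrative names (ExampleService and its Echo method are not part of this API):

import grpc


class LazyStubExample:
    """Illustrates the stub-caching pattern used by the transport properties."""

    def __init__(self, channel: grpc.Channel) -> None:
        self._channel = channel
        self._stubs: dict = {}

    @property
    def echo(self):
        # Build the stub only on first access; later accesses reuse the cached callable.
        if "echo" not in self._stubs:
            self._stubs["echo"] = self._channel.unary_unary(
                "/example.v1.ExampleService/Echo"  # hypothetical RPC path
            )
        return self._stubs["echo"]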
+ if "get_glossary_entry" not in self._stubs: + self._stubs["get_glossary_entry"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/GetGlossaryEntry", + request_serializer=translation_service.GetGlossaryEntryRequest.serialize, + response_deserializer=common.GlossaryEntry.deserialize, + ) + return self._stubs["get_glossary_entry"] + + @property + def list_glossary_entries( + self, + ) -> Callable[ + [translation_service.ListGlossaryEntriesRequest], + translation_service.ListGlossaryEntriesResponse, + ]: + r"""Return a callable for the list glossary entries method over gRPC. + + List the entries for the glossary. + + Returns: + Callable[[~.ListGlossaryEntriesRequest], + ~.ListGlossaryEntriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_glossary_entries" not in self._stubs: + self._stubs["list_glossary_entries"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/ListGlossaryEntries", + request_serializer=translation_service.ListGlossaryEntriesRequest.serialize, + response_deserializer=translation_service.ListGlossaryEntriesResponse.deserialize, + ) + return self._stubs["list_glossary_entries"] + + @property + def create_glossary_entry( + self, + ) -> Callable[ + [translation_service.CreateGlossaryEntryRequest], common.GlossaryEntry + ]: + r"""Return a callable for the create glossary entry method over gRPC. + + Creates a glossary entry. + + Returns: + Callable[[~.CreateGlossaryEntryRequest], + ~.GlossaryEntry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_glossary_entry" not in self._stubs: + self._stubs["create_glossary_entry"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/CreateGlossaryEntry", + request_serializer=translation_service.CreateGlossaryEntryRequest.serialize, + response_deserializer=common.GlossaryEntry.deserialize, + ) + return self._stubs["create_glossary_entry"] + + @property + def update_glossary_entry( + self, + ) -> Callable[ + [translation_service.UpdateGlossaryEntryRequest], common.GlossaryEntry + ]: + r"""Return a callable for the update glossary entry method over gRPC. + + Updates a glossary entry. + + Returns: + Callable[[~.UpdateGlossaryEntryRequest], + ~.GlossaryEntry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_glossary_entry" not in self._stubs: + self._stubs["update_glossary_entry"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/UpdateGlossaryEntry", + request_serializer=translation_service.UpdateGlossaryEntryRequest.serialize, + response_deserializer=common.GlossaryEntry.deserialize, + ) + return self._stubs["update_glossary_entry"] + + @property + def delete_glossary_entry( + self, + ) -> Callable[[translation_service.DeleteGlossaryEntryRequest], empty_pb2.Empty]: + r"""Return a callable for the delete glossary entry method over gRPC. + + Deletes a single entry from the glossary + + Returns: + Callable[[~.DeleteGlossaryEntryRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_glossary_entry" not in self._stubs: + self._stubs["delete_glossary_entry"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/DeleteGlossaryEntry", + request_serializer=translation_service.DeleteGlossaryEntryRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_glossary_entry"] + + @property + def create_dataset( + self, + ) -> Callable[[automl_translation.CreateDatasetRequest], operations_pb2.Operation]: + r"""Return a callable for the create dataset method over gRPC. + + Creates a Dataset. + + Returns: + Callable[[~.CreateDatasetRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_dataset" not in self._stubs: + self._stubs["create_dataset"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/CreateDataset", + request_serializer=automl_translation.CreateDatasetRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_dataset"] + + @property + def get_dataset( + self, + ) -> Callable[[automl_translation.GetDatasetRequest], automl_translation.Dataset]: + r"""Return a callable for the get dataset method over gRPC. + + Gets a Dataset. + + Returns: + Callable[[~.GetDatasetRequest], + ~.Dataset]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dataset" not in self._stubs: + self._stubs["get_dataset"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/GetDataset", + request_serializer=automl_translation.GetDatasetRequest.serialize, + response_deserializer=automl_translation.Dataset.deserialize, + ) + return self._stubs["get_dataset"] + + @property + def list_datasets( + self, + ) -> Callable[ + [automl_translation.ListDatasetsRequest], + automl_translation.ListDatasetsResponse, + ]: + r"""Return a callable for the list datasets method over gRPC. + + Lists datasets. + + Returns: + Callable[[~.ListDatasetsRequest], + ~.ListDatasetsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_datasets" not in self._stubs: + self._stubs["list_datasets"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/ListDatasets", + request_serializer=automl_translation.ListDatasetsRequest.serialize, + response_deserializer=automl_translation.ListDatasetsResponse.deserialize, + ) + return self._stubs["list_datasets"] + + @property + def delete_dataset( + self, + ) -> Callable[[automl_translation.DeleteDatasetRequest], operations_pb2.Operation]: + r"""Return a callable for the delete dataset method over gRPC. + + Deletes a dataset and all of its contents. + + Returns: + Callable[[~.DeleteDatasetRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_dataset" not in self._stubs: + self._stubs["delete_dataset"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/DeleteDataset", + request_serializer=automl_translation.DeleteDatasetRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_dataset"] + @property def create_adaptive_mt_dataset( self, @@ -844,9 +1155,320 @@ def list_adaptive_mt_sentences( ) return self._stubs["list_adaptive_mt_sentences"] + @property + def import_data( + self, + ) -> Callable[[automl_translation.ImportDataRequest], operations_pb2.Operation]: + r"""Return a callable for the import data method over gRPC. + + Import sentence pairs into translation Dataset. + + Returns: + Callable[[~.ImportDataRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_data" not in self._stubs: + self._stubs["import_data"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/ImportData", + request_serializer=automl_translation.ImportDataRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_data"] + + @property + def export_data( + self, + ) -> Callable[[automl_translation.ExportDataRequest], operations_pb2.Operation]: + r"""Return a callable for the export data method over gRPC. + + Exports dataset's data to the provided output + location. + + Returns: + Callable[[~.ExportDataRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
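The dataset and model RPCs wired up here (create_dataset, delete_dataset, import_data, export_data, create_model, delete_model) all return google.longrunning.Operation messages; at the client layer they are wrapped into operation futures, as the delete_model method earlier in this diff shows. A hedged sketch of driving one such LRO end to end, assuming the generated create_dataset client method accepts flattened parent/dataset arguments and using placeholder resource values:

from google.cloud import translate_v3

client = translate_v3.TranslationServiceClient()

dataset = translate_v3.Dataset(
    display_name="my_dataset",          # placeholder values
    source_language_code="en",
    target_language_code="de",
)

# The client returns an operation future; result() blocks until the LRO completes.
operation = client.create_dataset(
    parent="projects/my-project/locations/us-central1",
    dataset=dataset,
)
created = operation.result(timeout=300)
print("Created dataset:", created.name)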
+ if "export_data" not in self._stubs: + self._stubs["export_data"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/ExportData", + request_serializer=automl_translation.ExportDataRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_data"] + + @property + def list_examples( + self, + ) -> Callable[ + [automl_translation.ListExamplesRequest], + automl_translation.ListExamplesResponse, + ]: + r"""Return a callable for the list examples method over gRPC. + + Lists sentence pairs in the dataset. + + Returns: + Callable[[~.ListExamplesRequest], + ~.ListExamplesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_examples" not in self._stubs: + self._stubs["list_examples"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/ListExamples", + request_serializer=automl_translation.ListExamplesRequest.serialize, + response_deserializer=automl_translation.ListExamplesResponse.deserialize, + ) + return self._stubs["list_examples"] + + @property + def create_model( + self, + ) -> Callable[[automl_translation.CreateModelRequest], operations_pb2.Operation]: + r"""Return a callable for the create model method over gRPC. + + Creates a Model. + + Returns: + Callable[[~.CreateModelRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_model" not in self._stubs: + self._stubs["create_model"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/CreateModel", + request_serializer=automl_translation.CreateModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_model"] + + @property + def list_models( + self, + ) -> Callable[ + [automl_translation.ListModelsRequest], automl_translation.ListModelsResponse + ]: + r"""Return a callable for the list models method over gRPC. + + Lists models. + + Returns: + Callable[[~.ListModelsRequest], + ~.ListModelsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_models" not in self._stubs: + self._stubs["list_models"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/ListModels", + request_serializer=automl_translation.ListModelsRequest.serialize, + response_deserializer=automl_translation.ListModelsResponse.deserialize, + ) + return self._stubs["list_models"] + + @property + def get_model( + self, + ) -> Callable[[automl_translation.GetModelRequest], automl_translation.Model]: + r"""Return a callable for the get model method over gRPC. + + Gets a model. + + Returns: + Callable[[~.GetModelRequest], + ~.Model]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_model" not in self._stubs: + self._stubs["get_model"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/GetModel", + request_serializer=automl_translation.GetModelRequest.serialize, + response_deserializer=automl_translation.Model.deserialize, + ) + return self._stubs["get_model"] + + @property + def delete_model( + self, + ) -> Callable[[automl_translation.DeleteModelRequest], operations_pb2.Operation]: + r"""Return a callable for the delete model method over gRPC. + + Deletes a model. + + Returns: + Callable[[~.DeleteModelRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_model" not in self._stubs: + self._stubs["delete_model"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/DeleteModel", + request_serializer=automl_translation.DeleteModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_model"] + def close(self): self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def wait_operation( + self, + ) -> Callable[[operations_pb2.WaitOperationRequest], None]: + r"""Return a callable for the wait_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_operation" not in self._stubs: + self._stubs["wait_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/WaitOperation", + request_serializer=operations_pb2.WaitOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["wait_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + @property def kind(self) -> str: return "grpc" diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/grpc_asyncio.py b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/grpc_asyncio.py index 4d1fe33bc975..590a50916f7e 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/grpc_asyncio.py @@ -21,12 +21,20 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.translate_v3.types import adaptive_mt, translation_service +from google.cloud.translate_v3.types import ( + adaptive_mt, + automl_translation, + common, + translation_service, +) from .base import DEFAULT_CLIENT_INFO, TranslationServiceTransport from .grpc import TranslationServiceGrpcTransport @@ -286,6 +294,36 @@ def translate_text( ) return self._stubs["translate_text"] + @property + def romanize_text( + self, + ) -> Callable[ + [translation_service.RomanizeTextRequest], + Awaitable[translation_service.RomanizeTextResponse], + ]: + r"""Return a callable for the romanize text method over gRPC. + + Romanize input text written in non-Latin scripts to + Latin text. + + Returns: + Callable[[~.RomanizeTextRequest], + Awaitable[~.RomanizeTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "romanize_text" not in self._stubs: + self._stubs["romanize_text"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/RomanizeText", + request_serializer=translation_service.RomanizeTextRequest.serialize, + response_deserializer=translation_service.RomanizeTextResponse.deserialize, + ) + return self._stubs["romanize_text"] + @property def detect_language( self, @@ -479,6 +517,35 @@ def create_glossary( ) return self._stubs["create_glossary"] + @property + def update_glossary( + self, + ) -> Callable[ + [translation_service.UpdateGlossaryRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update glossary method over gRPC. + + Updates a glossary. A LRO is used since the update + can be async if the glossary's entry file is updated. + + Returns: + Callable[[~.UpdateGlossaryRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_glossary" not in self._stubs: + self._stubs["update_glossary"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/UpdateGlossary", + request_serializer=translation_service.UpdateGlossaryRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_glossary"] + @property def list_glossaries( self, @@ -569,6 +636,262 @@ def delete_glossary( ) return self._stubs["delete_glossary"] + @property + def get_glossary_entry( + self, + ) -> Callable[ + [translation_service.GetGlossaryEntryRequest], Awaitable[common.GlossaryEntry] + ]: + r"""Return a callable for the get glossary entry method over gRPC. + + Gets a single glossary entry by the given id. + + Returns: + Callable[[~.GetGlossaryEntryRequest], + Awaitable[~.GlossaryEntry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_glossary_entry" not in self._stubs: + self._stubs["get_glossary_entry"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/GetGlossaryEntry", + request_serializer=translation_service.GetGlossaryEntryRequest.serialize, + response_deserializer=common.GlossaryEntry.deserialize, + ) + return self._stubs["get_glossary_entry"] + + @property + def list_glossary_entries( + self, + ) -> Callable[ + [translation_service.ListGlossaryEntriesRequest], + Awaitable[translation_service.ListGlossaryEntriesResponse], + ]: + r"""Return a callable for the list glossary entries method over gRPC. + + List the entries for the glossary. + + Returns: + Callable[[~.ListGlossaryEntriesRequest], + Awaitable[~.ListGlossaryEntriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_glossary_entries" not in self._stubs: + self._stubs["list_glossary_entries"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/ListGlossaryEntries", + request_serializer=translation_service.ListGlossaryEntriesRequest.serialize, + response_deserializer=translation_service.ListGlossaryEntriesResponse.deserialize, + ) + return self._stubs["list_glossary_entries"] + + @property + def create_glossary_entry( + self, + ) -> Callable[ + [translation_service.CreateGlossaryEntryRequest], + Awaitable[common.GlossaryEntry], + ]: + r"""Return a callable for the create glossary entry method over gRPC. + + Creates a glossary entry. + + Returns: + Callable[[~.CreateGlossaryEntryRequest], + Awaitable[~.GlossaryEntry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_glossary_entry" not in self._stubs: + self._stubs["create_glossary_entry"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/CreateGlossaryEntry", + request_serializer=translation_service.CreateGlossaryEntryRequest.serialize, + response_deserializer=common.GlossaryEntry.deserialize, + ) + return self._stubs["create_glossary_entry"] + + @property + def update_glossary_entry( + self, + ) -> Callable[ + [translation_service.UpdateGlossaryEntryRequest], + Awaitable[common.GlossaryEntry], + ]: + r"""Return a callable for the update glossary entry method over gRPC. + + Updates a glossary entry. + + Returns: + Callable[[~.UpdateGlossaryEntryRequest], + Awaitable[~.GlossaryEntry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_glossary_entry" not in self._stubs: + self._stubs["update_glossary_entry"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/UpdateGlossaryEntry", + request_serializer=translation_service.UpdateGlossaryEntryRequest.serialize, + response_deserializer=common.GlossaryEntry.deserialize, + ) + return self._stubs["update_glossary_entry"] + + @property + def delete_glossary_entry( + self, + ) -> Callable[ + [translation_service.DeleteGlossaryEntryRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete glossary entry method over gRPC. + + Deletes a single entry from the glossary + + Returns: + Callable[[~.DeleteGlossaryEntryRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_glossary_entry" not in self._stubs: + self._stubs["delete_glossary_entry"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/DeleteGlossaryEntry", + request_serializer=translation_service.DeleteGlossaryEntryRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_glossary_entry"] + + @property + def create_dataset( + self, + ) -> Callable[ + [automl_translation.CreateDatasetRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create dataset method over gRPC. + + Creates a Dataset. + + Returns: + Callable[[~.CreateDatasetRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_dataset" not in self._stubs: + self._stubs["create_dataset"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/CreateDataset", + request_serializer=automl_translation.CreateDatasetRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_dataset"] + + @property + def get_dataset( + self, + ) -> Callable[ + [automl_translation.GetDatasetRequest], Awaitable[automl_translation.Dataset] + ]: + r"""Return a callable for the get dataset method over gRPC. + + Gets a Dataset. 
+ + Returns: + Callable[[~.GetDatasetRequest], + Awaitable[~.Dataset]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dataset" not in self._stubs: + self._stubs["get_dataset"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/GetDataset", + request_serializer=automl_translation.GetDatasetRequest.serialize, + response_deserializer=automl_translation.Dataset.deserialize, + ) + return self._stubs["get_dataset"] + + @property + def list_datasets( + self, + ) -> Callable[ + [automl_translation.ListDatasetsRequest], + Awaitable[automl_translation.ListDatasetsResponse], + ]: + r"""Return a callable for the list datasets method over gRPC. + + Lists datasets. + + Returns: + Callable[[~.ListDatasetsRequest], + Awaitable[~.ListDatasetsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_datasets" not in self._stubs: + self._stubs["list_datasets"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/ListDatasets", + request_serializer=automl_translation.ListDatasetsRequest.serialize, + response_deserializer=automl_translation.ListDatasetsResponse.deserialize, + ) + return self._stubs["list_datasets"] + + @property + def delete_dataset( + self, + ) -> Callable[ + [automl_translation.DeleteDatasetRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete dataset method over gRPC. + + Deletes a dataset and all of its contents. + + Returns: + Callable[[~.DeleteDatasetRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_dataset" not in self._stubs: + self._stubs["delete_dataset"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/DeleteDataset", + request_serializer=automl_translation.DeleteDatasetRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_dataset"] + @property def create_adaptive_mt_dataset( self, @@ -861,6 +1184,205 @@ def list_adaptive_mt_sentences( ) return self._stubs["list_adaptive_mt_sentences"] + @property + def import_data( + self, + ) -> Callable[ + [automl_translation.ImportDataRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the import data method over gRPC. + + Import sentence pairs into translation Dataset. + + Returns: + Callable[[~.ImportDataRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "import_data" not in self._stubs: + self._stubs["import_data"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/ImportData", + request_serializer=automl_translation.ImportDataRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_data"] + + @property + def export_data( + self, + ) -> Callable[ + [automl_translation.ExportDataRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the export data method over gRPC. + + Exports dataset's data to the provided output + location. + + Returns: + Callable[[~.ExportDataRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_data" not in self._stubs: + self._stubs["export_data"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/ExportData", + request_serializer=automl_translation.ExportDataRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_data"] + + @property + def list_examples( + self, + ) -> Callable[ + [automl_translation.ListExamplesRequest], + Awaitable[automl_translation.ListExamplesResponse], + ]: + r"""Return a callable for the list examples method over gRPC. + + Lists sentence pairs in the dataset. + + Returns: + Callable[[~.ListExamplesRequest], + Awaitable[~.ListExamplesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_examples" not in self._stubs: + self._stubs["list_examples"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/ListExamples", + request_serializer=automl_translation.ListExamplesRequest.serialize, + response_deserializer=automl_translation.ListExamplesResponse.deserialize, + ) + return self._stubs["list_examples"] + + @property + def create_model( + self, + ) -> Callable[ + [automl_translation.CreateModelRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create model method over gRPC. + + Creates a Model. + + Returns: + Callable[[~.CreateModelRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_model" not in self._stubs: + self._stubs["create_model"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/CreateModel", + request_serializer=automl_translation.CreateModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_model"] + + @property + def list_models( + self, + ) -> Callable[ + [automl_translation.ListModelsRequest], + Awaitable[automl_translation.ListModelsResponse], + ]: + r"""Return a callable for the list models method over gRPC. + + Lists models. 
+ + Returns: + Callable[[~.ListModelsRequest], + Awaitable[~.ListModelsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_models" not in self._stubs: + self._stubs["list_models"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/ListModels", + request_serializer=automl_translation.ListModelsRequest.serialize, + response_deserializer=automl_translation.ListModelsResponse.deserialize, + ) + return self._stubs["list_models"] + + @property + def get_model( + self, + ) -> Callable[ + [automl_translation.GetModelRequest], Awaitable[automl_translation.Model] + ]: + r"""Return a callable for the get model method over gRPC. + + Gets a model. + + Returns: + Callable[[~.GetModelRequest], + Awaitable[~.Model]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_model" not in self._stubs: + self._stubs["get_model"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/GetModel", + request_serializer=automl_translation.GetModelRequest.serialize, + response_deserializer=automl_translation.Model.deserialize, + ) + return self._stubs["get_model"] + + @property + def delete_model( + self, + ) -> Callable[ + [automl_translation.DeleteModelRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete model method over gRPC. + + Deletes a model. + + Returns: + Callable[[~.DeleteModelRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_model" not in self._stubs: + self._stubs["delete_model"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/DeleteModel", + request_serializer=automl_translation.DeleteModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_model"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -869,6 +1391,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.romanize_text: gapic_v1.method_async.wrap_method( + self.romanize_text, + default_timeout=None, + client_info=client_info, + ), self.detect_language: gapic_v1.method_async.wrap_method( self.detect_language, default_timeout=600.0, @@ -909,6 +1436,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.update_glossary: gapic_v1.method_async.wrap_method( + self.update_glossary, + default_timeout=None, + client_info=client_info, + ), self.list_glossaries: gapic_v1.method_async.wrap_method( self.list_glossaries, default_retry=retries.AsyncRetry( @@ -954,6 +1486,51 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.get_glossary_entry: gapic_v1.method_async.wrap_method( + self.get_glossary_entry, + default_timeout=None, + client_info=client_info, + ), + self.list_glossary_entries: gapic_v1.method_async.wrap_method( + self.list_glossary_entries, + default_timeout=None, + client_info=client_info, + ), + self.create_glossary_entry: gapic_v1.method_async.wrap_method( + self.create_glossary_entry, + default_timeout=None, + client_info=client_info, + ), + self.update_glossary_entry: gapic_v1.method_async.wrap_method( + self.update_glossary_entry, + default_timeout=None, + client_info=client_info, + ), + self.delete_glossary_entry: gapic_v1.method_async.wrap_method( + self.delete_glossary_entry, + default_timeout=None, + client_info=client_info, + ), + self.create_dataset: gapic_v1.method_async.wrap_method( + self.create_dataset, + default_timeout=None, + client_info=client_info, + ), + self.get_dataset: gapic_v1.method_async.wrap_method( + self.get_dataset, + default_timeout=None, + client_info=client_info, + ), + self.list_datasets: gapic_v1.method_async.wrap_method( + self.list_datasets, + default_timeout=None, + client_info=client_info, + ), + self.delete_dataset: gapic_v1.method_async.wrap_method( + self.delete_dataset, + default_timeout=None, + client_info=client_info, + ), self.create_adaptive_mt_dataset: gapic_v1.method_async.wrap_method( self.create_adaptive_mt_dataset, default_timeout=None, @@ -1004,10 +1581,168 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.import_data: gapic_v1.method_async.wrap_method( + self.import_data, + default_timeout=None, + client_info=client_info, + ), + self.export_data: gapic_v1.method_async.wrap_method( + self.export_data, + default_timeout=None, + client_info=client_info, + ), + self.list_examples: gapic_v1.method_async.wrap_method( + self.list_examples, + default_timeout=None, + client_info=client_info, + ), + self.create_model: gapic_v1.method_async.wrap_method( + self.create_model, + default_timeout=None, + client_info=client_info, + ), + self.list_models: gapic_v1.method_async.wrap_method( + self.list_models, + default_timeout=None, + client_info=client_info, + ), + 
self.get_model: gapic_v1.method_async.wrap_method(
+                self.get_model,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.delete_model: gapic_v1.method_async.wrap_method(
+                self.delete_model,
+                default_timeout=None,
+                client_info=client_info,
+            ),
        }
    def close(self):
        return self.grpc_channel.close()
+    @property
+    def delete_operation(
+        self,
+    ) -> Callable[[operations_pb2.DeleteOperationRequest], None]:
+        r"""Return a callable for the delete_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_operation" not in self._stubs:
+            self._stubs["delete_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/DeleteOperation",
+                request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["delete_operation"]
+
+    @property
+    def cancel_operation(
+        self,
+    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
+        r"""Return a callable for the cancel_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "cancel_operation" not in self._stubs:
+            self._stubs["cancel_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/CancelOperation",
+                request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["cancel_operation"]
+
+    @property
+    def wait_operation(
+        self,
+    ) -> Callable[[operations_pb2.WaitOperationRequest], None]:
+        r"""Return a callable for the wait_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "wait_operation" not in self._stubs:
+            self._stubs["wait_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/WaitOperation",
+                request_serializer=operations_pb2.WaitOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["wait_operation"]
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def list_operations(
+        self,
+    ) -> Callable[
+        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
+    ]:
+        r"""Return a callable for the list_operations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + __all__ = ("TranslationServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/rest.py b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/rest.py index 5060cb939f82..1fdaa26ce7cb 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/rest.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/rest.py @@ -32,6 +32,9 @@ from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version @@ -45,7 +48,12 @@ from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore -from google.cloud.translate_v3.types import adaptive_mt, translation_service +from google.cloud.translate_v3.types import ( + adaptive_mt, + automl_translation, + common, + translation_service, +) from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO from .base import TranslationServiceTransport @@ -104,6 +112,14 @@ def post_create_adaptive_mt_dataset(self, response): logging.log(f"Received response: {response}") return response + def pre_create_dataset(self, request, metadata): + logging.log(f"Received request: 
{request}") + return request, metadata + + def post_create_dataset(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_glossary(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -112,6 +128,22 @@ def post_create_glossary(self, response): logging.log(f"Received response: {response}") return response + def pre_create_glossary_entry(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_glossary_entry(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_model(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_adaptive_mt_dataset(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -120,6 +152,14 @@ def pre_delete_adaptive_mt_file(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata + def pre_delete_dataset(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_dataset(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_glossary(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -128,6 +168,18 @@ def post_delete_glossary(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_glossary_entry(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_model(self, response): + logging.log(f"Received response: {response}") + return response + def pre_detect_language(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -136,6 +188,14 @@ def post_detect_language(self, response): logging.log(f"Received response: {response}") return response + def pre_export_data(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_data(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_adaptive_mt_dataset(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -152,6 +212,14 @@ def post_get_adaptive_mt_file(self, response): logging.log(f"Received response: {response}") return response + def pre_get_dataset(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_dataset(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_glossary(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -160,6 +228,22 @@ def post_get_glossary(self, response): logging.log(f"Received response: {response}") return response + def pre_get_glossary_entry(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_glossary_entry(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_model(self, request, metadata): + logging.log(f"Received request: {request}") + return 
request, metadata + + def post_get_model(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_supported_languages(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -176,6 +260,14 @@ def post_import_adaptive_mt_file(self, response): logging.log(f"Received response: {response}") return response + def pre_import_data(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_import_data(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_adaptive_mt_datasets(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -200,6 +292,22 @@ def post_list_adaptive_mt_sentences(self, response): logging.log(f"Received response: {response}") return response + def pre_list_datasets(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_datasets(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_examples(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_examples(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_glossaries(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -208,6 +316,30 @@ def post_list_glossaries(self, response): logging.log(f"Received response: {response}") return response + def pre_list_glossary_entries(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_glossary_entries(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_models(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_models(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_romanize_text(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_romanize_text(self, response): + logging.log(f"Received response: {response}") + return response + def pre_translate_document(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -224,6 +356,22 @@ def post_translate_text(self, response): logging.log(f"Received response: {response}") return response + def pre_update_glossary(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_glossary(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_glossary_entry(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_glossary_entry(self, response): + logging.log(f"Received response: {response}") + return response + transport = TranslationServiceRestTransport(interceptor=MyCustomTranslationServiceInterceptor()) client = TranslationServiceClient(transport=transport) @@ -326,6 +474,29 @@ def post_create_adaptive_mt_dataset( """ return response + def pre_create_dataset( + self, + request: automl_translation.CreateDatasetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[automl_translation.CreateDatasetRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_dataset + + Override in a 
subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_create_dataset( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_dataset + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. + """ + return response + def pre_create_glossary( self, request: translation_service.CreateGlossaryRequest, @@ -349,6 +520,54 @@ def post_create_glossary( """ return response + def pre_create_glossary_entry( + self, + request: translation_service.CreateGlossaryEntryRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + translation_service.CreateGlossaryEntryRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_glossary_entry + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_create_glossary_entry( + self, response: common.GlossaryEntry + ) -> common.GlossaryEntry: + """Post-rpc interceptor for create_glossary_entry + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. + """ + return response + + def pre_create_model( + self, + request: automl_translation.CreateModelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[automl_translation.CreateModelRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_create_model( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_model + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. + """ + return response + def pre_delete_adaptive_mt_dataset( self, request: adaptive_mt.DeleteAdaptiveMtDatasetRequest, @@ -373,6 +592,29 @@ def pre_delete_adaptive_mt_file( """ return request, metadata + def pre_delete_dataset( + self, + request: automl_translation.DeleteDatasetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[automl_translation.DeleteDatasetRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_dataset + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_delete_dataset( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_dataset + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. 
+ """ + return response + def pre_delete_glossary( self, request: translation_service.DeleteGlossaryRequest, @@ -396,6 +638,43 @@ def post_delete_glossary( """ return response + def pre_delete_glossary_entry( + self, + request: translation_service.DeleteGlossaryEntryRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + translation_service.DeleteGlossaryEntryRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_glossary_entry + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def pre_delete_model( + self, + request: automl_translation.DeleteModelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[automl_translation.DeleteModelRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_delete_model( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_model + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. + """ + return response + def pre_detect_language( self, request: translation_service.DetectLanguageRequest, @@ -419,6 +698,29 @@ def post_detect_language( """ return response + def pre_export_data( + self, + request: automl_translation.ExportDataRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[automl_translation.ExportDataRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for export_data + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_export_data( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for export_data + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. + """ + return response + def pre_get_adaptive_mt_dataset( self, request: adaptive_mt.GetAdaptiveMtDatasetRequest, @@ -465,6 +767,29 @@ def post_get_adaptive_mt_file( """ return response + def pre_get_dataset( + self, + request: automl_translation.GetDatasetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[automl_translation.GetDatasetRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_dataset + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_get_dataset( + self, response: automl_translation.Dataset + ) -> automl_translation.Dataset: + """Post-rpc interceptor for get_dataset + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. 
+ """ + return response + def pre_get_glossary( self, request: translation_service.GetGlossaryRequest, @@ -488,24 +813,22 @@ def post_get_glossary( """ return response - def pre_get_supported_languages( + def pre_get_glossary_entry( self, - request: translation_service.GetSupportedLanguagesRequest, + request: translation_service.GetGlossaryEntryRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[ - translation_service.GetSupportedLanguagesRequest, Sequence[Tuple[str, str]] - ]: - """Pre-rpc interceptor for get_supported_languages + ) -> Tuple[translation_service.GetGlossaryEntryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_glossary_entry Override in a subclass to manipulate the request or metadata before they are sent to the TranslationService server. """ return request, metadata - def post_get_supported_languages( - self, response: translation_service.SupportedLanguages - ) -> translation_service.SupportedLanguages: - """Post-rpc interceptor for get_supported_languages + def post_get_glossary_entry( + self, response: common.GlossaryEntry + ) -> common.GlossaryEntry: + """Post-rpc interceptor for get_glossary_entry Override in a subclass to manipulate the response after it is returned by the TranslationService server but before @@ -513,22 +836,22 @@ def post_get_supported_languages( """ return response - def pre_import_adaptive_mt_file( + def pre_get_model( self, - request: adaptive_mt.ImportAdaptiveMtFileRequest, + request: automl_translation.GetModelRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[adaptive_mt.ImportAdaptiveMtFileRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for import_adaptive_mt_file + ) -> Tuple[automl_translation.GetModelRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_model Override in a subclass to manipulate the request or metadata before they are sent to the TranslationService server. """ return request, metadata - def post_import_adaptive_mt_file( - self, response: adaptive_mt.ImportAdaptiveMtFileResponse - ) -> adaptive_mt.ImportAdaptiveMtFileResponse: - """Post-rpc interceptor for import_adaptive_mt_file + def post_get_model( + self, response: automl_translation.Model + ) -> automl_translation.Model: + """Post-rpc interceptor for get_model Override in a subclass to manipulate the response after it is returned by the TranslationService server but before @@ -536,11 +859,82 @@ def post_import_adaptive_mt_file( """ return response - def pre_list_adaptive_mt_datasets( + def pre_get_supported_languages( self, - request: adaptive_mt.ListAdaptiveMtDatasetsRequest, + request: translation_service.GetSupportedLanguagesRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[adaptive_mt.ListAdaptiveMtDatasetsRequest, Sequence[Tuple[str, str]]]: + ) -> Tuple[ + translation_service.GetSupportedLanguagesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_supported_languages + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_get_supported_languages( + self, response: translation_service.SupportedLanguages + ) -> translation_service.SupportedLanguages: + """Post-rpc interceptor for get_supported_languages + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. 
+ """ + return response + + def pre_import_adaptive_mt_file( + self, + request: adaptive_mt.ImportAdaptiveMtFileRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[adaptive_mt.ImportAdaptiveMtFileRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for import_adaptive_mt_file + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_import_adaptive_mt_file( + self, response: adaptive_mt.ImportAdaptiveMtFileResponse + ) -> adaptive_mt.ImportAdaptiveMtFileResponse: + """Post-rpc interceptor for import_adaptive_mt_file + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. + """ + return response + + def pre_import_data( + self, + request: automl_translation.ImportDataRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[automl_translation.ImportDataRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for import_data + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_import_data( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for import_data + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. + """ + return response + + def pre_list_adaptive_mt_datasets( + self, + request: adaptive_mt.ListAdaptiveMtDatasetsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[adaptive_mt.ListAdaptiveMtDatasetsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_adaptive_mt_datasets Override in a subclass to manipulate the request or metadata @@ -605,6 +999,52 @@ def post_list_adaptive_mt_sentences( """ return response + def pre_list_datasets( + self, + request: automl_translation.ListDatasetsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[automl_translation.ListDatasetsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_datasets + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_list_datasets( + self, response: automl_translation.ListDatasetsResponse + ) -> automl_translation.ListDatasetsResponse: + """Post-rpc interceptor for list_datasets + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. + """ + return response + + def pre_list_examples( + self, + request: automl_translation.ListExamplesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[automl_translation.ListExamplesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_examples + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_list_examples( + self, response: automl_translation.ListExamplesResponse + ) -> automl_translation.ListExamplesResponse: + """Post-rpc interceptor for list_examples + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. 
+ """ + return response + def pre_list_glossaries( self, request: translation_service.ListGlossariesRequest, @@ -628,6 +1068,77 @@ def post_list_glossaries( """ return response + def pre_list_glossary_entries( + self, + request: translation_service.ListGlossaryEntriesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + translation_service.ListGlossaryEntriesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_glossary_entries + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_list_glossary_entries( + self, response: translation_service.ListGlossaryEntriesResponse + ) -> translation_service.ListGlossaryEntriesResponse: + """Post-rpc interceptor for list_glossary_entries + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. + """ + return response + + def pre_list_models( + self, + request: automl_translation.ListModelsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[automl_translation.ListModelsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_models + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_list_models( + self, response: automl_translation.ListModelsResponse + ) -> automl_translation.ListModelsResponse: + """Post-rpc interceptor for list_models + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. + """ + return response + + def pre_romanize_text( + self, + request: translation_service.RomanizeTextRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[translation_service.RomanizeTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for romanize_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_romanize_text( + self, response: translation_service.RomanizeTextResponse + ) -> translation_service.RomanizeTextResponse: + """Post-rpc interceptor for romanize_text + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. + """ + return response + def pre_translate_document( self, request: translation_service.TranslateDocumentRequest, @@ -674,92 +1185,297 @@ def post_translate_text( """ return response + def pre_update_glossary( + self, + request: translation_service.UpdateGlossaryRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[translation_service.UpdateGlossaryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_glossary -@dataclasses.dataclass -class TranslationServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: TranslationServiceRestInterceptor + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + def post_update_glossary( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_glossary -class TranslationServiceRestTransport(TranslationServiceTransport): - """REST backend transport for TranslationService. 
+ Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. + """ + return response - Provides natural language translation operations. + def pre_update_glossary_entry( + self, + request: translation_service.UpdateGlossaryEntryRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + translation_service.UpdateGlossaryEntryRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_glossary_entry - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata - It sends JSON representations of protocol buffers over HTTP/1.1 + def post_update_glossary_entry( + self, response: common.GlossaryEntry + ) -> common.GlossaryEntry: + """Post-rpc interceptor for update_glossary_entry - """ + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. + """ + return response - def __init__( + def pre_get_location( self, - *, - host: str = "translate.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[TranslationServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location - Args: - host (Optional[str]): - The hostname to connect to (default: 'translate.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER + return response - url_match_items = maybe_url_match.groupdict() + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code.
+ """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. + """ + return response + + def pre_wait_operation( + self, + request: operations_pb2.WaitOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.WaitOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for wait_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_wait_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for wait_operation + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class TranslationServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: TranslationServiceRestInterceptor + + +class TranslationServiceRestTransport(TranslationServiceTransport): + """REST backend transport for TranslationService. + + Provides natural language translation operations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "translate.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[TranslationServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'translate.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, always_use_jwt_access=always_use_jwt_access, api_audience=api_audience, ) @@ -1213,9 +1929,9 @@ def __call__( resp = self._interceptor.post_create_adaptive_mt_dataset(resp) return resp - class _CreateGlossary(TranslationServiceRestStub): + class _CreateDataset(TranslationServiceRestStub): def __hash__(self): - return hash("CreateGlossary") + return hash("CreateDataset") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1229,17 +1945,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: translation_service.CreateGlossaryRequest, + request: automl_translation.CreateDatasetRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the create glossary method over HTTP. + r"""Call the create dataset method over HTTP. Args: - request (~.translation_service.CreateGlossaryRequest): - The request object. Request message for CreateGlossary. + request (~.automl_translation.CreateDatasetRequest): + The request object. Request message for CreateDataset. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request.
@@ -1257,12 +1973,12 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v3/{parent=projects/*/locations/*}/glossaries", - "body": "glossary", + "uri": "/v3/{parent=projects/*/locations/*}/datasets", + "body": "dataset", }, ] - request, metadata = self._interceptor.pre_create_glossary(request, metadata) - pb_request = translation_service.CreateGlossaryRequest.pb(request) + request, metadata = self._interceptor.pre_create_dataset(request, metadata) + pb_request = automl_translation.CreateDatasetRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -1303,12 +2019,12 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_glossary(resp) + resp = self._interceptor.post_create_dataset(resp) return resp - class _DeleteAdaptiveMtDataset(TranslationServiceRestStub): + class _CreateGlossary(TranslationServiceRestStub): def __hash__(self): - return hash("DeleteAdaptiveMtDataset") + return hash("CreateGlossary") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1322,38 +2038,47 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: adaptive_mt.DeleteAdaptiveMtDatasetRequest, + request: translation_service.CreateGlossaryRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ): - r"""Call the delete adaptive mt - dataset method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the create glossary method over HTTP. + + Args: + request (~.translation_service.CreateGlossaryRequest): + The request object. Request message for CreateGlossary. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. - Args: - request (~.adaptive_mt.DeleteAdaptiveMtDatasetRequest): - The request object. Request message for deleting an - AdaptiveMtDataset. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
""" http_options: List[Dict[str, str]] = [ { - "method": "delete", - "uri": "/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*}", + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*}/glossaries", + "body": "glossary", }, ] - request, metadata = self._interceptor.pre_delete_adaptive_mt_dataset( - request, metadata - ) - pb_request = adaptive_mt.DeleteAdaptiveMtDatasetRequest.pb(request) + request, metadata = self._interceptor.pre_create_glossary(request, metadata) + pb_request = translation_service.CreateGlossaryRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1376,6 +2101,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1383,9 +2109,15 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _DeleteAdaptiveMtFile(TranslationServiceRestStub): + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_glossary(resp) + return resp + + class _CreateGlossaryEntry(TranslationServiceRestStub): def __hash__(self): - return hash("DeleteAdaptiveMtFile") + return hash("CreateGlossaryEntry") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1399,37 +2131,49 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: adaptive_mt.DeleteAdaptiveMtFileRequest, + request: translation_service.CreateGlossaryEntryRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ): - r"""Call the delete adaptive mt file method over HTTP. + ) -> common.GlossaryEntry: + r"""Call the create glossary entry method over HTTP. Args: - request (~.adaptive_mt.DeleteAdaptiveMtFileRequest): - The request object. The request for deleting an - AdaptiveMt file. + request (~.translation_service.CreateGlossaryEntryRequest): + The request object. Request message for + CreateGlossaryEntry retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + + Returns: + ~.common.GlossaryEntry: + Represents a single entry in a + glossary. 
+ """ http_options: List[Dict[str, str]] = [ { - "method": "delete", - "uri": "/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*/adaptiveMtFiles/*}", + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/glossaries/*}/glossaryEntries", + "body": "glossary_entry", }, ] - request, metadata = self._interceptor.pre_delete_adaptive_mt_file( + request, metadata = self._interceptor.pre_create_glossary_entry( request, metadata ) - pb_request = adaptive_mt.DeleteAdaptiveMtFileRequest.pb(request) + pb_request = translation_service.CreateGlossaryEntryRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1452,6 +2196,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1459,9 +2204,17 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _DeleteGlossary(TranslationServiceRestStub): + # Return the response + resp = common.GlossaryEntry() + pb_resp = common.GlossaryEntry.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_glossary_entry(resp) + return resp + + class _CreateModel(TranslationServiceRestStub): def __hash__(self): - return hash("DeleteGlossary") + return hash("CreateModel") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1475,17 +2228,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: translation_service.DeleteGlossaryRequest, + request: automl_translation.CreateModelRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the delete glossary method over HTTP. + r"""Call the create model method over HTTP. Args: - request (~.translation_service.DeleteGlossaryRequest): - The request object. Request message for DeleteGlossary. + request (~.automl_translation.CreateModelRequest): + The request object. Request message for CreateModel. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -1502,14 +2255,20 @@ def __call__( http_options: List[Dict[str, str]] = [ { - "method": "delete", - "uri": "/v3/{name=projects/*/locations/*/glossaries/*}", + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*}/models", + "body": "model", }, ] - request, metadata = self._interceptor.pre_delete_glossary(request, metadata) - pb_request = translation_service.DeleteGlossaryRequest.pb(request) + request, metadata = self._interceptor.pre_create_model(request, metadata) + pb_request = automl_translation.CreateModelRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1532,6 +2291,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1542,12 +2302,12 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_glossary(resp) + resp = self._interceptor.post_create_model(resp) return resp - class _DetectLanguage(TranslationServiceRestStub): + class _DeleteAdaptiveMtDataset(TranslationServiceRestStub): def __hash__(self): - return hash("DetectLanguage") + return hash("DeleteAdaptiveMtDataset") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1561,52 +2321,38 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: translation_service.DetectLanguageRequest, + request: adaptive_mt.DeleteAdaptiveMtDatasetRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> translation_service.DetectLanguageResponse: - r"""Call the detect language method over HTTP. - - Args: - request (~.translation_service.DetectLanguageRequest): - The request object. The request message for language - detection. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.translation_service.DetectLanguageResponse: - The response message for language - detection. + ): + r"""Call the delete adaptive mt + dataset method over HTTP. + Args: + request (~.adaptive_mt.DeleteAdaptiveMtDatasetRequest): + The request object. Request message for deleting an + AdaptiveMtDataset. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
""" http_options: List[Dict[str, str]] = [ { - "method": "post", - "uri": "/v3/{parent=projects/*/locations/*}:detectLanguage", - "body": "*", - }, - { - "method": "post", - "uri": "/v3/{parent=projects/*}:detectLanguage", - "body": "*", + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*}", }, ] - request, metadata = self._interceptor.pre_detect_language(request, metadata) - pb_request = translation_service.DetectLanguageRequest.pb(request) + request, metadata = self._interceptor.pre_delete_adaptive_mt_dataset( + request, metadata + ) + pb_request = adaptive_mt.DeleteAdaptiveMtDatasetRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1629,7 +2375,6 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1637,17 +2382,9 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - # Return the response - resp = translation_service.DetectLanguageResponse() - pb_resp = translation_service.DetectLanguageResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_detect_language(resp) - return resp - - class _GetAdaptiveMtDataset(TranslationServiceRestStub): + class _DeleteAdaptiveMtFile(TranslationServiceRestStub): def __hash__(self): - return hash("GetAdaptiveMtDataset") + return hash("DeleteAdaptiveMtFile") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1661,39 +2398,35 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: adaptive_mt.GetAdaptiveMtDatasetRequest, + request: adaptive_mt.DeleteAdaptiveMtFileRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> adaptive_mt.AdaptiveMtDataset: - r"""Call the get adaptive mt dataset method over HTTP. + ): + r"""Call the delete adaptive mt file method over HTTP. Args: - request (~.adaptive_mt.GetAdaptiveMtDatasetRequest): - The request object. Request message for getting an - Adaptive MT dataset. + request (~.adaptive_mt.DeleteAdaptiveMtFileRequest): + The request object. The request for deleting an + AdaptiveMt file. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - ~.adaptive_mt.AdaptiveMtDataset: - An Adaptive MT Dataset. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*}", + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*/adaptiveMtFiles/*}", }, ] - request, metadata = self._interceptor.pre_get_adaptive_mt_dataset( + request, metadata = self._interceptor.pre_delete_adaptive_mt_file( request, metadata ) - pb_request = adaptive_mt.GetAdaptiveMtDatasetRequest.pb(request) + pb_request = adaptive_mt.DeleteAdaptiveMtFileRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -1725,17 +2458,9 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - # Return the response - resp = adaptive_mt.AdaptiveMtDataset() - pb_resp = adaptive_mt.AdaptiveMtDataset.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_adaptive_mt_dataset(resp) - return resp - - class _GetAdaptiveMtFile(TranslationServiceRestStub): + class _DeleteDataset(TranslationServiceRestStub): def __hash__(self): - return hash("GetAdaptiveMtFile") + return hash("DeleteDataset") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1749,18 +2474,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: adaptive_mt.GetAdaptiveMtFileRequest, + request: automl_translation.DeleteDatasetRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> adaptive_mt.AdaptiveMtFile: - r"""Call the get adaptive mt file method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the delete dataset method over HTTP. Args: - request (~.adaptive_mt.GetAdaptiveMtFileRequest): - The request object. The request for getting an - AdaptiveMtFile. + request (~.automl_translation.DeleteDatasetRequest): + The request object. Request message for DeleteDataset. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1768,20 +2492,21 @@ def __call__( sent along with the request as metadata. Returns: - ~.adaptive_mt.AdaptiveMtFile: - An AdaptiveMtFile. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ """ http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*/adaptiveMtFiles/*}", + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/datasets/*}", }, ] - request, metadata = self._interceptor.pre_get_adaptive_mt_file( - request, metadata - ) - pb_request = adaptive_mt.GetAdaptiveMtFileRequest.pb(request) + request, metadata = self._interceptor.pre_delete_dataset(request, metadata) + pb_request = automl_translation.DeleteDatasetRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -1814,16 +2539,14 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = adaptive_mt.AdaptiveMtFile() - pb_resp = adaptive_mt.AdaptiveMtFile.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_adaptive_mt_file(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_dataset(resp) return resp - class _GetGlossary(TranslationServiceRestStub): + class _DeleteGlossary(TranslationServiceRestStub): def __hash__(self): - return hash("GetGlossary") + return hash("DeleteGlossary") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1837,17 +2560,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: translation_service.GetGlossaryRequest, + request: translation_service.DeleteGlossaryRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> translation_service.Glossary: - r"""Call the get glossary method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the delete glossary method over HTTP. Args: - request (~.translation_service.GetGlossaryRequest): - The request object. Request message for GetGlossary. + request (~.translation_service.DeleteGlossaryRequest): + The request object. Request message for DeleteGlossary. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1855,20 +2578,21 @@ def __call__( sent along with the request as metadata. Returns: - ~.translation_service.Glossary: - Represents a glossary built from - user-provided data. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", + "method": "delete", "uri": "/v3/{name=projects/*/locations/*/glossaries/*}", }, ] - request, metadata = self._interceptor.pre_get_glossary(request, metadata) - pb_request = translation_service.GetGlossaryRequest.pb(request) + request, metadata = self._interceptor.pre_delete_glossary(request, metadata) + pb_request = translation_service.DeleteGlossaryRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -1901,16 +2625,14 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = translation_service.Glossary() - pb_resp = translation_service.Glossary.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_glossary(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_glossary(resp) return resp - class _GetSupportedLanguages(TranslationServiceRestStub): + class _DeleteGlossaryEntry(TranslationServiceRestStub): def __hash__(self): - return hash("GetSupportedLanguages") + return hash("DeleteGlossaryEntry") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1924,45 +2646,35 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: translation_service.GetSupportedLanguagesRequest, + request: translation_service.DeleteGlossaryEntryRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> translation_service.SupportedLanguages: - r"""Call the get supported languages method over HTTP. + ): + r"""Call the delete glossary entry method over HTTP. Args: - request (~.translation_service.GetSupportedLanguagesRequest): - The request object. The request message for discovering - supported languages. + request (~.translation_service.DeleteGlossaryEntryRequest): + The request object. Request message for Delete Glossary + Entry retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - ~.translation_service.SupportedLanguages: - The response message for discovering - supported languages. 
- """ http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v3/{parent=projects/*/locations/*}/supportedLanguages", - }, - { - "method": "get", - "uri": "/v3/{parent=projects/*}/supportedLanguages", + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/glossaries/*/glossaryEntries/*}", }, ] - request, metadata = self._interceptor.pre_get_supported_languages( + request, metadata = self._interceptor.pre_delete_glossary_entry( request, metadata ) - pb_request = translation_service.GetSupportedLanguagesRequest.pb(request) + pb_request = translation_service.DeleteGlossaryEntryRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -1994,17 +2706,9 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - # Return the response - resp = translation_service.SupportedLanguages() - pb_resp = translation_service.SupportedLanguages.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_supported_languages(resp) - return resp - - class _ImportAdaptiveMtFile(TranslationServiceRestStub): + class _DeleteModel(TranslationServiceRestStub): def __hash__(self): - return hash("ImportAdaptiveMtFile") + return hash("DeleteModel") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2018,19 +2722,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: adaptive_mt.ImportAdaptiveMtFileRequest, + request: automl_translation.DeleteModelRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> adaptive_mt.ImportAdaptiveMtFileResponse: - r"""Call the import adaptive mt file method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the delete model method over HTTP. Args: - request (~.adaptive_mt.ImportAdaptiveMtFileRequest): - The request object. The request for importing an - AdaptiveMt file along with its - sentences. + request (~.automl_translation.DeleteModelRequest): + The request object. Request message for DeleteModel. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2038,32 +2740,25 @@ def __call__( sent along with the request as metadata. Returns: - ~.adaptive_mt.ImportAdaptiveMtFileResponse: - The response for importing an - AdaptiveMtFile + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "post", - "uri": "/v3/{parent=projects/*/locations/*/adaptiveMtDatasets/*}:importAdaptiveMtFile", - "body": "*", + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/models/*}", }, ] - request, metadata = self._interceptor.pre_import_adaptive_mt_file( - request, metadata - ) - pb_request = adaptive_mt.ImportAdaptiveMtFileRequest.pb(request) + request, metadata = self._interceptor.pre_delete_model(request, metadata) + pb_request = automl_translation.DeleteModelRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params query_params = json.loads( @@ -2084,7 +2779,6 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2093,16 +2787,14 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = adaptive_mt.ImportAdaptiveMtFileResponse() - pb_resp = adaptive_mt.ImportAdaptiveMtFileResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_import_adaptive_mt_file(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_model(resp) return resp - class _ListAdaptiveMtDatasets(TranslationServiceRestStub): + class _DetectLanguage(TranslationServiceRestStub): def __hash__(self): - return hash("ListAdaptiveMtDatasets") + return hash("DetectLanguage") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2116,19 +2808,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: adaptive_mt.ListAdaptiveMtDatasetsRequest, + request: translation_service.DetectLanguageRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> adaptive_mt.ListAdaptiveMtDatasetsResponse: - r"""Call the list adaptive mt datasets method over HTTP. + ) -> translation_service.DetectLanguageResponse: + r"""Call the detect language method over HTTP. Args: - request (~.adaptive_mt.ListAdaptiveMtDatasetsRequest): - The request object. Request message for listing all - Adaptive MT datasets that the requestor - has access to. + request (~.translation_service.DetectLanguageRequest): + The request object. The request message for language + detection. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2136,22 +2827,33 @@ def __call__( sent along with the request as metadata. Returns: - ~.adaptive_mt.ListAdaptiveMtDatasetsResponse: - A list of AdaptiveMtDatasets. + ~.translation_service.DetectLanguageResponse: + The response message for language + detection. 
+ """ http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v3/{parent=projects/*/locations/*}/adaptiveMtDatasets", + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*}:detectLanguage", + "body": "*", + }, + { + "method": "post", + "uri": "/v3/{parent=projects/*}:detectLanguage", + "body": "*", }, ] - request, metadata = self._interceptor.pre_list_adaptive_mt_datasets( - request, metadata - ) - pb_request = adaptive_mt.ListAdaptiveMtDatasetsRequest.pb(request) + request, metadata = self._interceptor.pre_detect_language(request, metadata) + pb_request = translation_service.DetectLanguageRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -2174,6 +2876,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2182,16 +2885,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = adaptive_mt.ListAdaptiveMtDatasetsResponse() - pb_resp = adaptive_mt.ListAdaptiveMtDatasetsResponse.pb(resp) + resp = translation_service.DetectLanguageResponse() + pb_resp = translation_service.DetectLanguageResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_adaptive_mt_datasets(resp) + resp = self._interceptor.post_detect_language(resp) return resp - class _ListAdaptiveMtFiles(TranslationServiceRestStub): + class _ExportData(TranslationServiceRestStub): def __hash__(self): - return hash("ListAdaptiveMtFiles") + return hash("ExportData") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2205,18 +2908,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: adaptive_mt.ListAdaptiveMtFilesRequest, + request: automl_translation.ExportDataRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> adaptive_mt.ListAdaptiveMtFilesResponse: - r"""Call the list adaptive mt files method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the export data method over HTTP. Args: - request (~.adaptive_mt.ListAdaptiveMtFilesRequest): - The request object. The request to list all AdaptiveMt - files under a given dataset. + request (~.automl_translation.ExportDataRequest): + The request object. Request message for ExportData. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2224,24 +2926,29 @@ def __call__( sent along with the request as metadata. Returns: - ~.adaptive_mt.ListAdaptiveMtFilesResponse: - The response for listing all - AdaptiveMt files under a given dataset. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v3/{parent=projects/*/locations/*/adaptiveMtDatasets/*}/adaptiveMtFiles", + "method": "post", + "uri": "/v3/{dataset=projects/*/locations/*/datasets/*}:exportData", + "body": "*", }, ] - request, metadata = self._interceptor.pre_list_adaptive_mt_files( - request, metadata - ) - pb_request = adaptive_mt.ListAdaptiveMtFilesRequest.pb(request) + request, metadata = self._interceptor.pre_export_data(request, metadata) + pb_request = automl_translation.ExportDataRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -2264,6 +2971,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2272,16 +2980,14 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = adaptive_mt.ListAdaptiveMtFilesResponse() - pb_resp = adaptive_mt.ListAdaptiveMtFilesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_adaptive_mt_files(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_data(resp) return resp - class _ListAdaptiveMtSentences(TranslationServiceRestStub): + class _GetAdaptiveMtDataset(TranslationServiceRestStub): def __hash__(self): - return hash("ListAdaptiveMtSentences") + return hash("GetAdaptiveMtDataset") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2295,44 +3001,39 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: adaptive_mt.ListAdaptiveMtSentencesRequest, + request: adaptive_mt.GetAdaptiveMtDatasetRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> adaptive_mt.ListAdaptiveMtSentencesResponse: - r"""Call the list adaptive mt - sentences method over HTTP. + ) -> adaptive_mt.AdaptiveMtDataset: + r"""Call the get adaptive mt dataset method over HTTP. - Args: - request (~.adaptive_mt.ListAdaptiveMtSentencesRequest): - The request object. The request for listing Adaptive MT - sentences from a Dataset/File. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + Args: + request (~.adaptive_mt.GetAdaptiveMtDatasetRequest): + The request object. Request message for getting an + Adaptive MT dataset. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. - Returns: - ~.adaptive_mt.ListAdaptiveMtSentencesResponse: - List AdaptiveMt sentences response. + Returns: + ~.adaptive_mt.AdaptiveMtDataset: + An Adaptive MT Dataset. 
""" http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v3/{parent=projects/*/locations/*/adaptiveMtDatasets/*/adaptiveMtFiles/*}/adaptiveMtSentences", - }, - { - "method": "get", - "uri": "/v3/{parent=projects/*/locations/*/adaptiveMtDatasets/*}/adaptiveMtSentences", + "uri": "/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*}", }, ] - request, metadata = self._interceptor.pre_list_adaptive_mt_sentences( + request, metadata = self._interceptor.pre_get_adaptive_mt_dataset( request, metadata ) - pb_request = adaptive_mt.ListAdaptiveMtSentencesRequest.pb(request) + pb_request = adaptive_mt.GetAdaptiveMtDatasetRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -2365,16 +3066,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = adaptive_mt.ListAdaptiveMtSentencesResponse() - pb_resp = adaptive_mt.ListAdaptiveMtSentencesResponse.pb(resp) + resp = adaptive_mt.AdaptiveMtDataset() + pb_resp = adaptive_mt.AdaptiveMtDataset.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_adaptive_mt_sentences(resp) + resp = self._interceptor.post_get_adaptive_mt_dataset(resp) return resp - class _ListGlossaries(TranslationServiceRestStub): + class _GetAdaptiveMtFile(TranslationServiceRestStub): def __hash__(self): - return hash("ListGlossaries") + return hash("GetAdaptiveMtFile") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2388,17 +3089,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: translation_service.ListGlossariesRequest, + request: adaptive_mt.GetAdaptiveMtFileRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> translation_service.ListGlossariesResponse: - r"""Call the list glossaries method over HTTP. + ) -> adaptive_mt.AdaptiveMtFile: + r"""Call the get adaptive mt file method over HTTP. Args: - request (~.translation_service.ListGlossariesRequest): - The request object. Request message for ListGlossaries. + request (~.adaptive_mt.GetAdaptiveMtFileRequest): + The request object. The request for getting an + AdaptiveMtFile. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2406,18 +3108,20 @@ def __call__( sent along with the request as metadata. Returns: - ~.translation_service.ListGlossariesResponse: - Response message for ListGlossaries. + ~.adaptive_mt.AdaptiveMtFile: + An AdaptiveMtFile. 
""" http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v3/{parent=projects/*/locations/*}/glossaries", + "uri": "/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*/adaptiveMtFiles/*}", }, ] - request, metadata = self._interceptor.pre_list_glossaries(request, metadata) - pb_request = translation_service.ListGlossariesRequest.pb(request) + request, metadata = self._interceptor.pre_get_adaptive_mt_file( + request, metadata + ) + pb_request = adaptive_mt.GetAdaptiveMtFileRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -2450,16 +3154,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = translation_service.ListGlossariesResponse() - pb_resp = translation_service.ListGlossariesResponse.pb(resp) + resp = adaptive_mt.AdaptiveMtFile() + pb_resp = adaptive_mt.AdaptiveMtFile.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_glossaries(resp) + resp = self._interceptor.post_get_adaptive_mt_file(resp) return resp - class _TranslateDocument(TranslationServiceRestStub): + class _GetDataset(TranslationServiceRestStub): def __hash__(self): - return hash("TranslateDocument") + return hash("GetDataset") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2473,17 +3177,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: translation_service.TranslateDocumentRequest, + request: automl_translation.GetDatasetRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> translation_service.TranslateDocumentResponse: - r"""Call the translate document method over HTTP. + ) -> automl_translation.Dataset: + r"""Call the get dataset method over HTTP. Args: - request (~.translation_service.TranslateDocumentRequest): - The request object. A document translation request. + request (~.automl_translation.GetDatasetRequest): + The request object. Request message for GetDataset. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2491,30 +3195,23 @@ def __call__( sent along with the request as metadata. Returns: - ~.translation_service.TranslateDocumentResponse: - A translated document response - message. + ~.automl_translation.Dataset: + A dataset that hosts the examples + (sentence pairs) used for translation + models. 
""" http_options: List[Dict[str, str]] = [ { - "method": "post", - "uri": "/v3/{parent=projects/*/locations/*}:translateDocument", - "body": "*", + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/datasets/*}", }, ] - request, metadata = self._interceptor.pre_translate_document( - request, metadata - ) - pb_request = translation_service.TranslateDocumentRequest.pb(request) + request, metadata = self._interceptor.pre_get_dataset(request, metadata) + pb_request = automl_translation.GetDatasetRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -2537,7 +3234,6 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2546,16 +3242,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = translation_service.TranslateDocumentResponse() - pb_resp = translation_service.TranslateDocumentResponse.pb(resp) + resp = automl_translation.Dataset() + pb_resp = automl_translation.Dataset.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_translate_document(resp) + resp = self._interceptor.post_get_dataset(resp) return resp - class _TranslateText(TranslationServiceRestStub): + class _GetGlossary(TranslationServiceRestStub): def __hash__(self): - return hash("TranslateText") + return hash("GetGlossary") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2569,18 +3265,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: translation_service.TranslateTextRequest, + request: translation_service.GetGlossaryRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> translation_service.TranslateTextResponse: - r"""Call the translate text method over HTTP. + ) -> translation_service.Glossary: + r"""Call the get glossary method over HTTP. Args: - request (~.translation_service.TranslateTextRequest): - The request object. The request message for synchronous - translation. + request (~.translation_service.GetGlossaryRequest): + The request object. Request message for GetGlossary. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2588,31 +3283,22 @@ def __call__( sent along with the request as metadata. Returns: - ~.translation_service.TranslateTextResponse: + ~.translation_service.Glossary: + Represents a glossary built from + user-provided data. 
""" http_options: List[Dict[str, str]] = [ { - "method": "post", - "uri": "/v3/{parent=projects/*/locations/*}:translateText", - "body": "*", - }, - { - "method": "post", - "uri": "/v3/{parent=projects/*}:translateText", - "body": "*", + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/glossaries/*}", }, ] - request, metadata = self._interceptor.pre_translate_text(request, metadata) - pb_request = translation_service.TranslateTextRequest.pb(request) + request, metadata = self._interceptor.pre_get_glossary(request, metadata) + pb_request = translation_service.GetGlossaryRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -2635,7 +3321,6 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2644,216 +3329,2487 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = translation_service.TranslateTextResponse() - pb_resp = translation_service.TranslateTextResponse.pb(resp) + resp = translation_service.Glossary() + pb_resp = translation_service.Glossary.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_translate_text(resp) + resp = self._interceptor.post_get_glossary(resp) + return resp + + class _GetGlossaryEntry(TranslationServiceRestStub): + def __hash__(self): + return hash("GetGlossaryEntry") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: translation_service.GetGlossaryEntryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common.GlossaryEntry: + r"""Call the get glossary entry method over HTTP. + + Args: + request (~.translation_service.GetGlossaryEntryRequest): + The request object. Request message for the Get Glossary + Entry Api + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common.GlossaryEntry: + Represents a single entry in a + glossary. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/glossaries/*/glossaryEntries/*}", + }, + ] + request, metadata = self._interceptor.pre_get_glossary_entry( + request, metadata + ) + pb_request = translation_service.GetGlossaryEntryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common.GlossaryEntry() + pb_resp = common.GlossaryEntry.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_glossary_entry(resp) + return resp + + class _GetModel(TranslationServiceRestStub): + def __hash__(self): + return hash("GetModel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: automl_translation.GetModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> automl_translation.Model: + r"""Call the get model method over HTTP. + + Args: + request (~.automl_translation.GetModelRequest): + The request object. Request message for GetModel. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.automl_translation.Model: + A trained translation model. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/models/*}", + }, + ] + request, metadata = self._interceptor.pre_get_model(request, metadata) + pb_request = automl_translation.GetModelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = automl_translation.Model() + pb_resp = automl_translation.Model.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_model(resp) + return resp + + class _GetSupportedLanguages(TranslationServiceRestStub): + def __hash__(self): + return hash("GetSupportedLanguages") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: translation_service.GetSupportedLanguagesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> translation_service.SupportedLanguages: + r"""Call the get supported languages method over HTTP. + + Args: + request (~.translation_service.GetSupportedLanguagesRequest): + The request object. The request message for discovering + supported languages. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.translation_service.SupportedLanguages: + The response message for discovering + supported languages. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*}/supportedLanguages", + }, + { + "method": "get", + "uri": "/v3/{parent=projects/*}/supportedLanguages", + }, + ] + request, metadata = self._interceptor.pre_get_supported_languages( + request, metadata + ) + pb_request = translation_service.GetSupportedLanguagesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = translation_service.SupportedLanguages() + pb_resp = translation_service.SupportedLanguages.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_supported_languages(resp) return resp + class _ImportAdaptiveMtFile(TranslationServiceRestStub): + def __hash__(self): + return hash("ImportAdaptiveMtFile") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: adaptive_mt.ImportAdaptiveMtFileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.ImportAdaptiveMtFileResponse: + r"""Call the import adaptive mt file method over HTTP. + + Args: + request (~.adaptive_mt.ImportAdaptiveMtFileRequest): + The request object. The request for importing an + AdaptiveMt file along with its + sentences. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.adaptive_mt.ImportAdaptiveMtFileResponse: + The response for importing an + AdaptiveMtFile + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/adaptiveMtDatasets/*}:importAdaptiveMtFile", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_import_adaptive_mt_file( + request, metadata + ) + pb_request = adaptive_mt.ImportAdaptiveMtFileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = adaptive_mt.ImportAdaptiveMtFileResponse() + pb_resp = adaptive_mt.ImportAdaptiveMtFileResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_adaptive_mt_file(resp) + return resp + + class _ImportData(TranslationServiceRestStub): + def __hash__(self): + return hash("ImportData") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: automl_translation.ImportDataRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the import data method over HTTP. + + Args: + request (~.automl_translation.ImportDataRequest): + The request object. Request message for ImportData. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{dataset=projects/*/locations/*/datasets/*}:importData", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_import_data(request, metadata) + pb_request = automl_translation.ImportDataRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_data(resp) + return resp + + class _ListAdaptiveMtDatasets(TranslationServiceRestStub): + def __hash__(self): + return hash("ListAdaptiveMtDatasets") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: adaptive_mt.ListAdaptiveMtDatasetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.ListAdaptiveMtDatasetsResponse: + r"""Call the list adaptive mt datasets method over HTTP. + + Args: + request (~.adaptive_mt.ListAdaptiveMtDatasetsRequest): + The request object. Request message for listing all + Adaptive MT datasets that the requestor + has access to. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.adaptive_mt.ListAdaptiveMtDatasetsResponse: + A list of AdaptiveMtDatasets. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*}/adaptiveMtDatasets", + }, + ] + request, metadata = self._interceptor.pre_list_adaptive_mt_datasets( + request, metadata + ) + pb_request = adaptive_mt.ListAdaptiveMtDatasetsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = adaptive_mt.ListAdaptiveMtDatasetsResponse() + pb_resp = adaptive_mt.ListAdaptiveMtDatasetsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_adaptive_mt_datasets(resp) + return resp + + class _ListAdaptiveMtFiles(TranslationServiceRestStub): + def __hash__(self): + return hash("ListAdaptiveMtFiles") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: adaptive_mt.ListAdaptiveMtFilesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.ListAdaptiveMtFilesResponse: + r"""Call the list adaptive mt files method over HTTP. + + Args: + request (~.adaptive_mt.ListAdaptiveMtFilesRequest): + The request object. The request to list all AdaptiveMt + files under a given dataset. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.adaptive_mt.ListAdaptiveMtFilesResponse: + The response for listing all + AdaptiveMt files under a given dataset. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*/adaptiveMtDatasets/*}/adaptiveMtFiles", + }, + ] + request, metadata = self._interceptor.pre_list_adaptive_mt_files( + request, metadata + ) + pb_request = adaptive_mt.ListAdaptiveMtFilesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = adaptive_mt.ListAdaptiveMtFilesResponse() + pb_resp = adaptive_mt.ListAdaptiveMtFilesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_adaptive_mt_files(resp) + return resp + + class _ListAdaptiveMtSentences(TranslationServiceRestStub): + def __hash__(self): + return hash("ListAdaptiveMtSentences") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: adaptive_mt.ListAdaptiveMtSentencesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.ListAdaptiveMtSentencesResponse: + r"""Call the list adaptive mt + sentences method over HTTP. + + Args: + request (~.adaptive_mt.ListAdaptiveMtSentencesRequest): + The request object. The request for listing Adaptive MT + sentences from a Dataset/File. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.adaptive_mt.ListAdaptiveMtSentencesResponse: + List AdaptiveMt sentences response. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*/adaptiveMtDatasets/*/adaptiveMtFiles/*}/adaptiveMtSentences", + }, + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*/adaptiveMtDatasets/*}/adaptiveMtSentences", + }, + ] + request, metadata = self._interceptor.pre_list_adaptive_mt_sentences( + request, metadata + ) + pb_request = adaptive_mt.ListAdaptiveMtSentencesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = adaptive_mt.ListAdaptiveMtSentencesResponse() + pb_resp = adaptive_mt.ListAdaptiveMtSentencesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_adaptive_mt_sentences(resp) + return resp + + class _ListDatasets(TranslationServiceRestStub): + def __hash__(self): + return hash("ListDatasets") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: automl_translation.ListDatasetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> automl_translation.ListDatasetsResponse: + r"""Call the list datasets method over HTTP. + + Args: + request (~.automl_translation.ListDatasetsRequest): + The request object. Request message for ListDatasets. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.automl_translation.ListDatasetsResponse: + Response message for ListDatasets. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*}/datasets", + }, + ] + request, metadata = self._interceptor.pre_list_datasets(request, metadata) + pb_request = automl_translation.ListDatasetsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = automl_translation.ListDatasetsResponse() + pb_resp = automl_translation.ListDatasetsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_datasets(resp) + return resp + + class _ListExamples(TranslationServiceRestStub): + def __hash__(self): + return hash("ListExamples") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: automl_translation.ListExamplesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> automl_translation.ListExamplesResponse: + r"""Call the list examples method over HTTP. + + Args: + request (~.automl_translation.ListExamplesRequest): + The request object. Request message for ListExamples. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.automl_translation.ListExamplesResponse: + Response message for ListExamples. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*/datasets/*}/examples", + }, + ] + request, metadata = self._interceptor.pre_list_examples(request, metadata) + pb_request = automl_translation.ListExamplesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = automl_translation.ListExamplesResponse() + pb_resp = automl_translation.ListExamplesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_examples(resp) + return resp + + class _ListGlossaries(TranslationServiceRestStub): + def __hash__(self): + return hash("ListGlossaries") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: translation_service.ListGlossariesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> translation_service.ListGlossariesResponse: + r"""Call the list glossaries method over HTTP. + + Args: + request (~.translation_service.ListGlossariesRequest): + The request object. Request message for ListGlossaries. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.translation_service.ListGlossariesResponse: + Response message for ListGlossaries. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*}/glossaries", + }, + ] + request, metadata = self._interceptor.pre_list_glossaries(request, metadata) + pb_request = translation_service.ListGlossariesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = translation_service.ListGlossariesResponse() + pb_resp = translation_service.ListGlossariesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_glossaries(resp) + return resp + + class _ListGlossaryEntries(TranslationServiceRestStub): + def __hash__(self): + return hash("ListGlossaryEntries") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: translation_service.ListGlossaryEntriesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> translation_service.ListGlossaryEntriesResponse: + r"""Call the list glossary entries method over HTTP. + + Args: + request (~.translation_service.ListGlossaryEntriesRequest): + The request object. Request message for + ListGlossaryEntries + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.translation_service.ListGlossaryEntriesResponse: + Response message for + ListGlossaryEntries + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*/glossaries/*}/glossaryEntries", + }, + ] + request, metadata = self._interceptor.pre_list_glossary_entries( + request, metadata + ) + pb_request = translation_service.ListGlossaryEntriesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = translation_service.ListGlossaryEntriesResponse() + pb_resp = translation_service.ListGlossaryEntriesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_glossary_entries(resp) + return resp + + class _ListModels(TranslationServiceRestStub): + def __hash__(self): + return hash("ListModels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: automl_translation.ListModelsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> automl_translation.ListModelsResponse: + r"""Call the list models method over HTTP. + + Args: + request (~.automl_translation.ListModelsRequest): + The request object. Request message for ListModels. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.automl_translation.ListModelsResponse: + Response message for ListModels. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*}/models", + }, + ] + request, metadata = self._interceptor.pre_list_models(request, metadata) + pb_request = automl_translation.ListModelsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = automl_translation.ListModelsResponse() + pb_resp = automl_translation.ListModelsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_models(resp) + return resp + + class _RomanizeText(TranslationServiceRestStub): + def __hash__(self): + return hash("RomanizeText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: translation_service.RomanizeTextRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> translation_service.RomanizeTextResponse: + r"""Call the romanize text method over HTTP. + + Args: + request (~.translation_service.RomanizeTextRequest): + The request object. The request message for synchronous + romanization. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.translation_service.RomanizeTextResponse: + The response message for synchronous + romanization. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*}:romanizeText", + "body": "*", + }, + { + "method": "post", + "uri": "/v3/{parent=projects/*}:romanizeText", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_romanize_text(request, metadata) + pb_request = translation_service.RomanizeTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = translation_service.RomanizeTextResponse() + pb_resp = translation_service.RomanizeTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_romanize_text(resp) + return resp + + class _TranslateDocument(TranslationServiceRestStub): + def __hash__(self): + return hash("TranslateDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: translation_service.TranslateDocumentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> translation_service.TranslateDocumentResponse: + r"""Call the translate document method over HTTP. + + Args: + request (~.translation_service.TranslateDocumentRequest): + The request object. A document translation request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.translation_service.TranslateDocumentResponse: + A translated document response + message. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*}:translateDocument", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_translate_document( + request, metadata + ) + pb_request = translation_service.TranslateDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = translation_service.TranslateDocumentResponse() + pb_resp = translation_service.TranslateDocumentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_translate_document(resp) + return resp + + class _TranslateText(TranslationServiceRestStub): + def __hash__(self): + return hash("TranslateText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: translation_service.TranslateTextRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> translation_service.TranslateTextResponse: + r"""Call the translate text method over HTTP. + + Args: + request (~.translation_service.TranslateTextRequest): + The request object. The request message for synchronous + translation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.translation_service.TranslateTextResponse: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*}:translateText", + "body": "*", + }, + { + "method": "post", + "uri": "/v3/{parent=projects/*}:translateText", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_translate_text(request, metadata) + pb_request = translation_service.TranslateTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = translation_service.TranslateTextResponse() + pb_resp = translation_service.TranslateTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_translate_text(resp) + return resp + + class _UpdateGlossary(TranslationServiceRestStub): + def __hash__(self): + return hash("UpdateGlossary") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: translation_service.UpdateGlossaryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update glossary method over HTTP. + + Args: + request (~.translation_service.UpdateGlossaryRequest): + The request object. Request message for the update + glossary flow + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v3/{glossary.name=projects/*/locations/*/glossaries/*}", + "body": "glossary", + }, + ] + request, metadata = self._interceptor.pre_update_glossary(request, metadata) + pb_request = translation_service.UpdateGlossaryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_glossary(resp) + return resp + + class _UpdateGlossaryEntry(TranslationServiceRestStub): + def __hash__(self): + return hash("UpdateGlossaryEntry") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: translation_service.UpdateGlossaryEntryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common.GlossaryEntry: + r"""Call the update glossary entry method over HTTP. + + Args: + request (~.translation_service.UpdateGlossaryEntryRequest): + The request object. Request message for + UpdateGlossaryEntry + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common.GlossaryEntry: + Represents a single entry in a + glossary. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v3/{glossary_entry.name=projects/*/locations/*/glossaries/*/glossaryEntries/*}", + "body": "glossary_entry", + }, + ] + request, metadata = self._interceptor.pre_update_glossary_entry( + request, metadata + ) + pb_request = translation_service.UpdateGlossaryEntryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common.GlossaryEntry() + pb_resp = common.GlossaryEntry.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_glossary_entry(resp) + return resp + + @property + def adaptive_mt_translate( + self, + ) -> Callable[ + [adaptive_mt.AdaptiveMtTranslateRequest], + adaptive_mt.AdaptiveMtTranslateResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AdaptiveMtTranslate(self._session, self._host, self._interceptor) # type: ignore + + @property + def batch_translate_document( + self, + ) -> Callable[ + [translation_service.BatchTranslateDocumentRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchTranslateDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def batch_translate_text( + self, + ) -> Callable[ + [translation_service.BatchTranslateTextRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchTranslateText(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_adaptive_mt_dataset( + self, + ) -> Callable[ + [adaptive_mt.CreateAdaptiveMtDatasetRequest], adaptive_mt.AdaptiveMtDataset + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateAdaptiveMtDataset(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_dataset( + self, + ) -> Callable[[automl_translation.CreateDatasetRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateDataset(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_glossary( + self, + ) -> Callable[ + [translation_service.CreateGlossaryRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateGlossary(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_glossary_entry( + self, + ) -> Callable[ + [translation_service.CreateGlossaryEntryRequest], common.GlossaryEntry + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateGlossaryEntry(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_model( + self, + ) -> Callable[[automl_translation.CreateModelRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateModel(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_adaptive_mt_dataset( + self, + ) -> Callable[[adaptive_mt.DeleteAdaptiveMtDatasetRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAdaptiveMtDataset(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_adaptive_mt_file( + self, + ) -> Callable[[adaptive_mt.DeleteAdaptiveMtFileRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAdaptiveMtFile(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_dataset( + self, + ) -> Callable[[automl_translation.DeleteDatasetRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDataset(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_glossary( + self, + ) -> Callable[ + [translation_service.DeleteGlossaryRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteGlossary(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_glossary_entry( + self, + ) -> Callable[[translation_service.DeleteGlossaryEntryRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteGlossaryEntry(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_model( + self, + ) -> Callable[[automl_translation.DeleteModelRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteModel(self._session, self._host, self._interceptor) # type: ignore + + @property + def detect_language( + self, + ) -> Callable[ + [translation_service.DetectLanguageRequest], + translation_service.DetectLanguageResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DetectLanguage(self._session, self._host, self._interceptor) # type: ignore + + @property + def export_data( + self, + ) -> Callable[[automl_translation.ExportDataRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExportData(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_adaptive_mt_dataset( + self, + ) -> Callable[ + [adaptive_mt.GetAdaptiveMtDatasetRequest], adaptive_mt.AdaptiveMtDataset + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAdaptiveMtDataset(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_adaptive_mt_file( + self, + ) -> Callable[[adaptive_mt.GetAdaptiveMtFileRequest], adaptive_mt.AdaptiveMtFile]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAdaptiveMtFile(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_dataset( + self, + ) -> Callable[[automl_translation.GetDatasetRequest], automl_translation.Dataset]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDataset(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_glossary( + self, + ) -> Callable[ + [translation_service.GetGlossaryRequest], translation_service.Glossary + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetGlossary(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_glossary_entry( + self, + ) -> Callable[[translation_service.GetGlossaryEntryRequest], common.GlossaryEntry]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetGlossaryEntry(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_model( + self, + ) -> Callable[[automl_translation.GetModelRequest], automl_translation.Model]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetModel(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_supported_languages( + self, + ) -> Callable[ + [translation_service.GetSupportedLanguagesRequest], + translation_service.SupportedLanguages, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetSupportedLanguages(self._session, self._host, self._interceptor) # type: ignore + + @property + def import_adaptive_mt_file( + self, + ) -> Callable[ + [adaptive_mt.ImportAdaptiveMtFileRequest], + adaptive_mt.ImportAdaptiveMtFileResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ImportAdaptiveMtFile(self._session, self._host, self._interceptor) # type: ignore + + @property + def import_data( + self, + ) -> Callable[[automl_translation.ImportDataRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ImportData(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_adaptive_mt_datasets( + self, + ) -> Callable[ + [adaptive_mt.ListAdaptiveMtDatasetsRequest], + adaptive_mt.ListAdaptiveMtDatasetsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAdaptiveMtDatasets(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_adaptive_mt_files( + self, + ) -> Callable[ + [adaptive_mt.ListAdaptiveMtFilesRequest], + adaptive_mt.ListAdaptiveMtFilesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAdaptiveMtFiles(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_adaptive_mt_sentences( + self, + ) -> Callable[ + [adaptive_mt.ListAdaptiveMtSentencesRequest], + adaptive_mt.ListAdaptiveMtSentencesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAdaptiveMtSentences(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_datasets( + self, + ) -> Callable[ + [automl_translation.ListDatasetsRequest], + automl_translation.ListDatasetsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDatasets(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_examples( + self, + ) -> Callable[ + [automl_translation.ListExamplesRequest], + automl_translation.ListExamplesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListExamples(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_glossaries( + self, + ) -> Callable[ + [translation_service.ListGlossariesRequest], + translation_service.ListGlossariesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListGlossaries(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_glossary_entries( + self, + ) -> Callable[ + [translation_service.ListGlossaryEntriesRequest], + translation_service.ListGlossaryEntriesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListGlossaryEntries(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_models( + self, + ) -> Callable[ + [automl_translation.ListModelsRequest], automl_translation.ListModelsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListModels(self._session, self._host, self._interceptor) # type: ignore + + @property + def romanize_text( + self, + ) -> Callable[ + [translation_service.RomanizeTextRequest], + translation_service.RomanizeTextResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RomanizeText(self._session, self._host, self._interceptor) # type: ignore + + @property + def translate_document( + self, + ) -> Callable[ + [translation_service.TranslateDocumentRequest], + translation_service.TranslateDocumentResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TranslateDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def translate_text( + self, + ) -> Callable[ + [translation_service.TranslateTextRequest], + translation_service.TranslateTextResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TranslateText(self._session, self._host, self._interceptor) # type: ignore + @property - def adaptive_mt_translate( + def update_glossary( self, ) -> Callable[ - [adaptive_mt.AdaptiveMtTranslateRequest], - adaptive_mt.AdaptiveMtTranslateResponse, + [translation_service.UpdateGlossaryRequest], operations_pb2.Operation ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._AdaptiveMtTranslate(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateGlossary(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_glossary_entry( + self, + ) -> Callable[ + [translation_service.UpdateGlossaryEntryRequest], common.GlossaryEntry + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateGlossaryEntry(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(TranslationServiceRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. 
+ + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(TranslationServiceRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(TranslationServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(TranslationServiceRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
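# --- Illustrative usage sketch (editor's aside, not part of the generated code) ---
# Shows the REST location helpers wired up by the stubs above. It assumes the
# generated TranslationServiceClient exposes the standard get_location /
# list_locations mixins and accepts transport="rest"; "my-project" is a placeholder.
from google.cloud import translate_v3
from google.cloud.location import locations_pb2


def show_locations() -> None:
    # Force the REST transport so the _GetLocation/_ListLocations stubs are used.
    client = translate_v3.TranslationServiceClient(transport="rest")

    # GET /v3/{name=projects/*/locations/*}
    location = client.get_location(
        request=locations_pb2.GetLocationRequest(
            name="projects/my-project/locations/us-central1"
        )
    )
    print(location.location_id)

    # GET /v3/{name=projects/*}/locations
    response = client.list_locations(
        request=locations_pb2.ListLocationsRequest(name="projects/my-project")
    )
    for loc in response.locations:
        print(loc.name)
# -----------------------------------------------------------------------------------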
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(TranslationServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) - @property - def batch_translate_document( - self, - ) -> Callable[ - [translation_service.BatchTranslateDocumentRequest], operations_pb2.Operation - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._BatchTranslateDocument(self._session, self._host, self._interceptor) # type: ignore + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp @property - def batch_translate_text( - self, - ) -> Callable[ - [translation_service.BatchTranslateTextRequest], operations_pb2.Operation - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._BatchTranslateText(self._session, self._host, self._interceptor) # type: ignore + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - @property - def create_adaptive_mt_dataset( - self, - ) -> Callable[ - [adaptive_mt.CreateAdaptiveMtDatasetRequest], adaptive_mt.AdaptiveMtDataset - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateAdaptiveMtDataset(self._session, self._host, self._interceptor) # type: ignore + class _ListOperations(TranslationServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. - @property - def create_glossary( - self, - ) -> Callable[ - [translation_service.CreateGlossaryRequest], operations_pb2.Operation - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateGlossary(self._session, self._host, self._interceptor) # type: ignore + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. - @property - def delete_adaptive_mt_dataset( - self, - ) -> Callable[[adaptive_mt.DeleteAdaptiveMtDatasetRequest], empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteAdaptiveMtDataset(self._session, self._host, self._interceptor) # type: ignore + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ - @property - def delete_adaptive_mt_file( - self, - ) -> Callable[[adaptive_mt.DeleteAdaptiveMtFileRequest], empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteAdaptiveMtFile(self._session, self._host, self._interceptor) # type: ignore + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ] - @property - def delete_glossary( - self, - ) -> Callable[ - [translation_service.DeleteGlossaryRequest], operations_pb2.Operation - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteGlossary(self._session, self._host, self._interceptor) # type: ignore + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) - @property - def detect_language( - self, - ) -> Callable[ - [translation_service.DetectLanguageRequest], - translation_service.DetectLanguageResponse, - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DetectLanguage(self._session, self._host, self._interceptor) # type: ignore + uri = transcoded_request["uri"] + method = transcoded_request["method"] - @property - def get_adaptive_mt_dataset( - self, - ) -> Callable[ - [adaptive_mt.GetAdaptiveMtDatasetRequest], adaptive_mt.AdaptiveMtDataset - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetAdaptiveMtDataset(self._session, self._host, self._interceptor) # type: ignore + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) - @property - def get_adaptive_mt_file( - self, - ) -> Callable[[adaptive_mt.GetAdaptiveMtFileRequest], adaptive_mt.AdaptiveMtFile]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetAdaptiveMtFile(self._session, self._host, self._interceptor) # type: ignore + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" - @property - def get_glossary( - self, - ) -> Callable[ - [translation_service.GetGlossaryRequest], translation_service.Glossary - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetGlossary(self._session, self._host, self._interceptor) # type: ignore + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) - @property - def get_supported_languages( - self, - ) -> Callable[ - [translation_service.GetSupportedLanguagesRequest], - translation_service.SupportedLanguages, - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetSupportedLanguages(self._session, self._host, self._interceptor) # type: ignore + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) - @property - def import_adaptive_mt_file( - self, - ) -> Callable[ - [adaptive_mt.ImportAdaptiveMtFileRequest], - adaptive_mt.ImportAdaptiveMtFileResponse, - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ImportAdaptiveMtFile(self._session, self._host, self._interceptor) # type: ignore + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp @property - def list_adaptive_mt_datasets( - self, - ) -> Callable[ - [adaptive_mt.ListAdaptiveMtDatasetsRequest], - adaptive_mt.ListAdaptiveMtDatasetsResponse, - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListAdaptiveMtDatasets(self._session, self._host, self._interceptor) # type: ignore + def wait_operation(self): + return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore - @property - def list_adaptive_mt_files( - self, - ) -> Callable[ - [adaptive_mt.ListAdaptiveMtFilesRequest], - adaptive_mt.ListAdaptiveMtFilesResponse, - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListAdaptiveMtFiles(self._session, self._host, self._interceptor) # type: ignore + class _WaitOperation(TranslationServiceRestStub): + def __call__( + self, + request: operations_pb2.WaitOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the wait operation method over HTTP. - @property - def list_adaptive_mt_sentences( - self, - ) -> Callable[ - [adaptive_mt.ListAdaptiveMtSentencesRequest], - adaptive_mt.ListAdaptiveMtSentencesResponse, - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListAdaptiveMtSentences(self._session, self._host, self._interceptor) # type: ignore + Args: + request (operations_pb2.WaitOperationRequest): + The request object for WaitOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. - @property - def list_glossaries( - self, - ) -> Callable[ - [translation_service.ListGlossariesRequest], - translation_service.ListGlossariesResponse, - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListGlossaries(self._session, self._host, self._interceptor) # type: ignore + Returns: + operations_pb2.Operation: Response from WaitOperation method. + """ - @property - def translate_document( - self, - ) -> Callable[ - [translation_service.TranslateDocumentRequest], - translation_service.TranslateDocumentResponse, - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._TranslateDocument(self._session, self._host, self._interceptor) # type: ignore + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:wait", + "body": "*", + }, + ] - @property - def translate_text( - self, - ) -> Callable[ - [translation_service.TranslateTextRequest], - translation_service.TranslateTextResponse, - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._TranslateText(self._session, self._host, self._interceptor) # type: ignore + request, metadata = self._interceptor.pre_wait_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_wait_operation(resp) + return resp @property def kind(self) -> str: diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/types/__init__.py b/packages/google-cloud-translate/google/cloud/translate_v3/types/__init__.py index f5157750e765..984b31475859 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/types/__init__.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/types/__init__.py @@ -34,7 +34,42 @@ ListAdaptiveMtSentencesRequest, ListAdaptiveMtSentencesResponse, ) -from .common import FileInputSource, GcsInputSource, GcsOutputDestination +from .automl_translation import ( + BatchTransferResourcesResponse, + CreateDatasetMetadata, + CreateDatasetRequest, + CreateModelMetadata, + CreateModelRequest, + Dataset, + DatasetInputConfig, + DatasetOutputConfig, + DeleteDatasetMetadata, + DeleteDatasetRequest, + DeleteModelMetadata, + DeleteModelRequest, + Example, + ExportDataMetadata, + ExportDataRequest, + GetDatasetRequest, + GetModelRequest, + ImportDataMetadata, + ImportDataRequest, + ListDatasetsRequest, + ListDatasetsResponse, + ListExamplesRequest, + ListExamplesResponse, + ListModelsRequest, + ListModelsResponse, + Model, +) +from .common import ( + FileInputSource, + GcsInputSource, + GcsOutputDestination, + GlossaryEntry, + GlossaryTerm, + OperationState, +) from .translation_service import ( BatchDocumentInputConfig, BatchDocumentOutputConfig, @@ -44,8 +79,10 @@ BatchTranslateMetadata, BatchTranslateResponse, BatchTranslateTextRequest, + CreateGlossaryEntryRequest, CreateGlossaryMetadata, CreateGlossaryRequest, + DeleteGlossaryEntryRequest, DeleteGlossaryMetadata, DeleteGlossaryRequest, DeleteGlossaryResponse, @@ -57,6 +94,7 @@ DocumentTranslation, GcsDestination, GcsSource, + GetGlossaryEntryRequest, 
GetGlossaryRequest, GetSupportedLanguagesRequest, Glossary, @@ -64,7 +102,12 @@ InputConfig, ListGlossariesRequest, ListGlossariesResponse, + ListGlossaryEntriesRequest, + ListGlossaryEntriesResponse, OutputConfig, + Romanization, + RomanizeTextRequest, + RomanizeTextResponse, SupportedLanguage, SupportedLanguages, TranslateDocumentRequest, @@ -73,6 +116,10 @@ TranslateTextRequest, TranslateTextResponse, Translation, + TransliterationConfig, + UpdateGlossaryEntryRequest, + UpdateGlossaryMetadata, + UpdateGlossaryRequest, ) __all__ = ( @@ -95,9 +142,38 @@ "ListAdaptiveMtFilesResponse", "ListAdaptiveMtSentencesRequest", "ListAdaptiveMtSentencesResponse", + "BatchTransferResourcesResponse", + "CreateDatasetMetadata", + "CreateDatasetRequest", + "CreateModelMetadata", + "CreateModelRequest", + "Dataset", + "DatasetInputConfig", + "DatasetOutputConfig", + "DeleteDatasetMetadata", + "DeleteDatasetRequest", + "DeleteModelMetadata", + "DeleteModelRequest", + "Example", + "ExportDataMetadata", + "ExportDataRequest", + "GetDatasetRequest", + "GetModelRequest", + "ImportDataMetadata", + "ImportDataRequest", + "ListDatasetsRequest", + "ListDatasetsResponse", + "ListExamplesRequest", + "ListExamplesResponse", + "ListModelsRequest", + "ListModelsResponse", + "Model", "FileInputSource", "GcsInputSource", "GcsOutputDestination", + "GlossaryEntry", + "GlossaryTerm", + "OperationState", "BatchDocumentInputConfig", "BatchDocumentOutputConfig", "BatchTranslateDocumentMetadata", @@ -106,8 +182,10 @@ "BatchTranslateMetadata", "BatchTranslateResponse", "BatchTranslateTextRequest", + "CreateGlossaryEntryRequest", "CreateGlossaryMetadata", "CreateGlossaryRequest", + "DeleteGlossaryEntryRequest", "DeleteGlossaryMetadata", "DeleteGlossaryRequest", "DeleteGlossaryResponse", @@ -119,6 +197,7 @@ "DocumentTranslation", "GcsDestination", "GcsSource", + "GetGlossaryEntryRequest", "GetGlossaryRequest", "GetSupportedLanguagesRequest", "Glossary", @@ -126,7 +205,12 @@ "InputConfig", "ListGlossariesRequest", "ListGlossariesResponse", + "ListGlossaryEntriesRequest", + "ListGlossaryEntriesResponse", "OutputConfig", + "Romanization", + "RomanizeTextRequest", + "RomanizeTextResponse", "SupportedLanguage", "SupportedLanguages", "TranslateDocumentRequest", @@ -135,4 +219,8 @@ "TranslateTextRequest", "TranslateTextResponse", "Translation", + "TransliterationConfig", + "UpdateGlossaryEntryRequest", + "UpdateGlossaryMetadata", + "UpdateGlossaryRequest", ) diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/types/adaptive_mt.py b/packages/google-cloud-translate/google/cloud/translate_v3/types/adaptive_mt.py index a58d39ad055b..d90cb3355af6 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/types/adaptive_mt.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/types/adaptive_mt.py @@ -237,6 +237,8 @@ def raw_page(self): class AdaptiveMtTranslateRequest(proto.Message): r"""The request for sending an AdaptiveMt translation query. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: parent (str): Required. Location to make a regional call. @@ -249,10 +251,118 @@ class AdaptiveMtTranslateRequest(proto.Message): ``projects/{project}/locations/{location-id}/adaptiveMtDatasets/{dataset}`` content (MutableSequence[str]): Required. The content of the input in string - format. For now only one sentence per request is - supported. + format. 
+ reference_sentence_config (google.cloud.translate_v3.types.AdaptiveMtTranslateRequest.ReferenceSentenceConfig): + Configuration for caller provided reference + sentences. + + This field is a member of `oneof`_ ``_reference_sentence_config``. + glossary_config (google.cloud.translate_v3.types.AdaptiveMtTranslateRequest.GlossaryConfig): + Optional. Glossary to be applied. The glossary must be + within the same region (have the same location-id) as the + model, otherwise an INVALID_ARGUMENT (400) error is + returned. + + This field is a member of `oneof`_ ``_glossary_config``. """ + class ReferenceSentencePair(proto.Message): + r"""A pair of sentences used as reference in source and target + languages. + + Attributes: + source_sentence (str): + Source sentence in the sentence pair. + target_sentence (str): + Target sentence in the sentence pair. + """ + + source_sentence: str = proto.Field( + proto.STRING, + number=1, + ) + target_sentence: str = proto.Field( + proto.STRING, + number=2, + ) + + class ReferenceSentencePairList(proto.Message): + r"""A list of reference sentence pairs. + + Attributes: + reference_sentence_pairs (MutableSequence[google.cloud.translate_v3.types.AdaptiveMtTranslateRequest.ReferenceSentencePair]): + Reference sentence pairs. + """ + + reference_sentence_pairs: MutableSequence[ + "AdaptiveMtTranslateRequest.ReferenceSentencePair" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AdaptiveMtTranslateRequest.ReferenceSentencePair", + ) + + class ReferenceSentenceConfig(proto.Message): + r"""Message of caller-provided reference configuration. + + Attributes: + reference_sentence_pair_lists (MutableSequence[google.cloud.translate_v3.types.AdaptiveMtTranslateRequest.ReferenceSentencePairList]): + Reference sentences pair lists. Each list + will be used as the references to translate the + sentence under "content" field at the + corresponding index. Length of the list is + required to be equal to the length of "content" + field. + source_language_code (str): + Source language code. + target_language_code (str): + Target language code. + """ + + reference_sentence_pair_lists: MutableSequence[ + "AdaptiveMtTranslateRequest.ReferenceSentencePairList" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AdaptiveMtTranslateRequest.ReferenceSentencePairList", + ) + source_language_code: str = proto.Field( + proto.STRING, + number=2, + ) + target_language_code: str = proto.Field( + proto.STRING, + number=3, + ) + + class GlossaryConfig(proto.Message): + r"""Configures which glossary is used for a specific target + language and defines + options for applying that glossary. + + Attributes: + glossary (str): + Required. The ``glossary`` to be applied for this + translation. + + The format depends on the glossary: + + - User-provided custom glossary: + ``projects/{project-number-or-id}/locations/{location-id}/glossaries/{glossary-id}`` + ignore_case (bool): + Optional. Indicates match is case insensitive. The default + value is ``false`` if missing. 
+ """ + + glossary: str = proto.Field( + proto.STRING, + number=1, + ) + ignore_case: bool = proto.Field( + proto.BOOL, + number=2, + ) + parent: str = proto.Field( proto.STRING, number=1, @@ -265,6 +375,18 @@ class AdaptiveMtTranslateRequest(proto.Message): proto.STRING, number=3, ) + reference_sentence_config: ReferenceSentenceConfig = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message=ReferenceSentenceConfig, + ) + glossary_config: GlossaryConfig = proto.Field( + proto.MESSAGE, + number=7, + optional=True, + message=GlossaryConfig, + ) class AdaptiveMtTranslation(proto.Message): @@ -289,6 +411,10 @@ class AdaptiveMtTranslateResponse(proto.Message): Output only. The translation. language_code (str): Output only. The translation's language code. + glossary_translations (MutableSequence[google.cloud.translate_v3.types.AdaptiveMtTranslation]): + Text translation response if a glossary is + provided in the request. This could be the same + as 'translation' above if no terms apply. """ translations: MutableSequence["AdaptiveMtTranslation"] = proto.RepeatedField( @@ -300,6 +426,13 @@ class AdaptiveMtTranslateResponse(proto.Message): proto.STRING, number=2, ) + glossary_translations: MutableSequence[ + "AdaptiveMtTranslation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="AdaptiveMtTranslation", + ) class AdaptiveMtFile(proto.Message): diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/types/automl_translation.py b/packages/google-cloud-translate/google/cloud/translate_v3/types/automl_translation.py new file mode 100644 index 000000000000..87aeb42cbb01 --- /dev/null +++ b/packages/google-cloud-translate/google/cloud/translate_v3/types/automl_translation.py @@ -0,0 +1,923 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.translate_v3.types import common + +__protobuf__ = proto.module( + package="google.cloud.translation.v3", + manifest={ + "ImportDataRequest", + "DatasetInputConfig", + "ImportDataMetadata", + "ExportDataRequest", + "DatasetOutputConfig", + "ExportDataMetadata", + "DeleteDatasetRequest", + "DeleteDatasetMetadata", + "GetDatasetRequest", + "ListDatasetsRequest", + "ListDatasetsResponse", + "CreateDatasetRequest", + "CreateDatasetMetadata", + "ListExamplesRequest", + "ListExamplesResponse", + "Example", + "BatchTransferResourcesResponse", + "Dataset", + "CreateModelRequest", + "CreateModelMetadata", + "ListModelsRequest", + "ListModelsResponse", + "GetModelRequest", + "DeleteModelRequest", + "DeleteModelMetadata", + "Model", + }, +) + + +class ImportDataRequest(proto.Message): + r"""Request message for ImportData. + + Attributes: + dataset (str): + Required. Name of the dataset. 
In form of + ``projects/{project-number-or-id}/locations/{location-id}/datasets/{dataset-id}`` + input_config (google.cloud.translate_v3.types.DatasetInputConfig): + Required. The config for the input content. + """ + + dataset: str = proto.Field( + proto.STRING, + number=1, + ) + input_config: "DatasetInputConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="DatasetInputConfig", + ) + + +class DatasetInputConfig(proto.Message): + r"""Input configuration for datasets. + + Attributes: + input_files (MutableSequence[google.cloud.translate_v3.types.DatasetInputConfig.InputFile]): + Files containing the sentence pairs to be + imported to the dataset. + """ + + class InputFile(proto.Message): + r"""An input file. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + usage (str): + Optional. Usage of the file contents. Options are + TRAIN|VALIDATION|TEST, or UNASSIGNED (by default) for auto + split. + gcs_source (google.cloud.translate_v3.types.GcsInputSource): + Google Cloud Storage file source. + + This field is a member of `oneof`_ ``source``. + """ + + usage: str = proto.Field( + proto.STRING, + number=2, + ) + gcs_source: common.GcsInputSource = proto.Field( + proto.MESSAGE, + number=3, + oneof="source", + message=common.GcsInputSource, + ) + + input_files: MutableSequence[InputFile] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=InputFile, + ) + + +class ImportDataMetadata(proto.Message): + r"""Metadata of import data operation. + + Attributes: + state (google.cloud.translate_v3.types.OperationState): + The current state of the operation. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The creation time of the operation. + update_time (google.protobuf.timestamp_pb2.Timestamp): + The last update time of the operation. + error (google.rpc.status_pb2.Status): + Only populated when operation doesn't + succeed. + """ + + state: common.OperationState = proto.Field( + proto.ENUM, + number=1, + enum=common.OperationState, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + + +class ExportDataRequest(proto.Message): + r"""Request message for ExportData. + + Attributes: + dataset (str): + Required. Name of the dataset. In form of + ``projects/{project-number-or-id}/locations/{location-id}/datasets/{dataset-id}`` + output_config (google.cloud.translate_v3.types.DatasetOutputConfig): + Required. The config for the output content. + """ + + dataset: str = proto.Field( + proto.STRING, + number=1, + ) + output_config: "DatasetOutputConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="DatasetOutputConfig", + ) + + +class DatasetOutputConfig(proto.Message): + r"""Output configuration for datasets. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_destination (google.cloud.translate_v3.types.GcsOutputDestination): + Google Cloud Storage destination to write the + output. + + This field is a member of `oneof`_ ``destination``. 
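# --- Illustrative usage sketch (editor's aside, not part of the generated code) ---
# Ties together the ImportData/ExportData messages above. Resource names and GCS
# URIs are placeholders; the GcsInputSource.input_uri and
# GcsOutputDestination.output_uri_prefix field names are assumed from the common
# types, and import_data/export_data are assumed to return long-running operations.
from google.cloud import translate_v3


def import_then_export() -> None:
    client = translate_v3.TranslationServiceClient()
    dataset = "projects/my-project/locations/us-central1/datasets/my-dataset"

    import_op = client.import_data(
        request=translate_v3.ImportDataRequest(
            dataset=dataset,
            input_config=translate_v3.DatasetInputConfig(
                input_files=[
                    translate_v3.DatasetInputConfig.InputFile(
                        usage="TRAIN",  # TRAIN | VALIDATION | TEST, or UNASSIGNED for auto split
                        gcs_source=translate_v3.GcsInputSource(
                            input_uri="gs://my-bucket/sentence-pairs.tsv"
                        ),
                    )
                ]
            ),
        )
    )
    import_op.result()  # Block until the import finishes.

    export_op = client.export_data(
        request=translate_v3.ExportDataRequest(
            dataset=dataset,
            output_config=translate_v3.DatasetOutputConfig(
                gcs_destination=translate_v3.GcsOutputDestination(
                    output_uri_prefix="gs://my-bucket/exported/"
                )
            ),
        )
    )
    export_op.result()
# -----------------------------------------------------------------------------------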
+ """ + + gcs_destination: common.GcsOutputDestination = proto.Field( + proto.MESSAGE, + number=1, + oneof="destination", + message=common.GcsOutputDestination, + ) + + +class ExportDataMetadata(proto.Message): + r"""Metadata of export data operation. + + Attributes: + state (google.cloud.translate_v3.types.OperationState): + The current state of the operation. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The creation time of the operation. + update_time (google.protobuf.timestamp_pb2.Timestamp): + The last update time of the operation. + error (google.rpc.status_pb2.Status): + Only populated when operation doesn't + succeed. + """ + + state: common.OperationState = proto.Field( + proto.ENUM, + number=1, + enum=common.OperationState, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + + +class DeleteDatasetRequest(proto.Message): + r"""Request message for DeleteDataset. + + Attributes: + name (str): + Required. The name of the dataset to delete. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteDatasetMetadata(proto.Message): + r"""Metadata of delete dataset operation. + + Attributes: + state (google.cloud.translate_v3.types.OperationState): + The current state of the operation. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The creation time of the operation. + update_time (google.protobuf.timestamp_pb2.Timestamp): + The last update time of the operation. + error (google.rpc.status_pb2.Status): + Only populated when operation doesn't + succeed. + """ + + state: common.OperationState = proto.Field( + proto.ENUM, + number=1, + enum=common.OperationState, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + + +class GetDatasetRequest(proto.Message): + r"""Request message for GetDataset. + + Attributes: + name (str): + Required. The resource name of the dataset to + retrieve. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDatasetsRequest(proto.Message): + r"""Request message for ListDatasets. + + Attributes: + parent (str): + Required. Name of the parent project. In form of + ``projects/{project-number-or-id}/locations/{location-id}`` + page_size (int): + Optional. Requested page size. The server can + return fewer results than requested. + page_token (str): + Optional. A token identifying a page of results for the + server to return. Typically obtained from next_page_token + field in the response of a ListDatasets call. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDatasetsResponse(proto.Message): + r"""Response message for ListDatasets. + + Attributes: + datasets (MutableSequence[google.cloud.translate_v3.types.Dataset]): + The datasets read. + next_page_token (str): + A token to retrieve next page of results. 
Pass this token to + the page_token field in the ListDatasetsRequest to obtain + the corresponding page. + """ + + @property + def raw_page(self): + return self + + datasets: MutableSequence["Dataset"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Dataset", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateDatasetRequest(proto.Message): + r"""Request message for CreateDataset. + + Attributes: + parent (str): + Required. The project name. + dataset (google.cloud.translate_v3.types.Dataset): + Required. The Dataset to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + dataset: "Dataset" = proto.Field( + proto.MESSAGE, + number=2, + message="Dataset", + ) + + +class CreateDatasetMetadata(proto.Message): + r"""Metadata of create dataset operation. + + Attributes: + state (google.cloud.translate_v3.types.OperationState): + The current state of the operation. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The creation time of the operation. + update_time (google.protobuf.timestamp_pb2.Timestamp): + The last update time of the operation. + error (google.rpc.status_pb2.Status): + Only populated when operation doesn't + succeed. + """ + + state: common.OperationState = proto.Field( + proto.ENUM, + number=1, + enum=common.OperationState, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + + +class ListExamplesRequest(proto.Message): + r"""Request message for ListExamples. + + Attributes: + parent (str): + Required. Name of the parent dataset. In form of + ``projects/{project-number-or-id}/locations/{location-id}/datasets/{dataset-id}`` + filter (str): + Optional. An expression for filtering the examples that will + be returned. Example filter: + + - ``usage=TRAIN`` + page_size (int): + Optional. Requested page size. The server can + return fewer results than requested. + page_token (str): + Optional. A token identifying a page of results for the + server to return. Typically obtained from next_page_token + field in the response of a ListExamples call. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListExamplesResponse(proto.Message): + r"""Response message for ListExamples. + + Attributes: + examples (MutableSequence[google.cloud.translate_v3.types.Example]): + The sentence pairs. + next_page_token (str): + A token to retrieve next page of results. Pass this token to + the page_token field in the ListExamplesRequest to obtain + the corresponding page. + """ + + @property + def raw_page(self): + return self + + examples: MutableSequence["Example"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Example", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Example(proto.Message): + r"""A sentence pair. + + Attributes: + name (str): + Output only. The resource name of the example, in form of + \`projects/{project-number-or-id}/locations/{location_id}/datasets/{dataset_id}/examples/{example_id}' + source_text (str): + Sentence in source language. 
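# --- Illustrative usage sketch (editor's aside, not part of the generated code) ---
# Lists a dataset's sentence pairs using the filter syntax documented above
# ("usage=TRAIN"). The dataset name is a placeholder; iteration assumes the usual
# generated pager returned by list_* methods.
from google.cloud import translate_v3


def list_training_examples() -> None:
    client = translate_v3.TranslationServiceClient()
    request = translate_v3.ListExamplesRequest(
        parent="projects/my-project/locations/us-central1/datasets/my-dataset",
        filter="usage=TRAIN",
        page_size=100,
    )
    # The pager follows next_page_token for us, yielding Example messages.
    for example in client.list_examples(request=request):
        print(example.source_text, "->", example.target_text)
# -----------------------------------------------------------------------------------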
+ target_text (str): + Sentence in target language. + usage (str): + Output only. Usage of the sentence pair. Options are + TRAIN|VALIDATION|TEST. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + source_text: str = proto.Field( + proto.STRING, + number=2, + ) + target_text: str = proto.Field( + proto.STRING, + number=3, + ) + usage: str = proto.Field( + proto.STRING, + number=4, + ) + + +class BatchTransferResourcesResponse(proto.Message): + r"""Response message for BatchTransferResources. + + Attributes: + responses (MutableSequence[google.cloud.translate_v3.types.BatchTransferResourcesResponse.TransferResourceResponse]): + Responses of the transfer for individual + resources. + """ + + class TransferResourceResponse(proto.Message): + r"""Transfer response for a single resource. + + Attributes: + source (str): + Full name of the resource to transfer as + specified in the request. + target (str): + Full name of the new resource successfully + transferred from the source hosted by + Translation API. Target will be empty if the + transfer failed. + error (google.rpc.status_pb2.Status): + The error result in case of failure. + """ + + source: str = proto.Field( + proto.STRING, + number=1, + ) + target: str = proto.Field( + proto.STRING, + number=2, + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=3, + message=status_pb2.Status, + ) + + responses: MutableSequence[TransferResourceResponse] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=TransferResourceResponse, + ) + + +class Dataset(proto.Message): + r"""A dataset that hosts the examples (sentence pairs) used for + translation models. + + Attributes: + name (str): + The resource name of the dataset, in form of + ``projects/{project-number-or-id}/locations/{location_id}/datasets/{dataset_id}`` + display_name (str): + The name of the dataset to show in the interface. The name + can be up to 32 characters long and can consist only of + ASCII Latin letters A-Z and a-z, underscores (_), and ASCII + digits 0-9. + source_language_code (str): + The BCP-47 language code of the source + language. + target_language_code (str): + The BCP-47 language code of the target + language. + example_count (int): + Output only. The number of examples in the + dataset. + train_example_count (int): + Output only. Number of training examples + (sentence pairs). + validate_example_count (int): + Output only. Number of validation examples + (sentence pairs). + test_example_count (int): + Output only. Number of test examples + (sentence pairs). + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when this dataset was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when this dataset was + last updated. 
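# --- Illustrative usage sketch (editor's aside, not part of the generated code) ---
# Creates a custom-translation dataset from the Dataset/CreateDatasetRequest
# messages above. The project and location are placeholders; create_dataset is
# assumed to return a long-running operation (it has a CreateDatasetMetadata type).
from google.cloud import translate_v3


def create_translation_dataset() -> None:
    client = translate_v3.TranslationServiceClient()
    operation = client.create_dataset(
        request=translate_v3.CreateDatasetRequest(
            parent="projects/my-project/locations/us-central1",
            dataset=translate_v3.Dataset(
                display_name="my_dataset",  # ASCII letters, digits and underscores only
                source_language_code="en",
                target_language_code="fr",
            ),
        )
    )
    dataset = operation.result()
    print(dataset.name, dataset.example_count)
# -----------------------------------------------------------------------------------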
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + source_language_code: str = proto.Field( + proto.STRING, + number=3, + ) + target_language_code: str = proto.Field( + proto.STRING, + number=4, + ) + example_count: int = proto.Field( + proto.INT32, + number=5, + ) + train_example_count: int = proto.Field( + proto.INT32, + number=6, + ) + validate_example_count: int = proto.Field( + proto.INT32, + number=7, + ) + test_example_count: int = proto.Field( + proto.INT32, + number=8, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + + +class CreateModelRequest(proto.Message): + r"""Request message for CreateModel. + + Attributes: + parent (str): + Required. The project name, in form of + ``projects/{project}/locations/{location}`` + model (google.cloud.translate_v3.types.Model): + Required. The Model to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + model: "Model" = proto.Field( + proto.MESSAGE, + number=2, + message="Model", + ) + + +class CreateModelMetadata(proto.Message): + r"""Metadata of create model operation. + + Attributes: + state (google.cloud.translate_v3.types.OperationState): + The current state of the operation. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The creation time of the operation. + update_time (google.protobuf.timestamp_pb2.Timestamp): + The last update time of the operation. + error (google.rpc.status_pb2.Status): + Only populated when operation doesn't + succeed. + """ + + state: common.OperationState = proto.Field( + proto.ENUM, + number=1, + enum=common.OperationState, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + + +class ListModelsRequest(proto.Message): + r"""Request message for ListModels. + + Attributes: + parent (str): + Required. Name of the parent project. In form of + ``projects/{project-number-or-id}/locations/{location-id}`` + filter (str): + Optional. An expression for filtering the models that will + be returned. Supported filter: ``dataset_id=${dataset_id}`` + page_size (int): + Optional. Requested page size. The server can + return fewer results than requested. + page_token (str): + Optional. A token identifying a page of results for the + server to return. Typically obtained from next_page_token + field in the response of a ListModels call. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListModelsResponse(proto.Message): + r"""Response message for ListModels. + + Attributes: + models (MutableSequence[google.cloud.translate_v3.types.Model]): + The models read. + next_page_token (str): + A token to retrieve next page of results. Pass this token to + the page_token field in the ListModelsRequest to obtain the + corresponding page. 
+ """ + + @property + def raw_page(self): + return self + + models: MutableSequence["Model"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Model", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetModelRequest(proto.Message): + r"""Request message for GetModel. + + Attributes: + name (str): + Required. The resource name of the model to + retrieve. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteModelRequest(proto.Message): + r"""Request message for DeleteModel. + + Attributes: + name (str): + Required. The name of the model to delete. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteModelMetadata(proto.Message): + r"""Metadata of delete model operation. + + Attributes: + state (google.cloud.translate_v3.types.OperationState): + The current state of the operation. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The creation time of the operation. + update_time (google.protobuf.timestamp_pb2.Timestamp): + The last update time of the operation. + error (google.rpc.status_pb2.Status): + Only populated when operation doesn't + succeed. + """ + + state: common.OperationState = proto.Field( + proto.ENUM, + number=1, + enum=common.OperationState, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + + +class Model(proto.Message): + r"""A trained translation model. + + Attributes: + name (str): + The resource name of the model, in form of + ``projects/{project-number-or-id}/locations/{location_id}/models/{model_id}`` + display_name (str): + The name of the model to show in the interface. The name can + be up to 32 characters long and can consist only of ASCII + Latin letters A-Z and a-z, underscores (_), and ASCII digits + 0-9. + dataset (str): + The dataset from which the model is trained, in form of + ``projects/{project-number-or-id}/locations/{location_id}/datasets/{dataset_id}`` + source_language_code (str): + Output only. The BCP-47 language code of the + source language. + target_language_code (str): + Output only. The BCP-47 language code of the + target language. + train_example_count (int): + Output only. Number of examples (sentence + pairs) used to train the model. + validate_example_count (int): + Output only. Number of examples (sentence + pairs) used to validate the model. + test_example_count (int): + Output only. Number of examples (sentence + pairs) used to test the model. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when the model + resource was created, which is also when the + training started. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when this model was + last updated. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + dataset: str = proto.Field( + proto.STRING, + number=3, + ) + source_language_code: str = proto.Field( + proto.STRING, + number=4, + ) + target_language_code: str = proto.Field( + proto.STRING, + number=5, + ) + train_example_count: int = proto.Field( + proto.INT32, + number=6, + ) + validate_example_count: int = proto.Field( + proto.INT32, + number=7, + ) + test_example_count: int = proto.Field( + proto.INT32, + number=12, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/types/common.py b/packages/google-cloud-translate/google/cloud/translate_v3/types/common.py index 851b1e06a182..dc68d9c6eab3 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/types/common.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/types/common.py @@ -22,13 +22,45 @@ __protobuf__ = proto.module( package="google.cloud.translation.v3", manifest={ + "OperationState", "GcsInputSource", "FileInputSource", "GcsOutputDestination", + "GlossaryEntry", + "GlossaryTerm", }, ) +class OperationState(proto.Enum): + r"""Possible states of long running operations. + + Values: + OPERATION_STATE_UNSPECIFIED (0): + Invalid. + OPERATION_STATE_RUNNING (1): + Request is being processed. + OPERATION_STATE_SUCCEEDED (2): + The operation was successful. + OPERATION_STATE_FAILED (3): + Failed to process operation. + OPERATION_STATE_CANCELLING (4): + Request is in the process of being canceled + after caller invoked + longrunning.Operations.CancelOperation on the + request id. + OPERATION_STATE_CANCELLED (5): + The operation request was successfully + canceled. + """ + OPERATION_STATE_UNSPECIFIED = 0 + OPERATION_STATE_RUNNING = 1 + OPERATION_STATE_SUCCEEDED = 2 + OPERATION_STATE_FAILED = 3 + OPERATION_STATE_CANCELLING = 4 + OPERATION_STATE_CANCELLED = 5 + + class GcsInputSource(proto.Message): r"""The Google Cloud Storage location for the input content. @@ -87,4 +119,112 @@ class GcsOutputDestination(proto.Message): ) +class GlossaryEntry(proto.Message): + r"""Represents a single entry in a glossary. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. The resource name of the entry. Format: + ``projects/*/locations/*/glossaries/*/glossaryEntries/*`` + terms_pair (google.cloud.translate_v3.types.GlossaryEntry.GlossaryTermsPair): + Used for an unidirectional glossary. + + This field is a member of `oneof`_ ``data``. + terms_set (google.cloud.translate_v3.types.GlossaryEntry.GlossaryTermsSet): + Used for an equivalent term sets glossary. + + This field is a member of `oneof`_ ``data``. + description (str): + Describes the glossary entry. + """ + + class GlossaryTermsPair(proto.Message): + r"""Represents a single entry for an unidirectional glossary. 
+ + Attributes: + source_term (google.cloud.translate_v3.types.GlossaryTerm): + The source term is the term that will get + match in the text, + target_term (google.cloud.translate_v3.types.GlossaryTerm): + The term that will replace the match source + term. + """ + + source_term: "GlossaryTerm" = proto.Field( + proto.MESSAGE, + number=1, + message="GlossaryTerm", + ) + target_term: "GlossaryTerm" = proto.Field( + proto.MESSAGE, + number=2, + message="GlossaryTerm", + ) + + class GlossaryTermsSet(proto.Message): + r"""Represents a single entry for an equivalent term set + glossary. This is used for equivalent term sets where each term + can be replaced by the other terms in the set. + + Attributes: + terms (MutableSequence[google.cloud.translate_v3.types.GlossaryTerm]): + Each term in the set represents a term that + can be replaced by the other terms. + """ + + terms: MutableSequence["GlossaryTerm"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="GlossaryTerm", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + terms_pair: GlossaryTermsPair = proto.Field( + proto.MESSAGE, + number=2, + oneof="data", + message=GlossaryTermsPair, + ) + terms_set: GlossaryTermsSet = proto.Field( + proto.MESSAGE, + number=3, + oneof="data", + message=GlossaryTermsSet, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + + +class GlossaryTerm(proto.Message): + r"""Represents a single glossary term + + Attributes: + language_code (str): + The language for this glossary term. + text (str): + The text for the glossary term. + """ + + language_code: str = proto.Field( + proto.STRING, + number=1, + ) + text: str = proto.Field( + proto.STRING, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/types/translation_service.py b/packages/google-cloud-translate/google/cloud/translate_v3/types/translation_service.py index dbce197931d4..8cca1d3d3dbd 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/types/translation_service.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/types/translation_service.py @@ -17,16 +17,22 @@ from typing import MutableMapping, MutableSequence +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore +from google.cloud.translate_v3.types import common + __protobuf__ = proto.module( package="google.cloud.translation.v3", manifest={ - "TranslateTextGlossaryConfig", + "TransliterationConfig", "TranslateTextRequest", "TranslateTextResponse", "Translation", + "RomanizeTextRequest", + "Romanization", + "RomanizeTextResponse", "DetectLanguageRequest", "DetectedLanguage", "DetectLanguageResponse", @@ -48,11 +54,19 @@ "GlossaryInputConfig", "Glossary", "CreateGlossaryRequest", + "UpdateGlossaryRequest", "GetGlossaryRequest", "DeleteGlossaryRequest", "ListGlossariesRequest", "ListGlossariesResponse", + "GetGlossaryEntryRequest", + "DeleteGlossaryEntryRequest", + "ListGlossaryEntriesRequest", + "ListGlossaryEntriesResponse", + "CreateGlossaryEntryRequest", + "UpdateGlossaryEntryRequest", "CreateGlossaryMetadata", + "UpdateGlossaryMetadata", "DeleteGlossaryMetadata", "DeleteGlossaryResponse", "BatchTranslateDocumentRequest", @@ -60,35 +74,23 @@ "BatchDocumentOutputConfig", "BatchTranslateDocumentResponse", "BatchTranslateDocumentMetadata", + "TranslateTextGlossaryConfig", }, ) -class TranslateTextGlossaryConfig(proto.Message): - r"""Configures which 
glossary is used for a specific target - language and defines options for applying that glossary. +class TransliterationConfig(proto.Message): + r"""Configures transliteration feature on top of translation. Attributes: - glossary (str): - Required. The ``glossary`` to be applied for this - translation. - - The format depends on the glossary: - - - User-provided custom glossary: - ``projects/{project-number-or-id}/locations/{location-id}/glossaries/{glossary-id}`` - ignore_case (bool): - Optional. Indicates match is case insensitive. The default - value is ``false`` if missing. + enable_transliteration (bool): + If true, source text in romanized form can be + translated to the target language. """ - glossary: str = proto.Field( - proto.STRING, - number=1, - ) - ignore_case: bool = proto.Field( + enable_transliteration: bool = proto.Field( proto.BOOL, - number=2, + number=1, ) @@ -146,6 +148,9 @@ class TranslateTextRequest(proto.Message): - General (built-in) models: ``projects/{project-number-or-id}/locations/{location-id}/models/general/nmt``, + - Translation LLM models: + ``projects/{project-number-or-id}/locations/{location-id}/models/general/translation-llm``, + For global (non-regionalized) requests, use ``location-id`` ``global``. For example, ``projects/{project-number-or-id}/locations/global/models/general/nmt``. @@ -156,6 +161,8 @@ class TranslateTextRequest(proto.Message): within the same region (have the same location-id) as the model, otherwise an INVALID_ARGUMENT (400) error is returned. + transliteration_config (google.cloud.translate_v3.types.TransliterationConfig): + Optional. Transliteration to be applied. labels (MutableMapping[str, str]): Optional. The labels with user-defined metadata for the request. @@ -200,6 +207,11 @@ class TranslateTextRequest(proto.Message): number=7, message="TranslateTextGlossaryConfig", ) + transliteration_config: "TransliterationConfig" = proto.Field( + proto.MESSAGE, + number=13, + message="TransliterationConfig", + ) labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, @@ -282,6 +294,91 @@ class Translation(proto.Message): ) +class RomanizeTextRequest(proto.Message): + r"""The request message for synchronous romanization. + + Attributes: + parent (str): + Required. Project or location to make a call. Must refer to + a caller's project. + + Format: + ``projects/{project-number-or-id}/locations/{location-id}`` + or ``projects/{project-number-or-id}``. + + For global calls, use + ``projects/{project-number-or-id}/locations/global`` or + ``projects/{project-number-or-id}``. + contents (MutableSequence[str]): + Required. The content of the input in string + format. + source_language_code (str): + Optional. The ISO-639 language code of the + input text if known, for example, "hi" or "zh". + If the source language isn't specified, the API + attempts to identify the source language + automatically and returns the source language + for each content in the response. + """ + + parent: str = proto.Field( + proto.STRING, + number=4, + ) + contents: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + source_language_code: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Romanization(proto.Message): + r"""A single romanization response. + + Attributes: + romanized_text (str): + Romanized text. + If an error occurs during romanization, this + field might be excluded from the response. 
+ detected_language_code (str): + The ISO-639 language code of source text in + the initial request, detected automatically, if + no source language was passed within the initial + request. If the source language was passed, + auto-detection of the language does not occur + and this field is empty. + """ + + romanized_text: str = proto.Field( + proto.STRING, + number=1, + ) + detected_language_code: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RomanizeTextResponse(proto.Message): + r"""The response message for synchronous romanization. + + Attributes: + romanizations (MutableSequence[google.cloud.translate_v3.types.Romanization]): + Text romanization responses. This field has the same length + as + [``contents``][google.cloud.translation.v3.RomanizeTextRequest.contents]. + """ + + romanizations: MutableSequence["Romanization"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Romanization", + ) + + class DetectLanguageRequest(proto.Message): r"""The request message for language detection. @@ -1467,6 +1564,29 @@ class CreateGlossaryRequest(proto.Message): ) +class UpdateGlossaryRequest(proto.Message): + r"""Request message for the update glossary flow + + Attributes: + glossary (google.cloud.translate_v3.types.Glossary): + Required. The glossary entry to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to be updated. Currently only + ``display_name`` and 'input_config' + """ + + glossary: "Glossary" = proto.Field( + proto.MESSAGE, + number=1, + message="Glossary", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + class GetGlossaryRequest(proto.Message): r"""Request message for GetGlossary. @@ -1587,6 +1707,134 @@ def raw_page(self): ) +class GetGlossaryEntryRequest(proto.Message): + r"""Request message for the Get Glossary Entry Api + + Attributes: + name (str): + Required. The resource name of the glossary + entry to get + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteGlossaryEntryRequest(proto.Message): + r"""Request message for Delete Glossary Entry + + Attributes: + name (str): + Required. The resource name of the glossary + entry to delete + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListGlossaryEntriesRequest(proto.Message): + r"""Request message for ListGlossaryEntries + + Attributes: + parent (str): + Required. The parent glossary resource name + for listing the glossary's entries. + page_size (int): + Optional. Requested page size. The server may + return fewer glossary entries than requested. If + unspecified, the server picks an appropriate + default. + page_token (str): + Optional. A token identifying a page of results the server + should return. Typically, this is the value of + [ListGlossaryEntriesResponse.next_page_token] returned from + the previous call. The first page is returned if + ``page_token``\ is empty or missing. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListGlossaryEntriesResponse(proto.Message): + r"""Response message for ListGlossaryEntries + + Attributes: + glossary_entries (MutableSequence[google.cloud.translate_v3.types.GlossaryEntry]): + Optional. The Glossary Entries + next_page_token (str): + Optional. A token to retrieve a page of results. 
Pass this + value in the [ListGLossaryEntriesRequest.page_token] field + in the subsequent calls. + """ + + @property + def raw_page(self): + return self + + glossary_entries: MutableSequence[common.GlossaryEntry] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=common.GlossaryEntry, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateGlossaryEntryRequest(proto.Message): + r"""Request message for CreateGlossaryEntry + + Attributes: + parent (str): + Required. The resource name of the glossary + to create the entry under. + glossary_entry (google.cloud.translate_v3.types.GlossaryEntry): + Required. The glossary entry to create + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + glossary_entry: common.GlossaryEntry = proto.Field( + proto.MESSAGE, + number=2, + message=common.GlossaryEntry, + ) + + +class UpdateGlossaryEntryRequest(proto.Message): + r"""Request message for UpdateGlossaryEntry + + Attributes: + glossary_entry (google.cloud.translate_v3.types.GlossaryEntry): + Required. The glossary entry to update. + """ + + glossary_entry: common.GlossaryEntry = proto.Field( + proto.MESSAGE, + number=1, + message=common.GlossaryEntry, + ) + + class CreateGlossaryMetadata(proto.Message): r"""Stored in the [google.longrunning.Operation.metadata][google.longrunning.Operation.metadata] @@ -1649,6 +1897,69 @@ class State(proto.Enum): ) +class UpdateGlossaryMetadata(proto.Message): + r"""Stored in the + [google.longrunning.Operation.metadata][google.longrunning.Operation.metadata] + field returned by UpdateGlossary. + + Attributes: + glossary (google.cloud.translate_v3.types.Glossary): + The updated glossary object. + state (google.cloud.translate_v3.types.UpdateGlossaryMetadata.State): + The current state of the glossary update + operation. If the glossary input file was not + updated this will be completed immediately + submit_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the operation was submitted to + the server. + """ + + class State(proto.Enum): + r"""Enumerates the possible states that the update request can be + in. + + Values: + STATE_UNSPECIFIED (0): + Invalid. + RUNNING (1): + Request is being processed. + SUCCEEDED (2): + The glossary was successfully updated. + FAILED (3): + Failed to update the glossary. + CANCELLING (4): + Request is in the process of being canceled + after caller invoked + longrunning.Operations.CancelOperation on the + request id. + CANCELLED (5): + The glossary update request was successfully + canceled. + """ + STATE_UNSPECIFIED = 0 + RUNNING = 1 + SUCCEEDED = 2 + FAILED = 3 + CANCELLING = 4 + CANCELLED = 5 + + glossary: "Glossary" = proto.Field( + proto.MESSAGE, + number=1, + message="Glossary", + ) + state: State = proto.Field( + proto.ENUM, + number=2, + enum=State, + ) + submit_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + class DeleteGlossaryMetadata(proto.Message): r"""Stored in the [google.longrunning.Operation.metadata][google.longrunning.Operation.metadata] @@ -2193,4 +2504,32 @@ class State(proto.Enum): ) +class TranslateTextGlossaryConfig(proto.Message): + r"""Configures which glossary is used for a specific target + language and defines options for applying that glossary. + + Attributes: + glossary (str): + Required. The ``glossary`` to be applied for this + translation. 
+ + The format depends on the glossary: + + - User-provided custom glossary: + ``projects/{project-number-or-id}/locations/{location-id}/glossaries/{glossary-id}`` + ignore_case (bool): + Optional. Indicates match is case insensitive. The default + value is ``false`` if missing. + """ + + glossary: str = proto.Field( + proto.STRING, + number=1, + ) + ignore_case: bool = proto.Field( + proto.BOOL, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-translate/google/cloud/translate_v3beta1/gapic_version.py b/packages/google-cloud-translate/google/cloud/translate_v3beta1/gapic_version.py index 71a07a06cd93..558c8aab67c5 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3beta1/gapic_version.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3.json b/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3.json index f2dc4068aa05..b2041c071341 100644 --- a/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3.json +++ b/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-translate", - "version": "3.15.5" + "version": "0.1.0" }, "snippets": [ { @@ -703,27 +703,27 @@ "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", "shortName": "TranslationServiceAsyncClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.create_glossary", + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.create_dataset", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.CreateGlossary", + "fullName": "google.cloud.translation.v3.TranslationService.CreateDataset", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "CreateGlossary" + "shortName": "CreateDataset" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.CreateGlossaryRequest" + "type": "google.cloud.translate_v3.types.CreateDatasetRequest" }, { "name": "parent", "type": "str" }, { - "name": "glossary", - "type": "google.cloud.translate_v3.types.Glossary" + "name": "dataset", + "type": "google.cloud.translate_v3.types.Dataset" }, { "name": "retry", @@ -739,21 +739,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_glossary" + "shortName": "create_dataset" }, - "description": "Sample for CreateGlossary", - "file": "translate_v3_generated_translation_service_create_glossary_async.py", + "description": "Sample for CreateDataset", + "file": "translate_v3_generated_translation_service_create_dataset_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_CreateGlossary_async", + "regionTag": "translate_v3_generated_TranslationService_CreateDataset_async", "segments": [ { - "end": 59, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 59, + "end": 55, "start": 27, "type": "SHORT" }, @@ -763,22 +763,22 @@ "type": 
"CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 56, - "start": 50, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 60, - "start": 57, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_create_glossary_async.py" + "title": "translate_v3_generated_translation_service_create_dataset_async.py" }, { "canonical": true, @@ -787,27 +787,27 @@ "fullName": "google.cloud.translate_v3.TranslationServiceClient", "shortName": "TranslationServiceClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceClient.create_glossary", + "fullName": "google.cloud.translate_v3.TranslationServiceClient.create_dataset", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.CreateGlossary", + "fullName": "google.cloud.translation.v3.TranslationService.CreateDataset", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "CreateGlossary" + "shortName": "CreateDataset" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.CreateGlossaryRequest" + "type": "google.cloud.translate_v3.types.CreateDatasetRequest" }, { "name": "parent", "type": "str" }, { - "name": "glossary", - "type": "google.cloud.translate_v3.types.Glossary" + "name": "dataset", + "type": "google.cloud.translate_v3.types.Dataset" }, { "name": "retry", @@ -823,21 +823,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_glossary" + "shortName": "create_dataset" }, - "description": "Sample for CreateGlossary", - "file": "translate_v3_generated_translation_service_create_glossary_sync.py", + "description": "Sample for CreateDataset", + "file": "translate_v3_generated_translation_service_create_dataset_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_CreateGlossary_sync", + "regionTag": "translate_v3_generated_TranslationService_CreateDataset_sync", "segments": [ { - "end": 59, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 59, + "end": 55, "start": 27, "type": "SHORT" }, @@ -847,22 +847,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 56, - "start": 50, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 60, - "start": 57, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_create_glossary_sync.py" + "title": "translate_v3_generated_translation_service_create_dataset_sync.py" }, { "canonical": true, @@ -872,24 +872,28 @@ "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", "shortName": "TranslationServiceAsyncClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.delete_adaptive_mt_dataset", + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.create_glossary_entry", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.DeleteAdaptiveMtDataset", + "fullName": "google.cloud.translation.v3.TranslationService.CreateGlossaryEntry", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "DeleteAdaptiveMtDataset" + "shortName": "CreateGlossaryEntry" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.translate_v3.types.DeleteAdaptiveMtDatasetRequest" + "type": "google.cloud.translate_v3.types.CreateGlossaryEntryRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "glossary_entry", + "type": "google.cloud.translate_v3.types.GlossaryEntry" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -903,21 +907,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_adaptive_mt_dataset" + "resultType": "google.cloud.translate_v3.types.GlossaryEntry", + "shortName": "create_glossary_entry" }, - "description": "Sample for DeleteAdaptiveMtDataset", - "file": "translate_v3_generated_translation_service_delete_adaptive_mt_dataset_async.py", + "description": "Sample for CreateGlossaryEntry", + "file": "translate_v3_generated_translation_service_create_glossary_entry_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_DeleteAdaptiveMtDataset_async", + "regionTag": "translate_v3_generated_TranslationService_CreateGlossaryEntry_async", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -932,15 +937,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_delete_adaptive_mt_dataset_async.py" + "title": "translate_v3_generated_translation_service_create_glossary_entry_async.py" }, { "canonical": true, @@ -949,24 +956,28 @@ "fullName": "google.cloud.translate_v3.TranslationServiceClient", "shortName": "TranslationServiceClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceClient.delete_adaptive_mt_dataset", + "fullName": "google.cloud.translate_v3.TranslationServiceClient.create_glossary_entry", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.DeleteAdaptiveMtDataset", + "fullName": "google.cloud.translation.v3.TranslationService.CreateGlossaryEntry", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "DeleteAdaptiveMtDataset" + "shortName": "CreateGlossaryEntry" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.DeleteAdaptiveMtDatasetRequest" + "type": "google.cloud.translate_v3.types.CreateGlossaryEntryRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "glossary_entry", + "type": "google.cloud.translate_v3.types.GlossaryEntry" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -980,21 +991,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_adaptive_mt_dataset" + "resultType": "google.cloud.translate_v3.types.GlossaryEntry", + "shortName": "create_glossary_entry" }, - "description": "Sample for DeleteAdaptiveMtDataset", - "file": "translate_v3_generated_translation_service_delete_adaptive_mt_dataset_sync.py", + "description": "Sample for CreateGlossaryEntry", + "file": "translate_v3_generated_translation_service_create_glossary_entry_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_DeleteAdaptiveMtDataset_sync", + "regionTag": "translate_v3_generated_TranslationService_CreateGlossaryEntry_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1009,15 
+1021,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_delete_adaptive_mt_dataset_sync.py" + "title": "translate_v3_generated_translation_service_create_glossary_entry_sync.py" }, { "canonical": true, @@ -1027,24 +1041,28 @@ "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", "shortName": "TranslationServiceAsyncClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.delete_adaptive_mt_file", + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.create_glossary", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.DeleteAdaptiveMtFile", + "fullName": "google.cloud.translation.v3.TranslationService.CreateGlossary", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "DeleteAdaptiveMtFile" + "shortName": "CreateGlossary" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.DeleteAdaptiveMtFileRequest" + "type": "google.cloud.translate_v3.types.CreateGlossaryRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "glossary", + "type": "google.cloud.translate_v3.types.Glossary" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1058,21 +1076,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_adaptive_mt_file" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_glossary" }, - "description": "Sample for DeleteAdaptiveMtFile", - "file": "translate_v3_generated_translation_service_delete_adaptive_mt_file_async.py", + "description": "Sample for CreateGlossary", + "file": "translate_v3_generated_translation_service_create_glossary_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_DeleteAdaptiveMtFile_async", + "regionTag": "translate_v3_generated_TranslationService_CreateGlossary_async", "segments": [ { - "end": 49, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 59, "start": 27, "type": "SHORT" }, @@ -1082,20 +1101,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_delete_adaptive_mt_file_async.py" + "title": "translate_v3_generated_translation_service_create_glossary_async.py" }, { "canonical": true, @@ -1104,24 +1125,28 @@ "fullName": "google.cloud.translate_v3.TranslationServiceClient", "shortName": "TranslationServiceClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceClient.delete_adaptive_mt_file", + "fullName": "google.cloud.translate_v3.TranslationServiceClient.create_glossary", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.DeleteAdaptiveMtFile", + "fullName": "google.cloud.translation.v3.TranslationService.CreateGlossary", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "DeleteAdaptiveMtFile" + "shortName": "CreateGlossary" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.DeleteAdaptiveMtFileRequest" + "type": 
"google.cloud.translate_v3.types.CreateGlossaryRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "glossary", + "type": "google.cloud.translate_v3.types.Glossary" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1135,21 +1160,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_adaptive_mt_file" + "resultType": "google.api_core.operation.Operation", + "shortName": "create_glossary" }, - "description": "Sample for DeleteAdaptiveMtFile", - "file": "translate_v3_generated_translation_service_delete_adaptive_mt_file_sync.py", + "description": "Sample for CreateGlossary", + "file": "translate_v3_generated_translation_service_create_glossary_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_DeleteAdaptiveMtFile_sync", + "regionTag": "translate_v3_generated_TranslationService_CreateGlossary_sync", "segments": [ { - "end": 49, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 59, "start": 27, "type": "SHORT" }, @@ -1159,20 +1185,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_delete_adaptive_mt_file_sync.py" + "title": "translate_v3_generated_translation_service_create_glossary_sync.py" }, { "canonical": true, @@ -1182,24 +1210,28 @@ "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", "shortName": "TranslationServiceAsyncClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.delete_glossary", + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.create_model", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.DeleteGlossary", + "fullName": "google.cloud.translation.v3.TranslationService.CreateModel", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "DeleteGlossary" + "shortName": "CreateModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.DeleteGlossaryRequest" + "type": "google.cloud.translate_v3.types.CreateModelRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "model", + "type": "google.cloud.translate_v3.types.Model" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1214,13 +1246,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_glossary" + "shortName": "create_model" }, - "description": "Sample for DeleteGlossary", - "file": "translate_v3_generated_translation_service_delete_glossary_async.py", + "description": "Sample for CreateModel", + "file": "translate_v3_generated_translation_service_create_model_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_DeleteGlossary_async", + "regionTag": "translate_v3_generated_TranslationService_CreateModel_async", "segments": [ { "end": 55, @@ -1253,7 +1285,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_delete_glossary_async.py" + "title": "translate_v3_generated_translation_service_create_model_async.py" }, { "canonical": true, @@ -1262,24 +1294,28 @@ "fullName": "google.cloud.translate_v3.TranslationServiceClient", "shortName": 
"TranslationServiceClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceClient.delete_glossary", + "fullName": "google.cloud.translate_v3.TranslationServiceClient.create_model", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.DeleteGlossary", + "fullName": "google.cloud.translation.v3.TranslationService.CreateModel", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "DeleteGlossary" + "shortName": "CreateModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.DeleteGlossaryRequest" + "type": "google.cloud.translate_v3.types.CreateModelRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "model", + "type": "google.cloud.translate_v3.types.Model" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1294,13 +1330,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_glossary" + "shortName": "create_model" }, - "description": "Sample for DeleteGlossary", - "file": "translate_v3_generated_translation_service_delete_glossary_sync.py", + "description": "Sample for CreateModel", + "file": "translate_v3_generated_translation_service_create_model_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_DeleteGlossary_sync", + "regionTag": "translate_v3_generated_TranslationService_CreateModel_sync", "segments": [ { "end": 55, @@ -1333,7 +1369,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_delete_glossary_sync.py" + "title": "translate_v3_generated_translation_service_create_model_sync.py" }, { "canonical": true, @@ -1343,34 +1379,22 @@ "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", "shortName": "TranslationServiceAsyncClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.detect_language", + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.delete_adaptive_mt_dataset", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.DetectLanguage", + "fullName": "google.cloud.translation.v3.TranslationService.DeleteAdaptiveMtDataset", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "DetectLanguage" + "shortName": "DeleteAdaptiveMtDataset" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.DetectLanguageRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "model", - "type": "str" - }, - { - "name": "mime_type", - "type": "str" + "type": "google.cloud.translate_v3.types.DeleteAdaptiveMtDatasetRequest" }, { - "name": "content", + "name": "name", "type": "str" }, { @@ -1386,22 +1410,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.types.DetectLanguageResponse", - "shortName": "detect_language" + "shortName": "delete_adaptive_mt_dataset" }, - "description": "Sample for DetectLanguage", - "file": "translate_v3_generated_translation_service_detect_language_async.py", + "description": "Sample for DeleteAdaptiveMtDataset", + "file": "translate_v3_generated_translation_service_delete_adaptive_mt_dataset_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_DetectLanguage_async", + "regionTag": "translate_v3_generated_TranslationService_DeleteAdaptiveMtDataset_async", 
"segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1411,22 +1434,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_detect_language_async.py" + "title": "translate_v3_generated_translation_service_delete_adaptive_mt_dataset_async.py" }, { "canonical": true, @@ -1435,39 +1456,27 @@ "fullName": "google.cloud.translate_v3.TranslationServiceClient", "shortName": "TranslationServiceClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceClient.detect_language", + "fullName": "google.cloud.translate_v3.TranslationServiceClient.delete_adaptive_mt_dataset", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.DetectLanguage", + "fullName": "google.cloud.translation.v3.TranslationService.DeleteAdaptiveMtDataset", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "DetectLanguage" + "shortName": "DeleteAdaptiveMtDataset" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.DetectLanguageRequest" + "type": "google.cloud.translate_v3.types.DeleteAdaptiveMtDatasetRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { - "name": "model", - "type": "str" - }, - { - "name": "mime_type", - "type": "str" - }, - { - "name": "content", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" + "name": "retry", + "type": "google.api_core.retry.Retry" }, { "name": "timeout", @@ -1478,22 +1487,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.types.DetectLanguageResponse", - "shortName": "detect_language" + "shortName": "delete_adaptive_mt_dataset" }, - "description": "Sample for DetectLanguage", - "file": "translate_v3_generated_translation_service_detect_language_sync.py", + "description": "Sample for DeleteAdaptiveMtDataset", + "file": "translate_v3_generated_translation_service_delete_adaptive_mt_dataset_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_DetectLanguage_sync", + "regionTag": "translate_v3_generated_TranslationService_DeleteAdaptiveMtDataset_sync", "segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1503,22 +1511,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_detect_language_sync.py" + "title": "translate_v3_generated_translation_service_delete_adaptive_mt_dataset_sync.py" }, { "canonical": true, @@ -1528,19 +1534,19 @@ "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", "shortName": "TranslationServiceAsyncClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.get_adaptive_mt_dataset", + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.delete_adaptive_mt_file", "method": { - "fullName": 
"google.cloud.translation.v3.TranslationService.GetAdaptiveMtDataset", + "fullName": "google.cloud.translation.v3.TranslationService.DeleteAdaptiveMtFile", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "GetAdaptiveMtDataset" + "shortName": "DeleteAdaptiveMtFile" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.GetAdaptiveMtDatasetRequest" + "type": "google.cloud.translate_v3.types.DeleteAdaptiveMtFileRequest" }, { "name": "name", @@ -1559,22 +1565,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.types.AdaptiveMtDataset", - "shortName": "get_adaptive_mt_dataset" + "shortName": "delete_adaptive_mt_file" }, - "description": "Sample for GetAdaptiveMtDataset", - "file": "translate_v3_generated_translation_service_get_adaptive_mt_dataset_async.py", + "description": "Sample for DeleteAdaptiveMtFile", + "file": "translate_v3_generated_translation_service_delete_adaptive_mt_file_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_GetAdaptiveMtDataset_async", + "regionTag": "translate_v3_generated_TranslationService_DeleteAdaptiveMtFile_async", "segments": [ { - "end": 51, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1589,17 +1594,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_get_adaptive_mt_dataset_async.py" + "title": "translate_v3_generated_translation_service_delete_adaptive_mt_file_async.py" }, { "canonical": true, @@ -1608,19 +1611,19 @@ "fullName": "google.cloud.translate_v3.TranslationServiceClient", "shortName": "TranslationServiceClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceClient.get_adaptive_mt_dataset", + "fullName": "google.cloud.translate_v3.TranslationServiceClient.delete_adaptive_mt_file", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.GetAdaptiveMtDataset", + "fullName": "google.cloud.translation.v3.TranslationService.DeleteAdaptiveMtFile", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "GetAdaptiveMtDataset" + "shortName": "DeleteAdaptiveMtFile" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.GetAdaptiveMtDatasetRequest" + "type": "google.cloud.translate_v3.types.DeleteAdaptiveMtFileRequest" }, { "name": "name", @@ -1639,22 +1642,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.types.AdaptiveMtDataset", - "shortName": "get_adaptive_mt_dataset" + "shortName": "delete_adaptive_mt_file" }, - "description": "Sample for GetAdaptiveMtDataset", - "file": "translate_v3_generated_translation_service_get_adaptive_mt_dataset_sync.py", + "description": "Sample for DeleteAdaptiveMtFile", + "file": "translate_v3_generated_translation_service_delete_adaptive_mt_file_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_GetAdaptiveMtDataset_sync", + "regionTag": "translate_v3_generated_TranslationService_DeleteAdaptiveMtFile_sync", "segments": [ { - "end": 51, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 49, "start": 27, "type": "SHORT" }, @@ 
-1669,17 +1671,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_get_adaptive_mt_dataset_sync.py" + "title": "translate_v3_generated_translation_service_delete_adaptive_mt_file_sync.py" }, { "canonical": true, @@ -1689,19 +1689,19 @@ "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", "shortName": "TranslationServiceAsyncClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.get_adaptive_mt_file", + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.delete_dataset", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.GetAdaptiveMtFile", + "fullName": "google.cloud.translation.v3.TranslationService.DeleteDataset", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "GetAdaptiveMtFile" + "shortName": "DeleteDataset" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.GetAdaptiveMtFileRequest" + "type": "google.cloud.translate_v3.types.DeleteDatasetRequest" }, { "name": "name", @@ -1720,22 +1720,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.types.AdaptiveMtFile", - "shortName": "get_adaptive_mt_file" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_dataset" }, - "description": "Sample for GetAdaptiveMtFile", - "file": "translate_v3_generated_translation_service_get_adaptive_mt_file_async.py", + "description": "Sample for DeleteDataset", + "file": "translate_v3_generated_translation_service_delete_dataset_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_GetAdaptiveMtFile_async", + "regionTag": "translate_v3_generated_TranslationService_DeleteDataset_async", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1750,17 +1750,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_get_adaptive_mt_file_async.py" + "title": "translate_v3_generated_translation_service_delete_dataset_async.py" }, { "canonical": true, @@ -1769,19 +1769,19 @@ "fullName": "google.cloud.translate_v3.TranslationServiceClient", "shortName": "TranslationServiceClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceClient.get_adaptive_mt_file", + "fullName": "google.cloud.translate_v3.TranslationServiceClient.delete_dataset", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.GetAdaptiveMtFile", + "fullName": "google.cloud.translation.v3.TranslationService.DeleteDataset", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "GetAdaptiveMtFile" + "shortName": "DeleteDataset" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.GetAdaptiveMtFileRequest" + "type": "google.cloud.translate_v3.types.DeleteDatasetRequest" }, { "name": "name", @@ -1800,22 +1800,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.types.AdaptiveMtFile", - "shortName": 
"get_adaptive_mt_file" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_dataset" }, - "description": "Sample for GetAdaptiveMtFile", - "file": "translate_v3_generated_translation_service_get_adaptive_mt_file_sync.py", + "description": "Sample for DeleteDataset", + "file": "translate_v3_generated_translation_service_delete_dataset_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_GetAdaptiveMtFile_sync", + "regionTag": "translate_v3_generated_TranslationService_DeleteDataset_sync", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1830,17 +1830,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_get_adaptive_mt_file_sync.py" + "title": "translate_v3_generated_translation_service_delete_dataset_sync.py" }, { "canonical": true, @@ -1850,19 +1850,19 @@ "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", "shortName": "TranslationServiceAsyncClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.get_glossary", + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.delete_glossary_entry", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.GetGlossary", + "fullName": "google.cloud.translation.v3.TranslationService.DeleteGlossaryEntry", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "GetGlossary" + "shortName": "DeleteGlossaryEntry" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.GetGlossaryRequest" + "type": "google.cloud.translate_v3.types.DeleteGlossaryEntryRequest" }, { "name": "name", @@ -1881,22 +1881,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.types.Glossary", - "shortName": "get_glossary" + "shortName": "delete_glossary_entry" }, - "description": "Sample for GetGlossary", - "file": "translate_v3_generated_translation_service_get_glossary_async.py", + "description": "Sample for DeleteGlossaryEntry", + "file": "translate_v3_generated_translation_service_delete_glossary_entry_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_GetGlossary_async", + "regionTag": "translate_v3_generated_TranslationService_DeleteGlossaryEntry_async", "segments": [ { - "end": 51, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1911,17 +1910,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_get_glossary_async.py" + "title": "translate_v3_generated_translation_service_delete_glossary_entry_async.py" }, { "canonical": true, @@ -1930,19 +1927,19 @@ "fullName": "google.cloud.translate_v3.TranslationServiceClient", "shortName": "TranslationServiceClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceClient.get_glossary", + "fullName": "google.cloud.translate_v3.TranslationServiceClient.delete_glossary_entry", "method": { - "fullName": 
"google.cloud.translation.v3.TranslationService.GetGlossary", + "fullName": "google.cloud.translation.v3.TranslationService.DeleteGlossaryEntry", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "GetGlossary" + "shortName": "DeleteGlossaryEntry" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.GetGlossaryRequest" + "type": "google.cloud.translate_v3.types.DeleteGlossaryEntryRequest" }, { "name": "name", @@ -1961,22 +1958,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.types.Glossary", - "shortName": "get_glossary" + "shortName": "delete_glossary_entry" }, - "description": "Sample for GetGlossary", - "file": "translate_v3_generated_translation_service_get_glossary_sync.py", + "description": "Sample for DeleteGlossaryEntry", + "file": "translate_v3_generated_translation_service_delete_glossary_entry_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_GetGlossary_sync", + "regionTag": "translate_v3_generated_TranslationService_DeleteGlossaryEntry_sync", "segments": [ { - "end": 51, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1991,17 +1987,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_get_glossary_sync.py" + "title": "translate_v3_generated_translation_service_delete_glossary_entry_sync.py" }, { "canonical": true, @@ -2011,30 +2005,22 @@ "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", "shortName": "TranslationServiceAsyncClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.get_supported_languages", + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.delete_glossary", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.GetSupportedLanguages", + "fullName": "google.cloud.translation.v3.TranslationService.DeleteGlossary", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "GetSupportedLanguages" + "shortName": "DeleteGlossary" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.GetSupportedLanguagesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "model", - "type": "str" + "type": "google.cloud.translate_v3.types.DeleteGlossaryRequest" }, { - "name": "display_language_code", + "name": "name", "type": "str" }, { @@ -2050,22 +2036,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.types.SupportedLanguages", - "shortName": "get_supported_languages" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_glossary" }, - "description": "Sample for GetSupportedLanguages", - "file": "translate_v3_generated_translation_service_get_supported_languages_async.py", + "description": "Sample for DeleteGlossary", + "file": "translate_v3_generated_translation_service_delete_glossary_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_GetSupportedLanguages_async", + "regionTag": "translate_v3_generated_TranslationService_DeleteGlossary_async", "segments": [ { - "end": 51, + "end": 55, "start": 27, 
"type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2080,17 +2066,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_get_supported_languages_async.py" + "title": "translate_v3_generated_translation_service_delete_glossary_async.py" }, { "canonical": true, @@ -2099,30 +2085,22 @@ "fullName": "google.cloud.translate_v3.TranslationServiceClient", "shortName": "TranslationServiceClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceClient.get_supported_languages", + "fullName": "google.cloud.translate_v3.TranslationServiceClient.delete_glossary", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.GetSupportedLanguages", + "fullName": "google.cloud.translation.v3.TranslationService.DeleteGlossary", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "GetSupportedLanguages" + "shortName": "DeleteGlossary" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.GetSupportedLanguagesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "model", - "type": "str" + "type": "google.cloud.translate_v3.types.DeleteGlossaryRequest" }, { - "name": "display_language_code", + "name": "name", "type": "str" }, { @@ -2138,22 +2116,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.types.SupportedLanguages", - "shortName": "get_supported_languages" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_glossary" }, - "description": "Sample for GetSupportedLanguages", - "file": "translate_v3_generated_translation_service_get_supported_languages_sync.py", + "description": "Sample for DeleteGlossary", + "file": "translate_v3_generated_translation_service_delete_glossary_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_GetSupportedLanguages_sync", + "regionTag": "translate_v3_generated_TranslationService_DeleteGlossary_sync", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2168,17 +2146,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_get_supported_languages_sync.py" + "title": "translate_v3_generated_translation_service_delete_glossary_sync.py" }, { "canonical": true, @@ -2188,22 +2166,22 @@ "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", "shortName": "TranslationServiceAsyncClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.import_adaptive_mt_file", + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.delete_model", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.ImportAdaptiveMtFile", + "fullName": "google.cloud.translation.v3.TranslationService.DeleteModel", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "ImportAdaptiveMtFile" + "shortName": "DeleteModel" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.translate_v3.types.ImportAdaptiveMtFileRequest" + "type": "google.cloud.translate_v3.types.DeleteModelRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -2219,22 +2197,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.types.ImportAdaptiveMtFileResponse", - "shortName": "import_adaptive_mt_file" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_model" }, - "description": "Sample for ImportAdaptiveMtFile", - "file": "translate_v3_generated_translation_service_import_adaptive_mt_file_async.py", + "description": "Sample for DeleteModel", + "file": "translate_v3_generated_translation_service_delete_model_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_ImportAdaptiveMtFile_async", + "regionTag": "translate_v3_generated_TranslationService_DeleteModel_async", "segments": [ { - "end": 57, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 57, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2244,22 +2222,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 51, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 54, - "start": 52, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 58, - "start": 55, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_import_adaptive_mt_file_async.py" + "title": "translate_v3_generated_translation_service_delete_model_async.py" }, { "canonical": true, @@ -2268,22 +2246,22 @@ "fullName": "google.cloud.translate_v3.TranslationServiceClient", "shortName": "TranslationServiceClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceClient.import_adaptive_mt_file", + "fullName": "google.cloud.translate_v3.TranslationServiceClient.delete_model", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.ImportAdaptiveMtFile", + "fullName": "google.cloud.translation.v3.TranslationService.DeleteModel", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "ImportAdaptiveMtFile" + "shortName": "DeleteModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.ImportAdaptiveMtFileRequest" + "type": "google.cloud.translate_v3.types.DeleteModelRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -2299,22 +2277,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.types.ImportAdaptiveMtFileResponse", - "shortName": "import_adaptive_mt_file" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_model" }, - "description": "Sample for ImportAdaptiveMtFile", - "file": "translate_v3_generated_translation_service_import_adaptive_mt_file_sync.py", + "description": "Sample for DeleteModel", + "file": "translate_v3_generated_translation_service_delete_model_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_ImportAdaptiveMtFile_sync", + "regionTag": "translate_v3_generated_TranslationService_DeleteModel_sync", "segments": [ { - "end": 57, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 57, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2324,22 +2302,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 51, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 54, - "start": 52, + "end": 52, + "start": 
46, "type": "REQUEST_EXECUTION" }, { - "end": 58, - "start": 55, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_import_adaptive_mt_file_sync.py" + "title": "translate_v3_generated_translation_service_delete_model_sync.py" }, { "canonical": true, @@ -2349,24 +2327,36 @@ "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", "shortName": "TranslationServiceAsyncClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.list_adaptive_mt_datasets", + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.detect_language", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.ListAdaptiveMtDatasets", + "fullName": "google.cloud.translation.v3.TranslationService.DetectLanguage", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "ListAdaptiveMtDatasets" + "shortName": "DetectLanguage" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.ListAdaptiveMtDatasetsRequest" + "type": "google.cloud.translate_v3.types.DetectLanguageRequest" }, { "name": "parent", "type": "str" }, + { + "name": "model", + "type": "str" + }, + { + "name": "mime_type", + "type": "str" + }, + { + "name": "content", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -2380,14 +2370,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtDatasetsAsyncPager", - "shortName": "list_adaptive_mt_datasets" + "resultType": "google.cloud.translate_v3.types.DetectLanguageResponse", + "shortName": "detect_language" }, - "description": "Sample for ListAdaptiveMtDatasets", - "file": "translate_v3_generated_translation_service_list_adaptive_mt_datasets_async.py", + "description": "Sample for DetectLanguage", + "file": "translate_v3_generated_translation_service_detect_language_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_ListAdaptiveMtDatasets_async", + "regionTag": "translate_v3_generated_TranslationService_DetectLanguage_async", "segments": [ { "end": 52, @@ -2405,22 +2395,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 49, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_list_adaptive_mt_datasets_async.py" + "title": "translate_v3_generated_translation_service_detect_language_async.py" }, { "canonical": true, @@ -2429,24 +2419,36 @@ "fullName": "google.cloud.translate_v3.TranslationServiceClient", "shortName": "TranslationServiceClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceClient.list_adaptive_mt_datasets", + "fullName": "google.cloud.translate_v3.TranslationServiceClient.detect_language", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.ListAdaptiveMtDatasets", + "fullName": "google.cloud.translation.v3.TranslationService.DetectLanguage", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "ListAdaptiveMtDatasets" + "shortName": "DetectLanguage" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.translate_v3.types.ListAdaptiveMtDatasetsRequest" + "type": "google.cloud.translate_v3.types.DetectLanguageRequest" }, { "name": "parent", "type": "str" }, + { + "name": "model", + "type": "str" + }, + { + "name": "mime_type", + "type": "str" + }, + { + "name": "content", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -2460,14 +2462,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtDatasetsPager", - "shortName": "list_adaptive_mt_datasets" + "resultType": "google.cloud.translate_v3.types.DetectLanguageResponse", + "shortName": "detect_language" }, - "description": "Sample for ListAdaptiveMtDatasets", - "file": "translate_v3_generated_translation_service_list_adaptive_mt_datasets_sync.py", + "description": "Sample for DetectLanguage", + "file": "translate_v3_generated_translation_service_detect_language_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_ListAdaptiveMtDatasets_sync", + "regionTag": "translate_v3_generated_TranslationService_DetectLanguage_sync", "segments": [ { "end": 52, @@ -2485,22 +2487,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 49, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_list_adaptive_mt_datasets_sync.py" + "title": "translate_v3_generated_translation_service_detect_language_sync.py" }, { "canonical": true, @@ -2510,24 +2512,28 @@ "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", "shortName": "TranslationServiceAsyncClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.list_adaptive_mt_files", + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.export_data", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.ListAdaptiveMtFiles", + "fullName": "google.cloud.translation.v3.TranslationService.ExportData", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "ListAdaptiveMtFiles" + "shortName": "ExportData" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.ListAdaptiveMtFilesRequest" + "type": "google.cloud.translate_v3.types.ExportDataRequest" }, { - "name": "parent", + "name": "dataset", "type": "str" }, + { + "name": "output_config", + "type": "google.cloud.translate_v3.types.DatasetOutputConfig" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -2541,22 +2547,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtFilesAsyncPager", - "shortName": "list_adaptive_mt_files" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "export_data" }, - "description": "Sample for ListAdaptiveMtFiles", - "file": "translate_v3_generated_translation_service_list_adaptive_mt_files_async.py", + "description": "Sample for ExportData", + "file": "translate_v3_generated_translation_service_export_data_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_ListAdaptiveMtFiles_async", + "regionTag": "translate_v3_generated_TranslationService_ExportData_async", 
"segments": [ { - "end": 52, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 59, "start": 27, "type": "SHORT" }, @@ -2566,22 +2572,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_list_adaptive_mt_files_async.py" + "title": "translate_v3_generated_translation_service_export_data_async.py" }, { "canonical": true, @@ -2590,24 +2596,28 @@ "fullName": "google.cloud.translate_v3.TranslationServiceClient", "shortName": "TranslationServiceClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceClient.list_adaptive_mt_files", + "fullName": "google.cloud.translate_v3.TranslationServiceClient.export_data", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.ListAdaptiveMtFiles", + "fullName": "google.cloud.translation.v3.TranslationService.ExportData", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "ListAdaptiveMtFiles" + "shortName": "ExportData" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.ListAdaptiveMtFilesRequest" + "type": "google.cloud.translate_v3.types.ExportDataRequest" }, { - "name": "parent", + "name": "dataset", "type": "str" }, + { + "name": "output_config", + "type": "google.cloud.translate_v3.types.DatasetOutputConfig" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -2621,22 +2631,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtFilesPager", - "shortName": "list_adaptive_mt_files" + "resultType": "google.api_core.operation.Operation", + "shortName": "export_data" }, - "description": "Sample for ListAdaptiveMtFiles", - "file": "translate_v3_generated_translation_service_list_adaptive_mt_files_sync.py", + "description": "Sample for ExportData", + "file": "translate_v3_generated_translation_service_export_data_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_ListAdaptiveMtFiles_sync", + "regionTag": "translate_v3_generated_TranslationService_ExportData_sync", "segments": [ { - "end": 52, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 59, "start": 27, "type": "SHORT" }, @@ -2646,22 +2656,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_list_adaptive_mt_files_sync.py" + "title": "translate_v3_generated_translation_service_export_data_sync.py" }, { "canonical": true, @@ -2671,22 +2681,22 @@ "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", "shortName": "TranslationServiceAsyncClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.list_adaptive_mt_sentences", + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.get_adaptive_mt_dataset", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.ListAdaptiveMtSentences", + "fullName": 
"google.cloud.translation.v3.TranslationService.GetAdaptiveMtDataset", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "ListAdaptiveMtSentences" + "shortName": "GetAdaptiveMtDataset" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.ListAdaptiveMtSentencesRequest" + "type": "google.cloud.translate_v3.types.GetAdaptiveMtDatasetRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -2702,22 +2712,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtSentencesAsyncPager", - "shortName": "list_adaptive_mt_sentences" + "resultType": "google.cloud.translate_v3.types.AdaptiveMtDataset", + "shortName": "get_adaptive_mt_dataset" }, - "description": "Sample for ListAdaptiveMtSentences", - "file": "translate_v3_generated_translation_service_list_adaptive_mt_sentences_async.py", + "description": "Sample for GetAdaptiveMtDataset", + "file": "translate_v3_generated_translation_service_get_adaptive_mt_dataset_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_ListAdaptiveMtSentences_async", + "regionTag": "translate_v3_generated_TranslationService_GetAdaptiveMtDataset_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2737,12 +2747,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_list_adaptive_mt_sentences_async.py" + "title": "translate_v3_generated_translation_service_get_adaptive_mt_dataset_async.py" }, { "canonical": true, @@ -2751,22 +2761,22 @@ "fullName": "google.cloud.translate_v3.TranslationServiceClient", "shortName": "TranslationServiceClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceClient.list_adaptive_mt_sentences", + "fullName": "google.cloud.translate_v3.TranslationServiceClient.get_adaptive_mt_dataset", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.ListAdaptiveMtSentences", + "fullName": "google.cloud.translation.v3.TranslationService.GetAdaptiveMtDataset", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "ListAdaptiveMtSentences" + "shortName": "GetAdaptiveMtDataset" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.ListAdaptiveMtSentencesRequest" + "type": "google.cloud.translate_v3.types.GetAdaptiveMtDatasetRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -2782,22 +2792,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtSentencesPager", - "shortName": "list_adaptive_mt_sentences" + "resultType": "google.cloud.translate_v3.types.AdaptiveMtDataset", + "shortName": "get_adaptive_mt_dataset" }, - "description": "Sample for ListAdaptiveMtSentences", - "file": "translate_v3_generated_translation_service_list_adaptive_mt_sentences_sync.py", + "description": "Sample for GetAdaptiveMtDataset", + "file": "translate_v3_generated_translation_service_get_adaptive_mt_dataset_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_ListAdaptiveMtSentences_sync", + "regionTag": 
"translate_v3_generated_TranslationService_GetAdaptiveMtDataset_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2817,12 +2827,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_list_adaptive_mt_sentences_sync.py" + "title": "translate_v3_generated_translation_service_get_adaptive_mt_dataset_sync.py" }, { "canonical": true, @@ -2832,22 +2842,22 @@ "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", "shortName": "TranslationServiceAsyncClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.list_glossaries", + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.get_adaptive_mt_file", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.ListGlossaries", + "fullName": "google.cloud.translation.v3.TranslationService.GetAdaptiveMtFile", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "ListGlossaries" + "shortName": "GetAdaptiveMtFile" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.ListGlossariesRequest" + "type": "google.cloud.translate_v3.types.GetAdaptiveMtFileRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -2863,22 +2873,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListGlossariesAsyncPager", - "shortName": "list_glossaries" + "resultType": "google.cloud.translate_v3.types.AdaptiveMtFile", + "shortName": "get_adaptive_mt_file" }, - "description": "Sample for ListGlossaries", - "file": "translate_v3_generated_translation_service_list_glossaries_async.py", + "description": "Sample for GetAdaptiveMtFile", + "file": "translate_v3_generated_translation_service_get_adaptive_mt_file_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_ListGlossaries_async", + "regionTag": "translate_v3_generated_TranslationService_GetAdaptiveMtFile_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2898,12 +2908,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_list_glossaries_async.py" + "title": "translate_v3_generated_translation_service_get_adaptive_mt_file_async.py" }, { "canonical": true, @@ -2912,22 +2922,22 @@ "fullName": "google.cloud.translate_v3.TranslationServiceClient", "shortName": "TranslationServiceClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceClient.list_glossaries", + "fullName": "google.cloud.translate_v3.TranslationServiceClient.get_adaptive_mt_file", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.ListGlossaries", + "fullName": "google.cloud.translation.v3.TranslationService.GetAdaptiveMtFile", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "ListGlossaries" + "shortName": "GetAdaptiveMtFile" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.ListGlossariesRequest" + "type": "google.cloud.translate_v3.types.GetAdaptiveMtFileRequest" }, { - "name": "parent", + 
"name": "name", "type": "str" }, { @@ -2943,22 +2953,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListGlossariesPager", - "shortName": "list_glossaries" + "resultType": "google.cloud.translate_v3.types.AdaptiveMtFile", + "shortName": "get_adaptive_mt_file" }, - "description": "Sample for ListGlossaries", - "file": "translate_v3_generated_translation_service_list_glossaries_sync.py", + "description": "Sample for GetAdaptiveMtFile", + "file": "translate_v3_generated_translation_service_get_adaptive_mt_file_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_ListGlossaries_sync", + "regionTag": "translate_v3_generated_TranslationService_GetAdaptiveMtFile_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2978,12 +2988,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_list_glossaries_sync.py" + "title": "translate_v3_generated_translation_service_get_adaptive_mt_file_sync.py" }, { "canonical": true, @@ -2993,19 +3003,23 @@ "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", "shortName": "TranslationServiceAsyncClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.translate_document", + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.get_dataset", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.TranslateDocument", + "fullName": "google.cloud.translation.v3.TranslationService.GetDataset", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "TranslateDocument" + "shortName": "GetDataset" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.TranslateDocumentRequest" + "type": "google.cloud.translate_v3.types.GetDatasetRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -3020,22 +3034,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.types.TranslateDocumentResponse", - "shortName": "translate_document" + "resultType": "google.cloud.translate_v3.types.Dataset", + "shortName": "get_dataset" }, - "description": "Sample for TranslateDocument", - "file": "translate_v3_generated_translation_service_translate_document_async.py", + "description": "Sample for GetDataset", + "file": "translate_v3_generated_translation_service_get_dataset_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_TranslateDocument_async", + "regionTag": "translate_v3_generated_TranslationService_GetDataset_async", "segments": [ { - "end": 56, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3045,22 +3059,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_translate_document_async.py" + "title": "translate_v3_generated_translation_service_get_dataset_async.py" }, { "canonical": true, @@ -3069,19 +3083,23 @@ 
"fullName": "google.cloud.translate_v3.TranslationServiceClient", "shortName": "TranslationServiceClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceClient.translate_document", + "fullName": "google.cloud.translate_v3.TranslationServiceClient.get_dataset", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.TranslateDocument", + "fullName": "google.cloud.translation.v3.TranslationService.GetDataset", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "TranslateDocument" + "shortName": "GetDataset" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.TranslateDocumentRequest" + "type": "google.cloud.translate_v3.types.GetDatasetRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -3096,22 +3114,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.types.TranslateDocumentResponse", - "shortName": "translate_document" + "resultType": "google.cloud.translate_v3.types.Dataset", + "shortName": "get_dataset" }, - "description": "Sample for TranslateDocument", - "file": "translate_v3_generated_translation_service_translate_document_sync.py", + "description": "Sample for GetDataset", + "file": "translate_v3_generated_translation_service_get_dataset_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_TranslateDocument_sync", + "regionTag": "translate_v3_generated_TranslationService_GetDataset_sync", "segments": [ { - "end": 56, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3121,22 +3139,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_translate_document_sync.py" + "title": "translate_v3_generated_translation_service_get_dataset_sync.py" }, { "canonical": true, @@ -3146,42 +3164,22 @@ "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", "shortName": "TranslationServiceAsyncClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.translate_text", + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.get_glossary_entry", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.TranslateText", + "fullName": "google.cloud.translation.v3.TranslationService.GetGlossaryEntry", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "TranslateText" + "shortName": "GetGlossaryEntry" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.TranslateTextRequest" + "type": "google.cloud.translate_v3.types.GetGlossaryEntryRequest" }, { - "name": "parent", - "type": "str" - }, - { - "name": "target_language_code", - "type": "str" - }, - { - "name": "contents", - "type": "MutableSequence[str]" - }, - { - "name": "model", - "type": "str" - }, - { - "name": "mime_type", - "type": "str" - }, - { - "name": "source_language_code", + "name": "name", "type": "str" }, { @@ -3197,22 +3195,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.types.TranslateTextResponse", - "shortName": 
"translate_text" + "resultType": "google.cloud.translate_v3.types.GlossaryEntry", + "shortName": "get_glossary_entry" }, - "description": "Sample for TranslateText", - "file": "translate_v3_generated_translation_service_translate_text_async.py", + "description": "Sample for GetGlossaryEntry", + "file": "translate_v3_generated_translation_service_get_glossary_entry_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_TranslateText_async", + "regionTag": "translate_v3_generated_TranslationService_GetGlossaryEntry_async", "segments": [ { - "end": 53, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3222,22 +3220,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_translate_text_async.py" + "title": "translate_v3_generated_translation_service_get_glossary_entry_async.py" }, { "canonical": true, @@ -3246,42 +3244,103 @@ "fullName": "google.cloud.translate_v3.TranslationServiceClient", "shortName": "TranslationServiceClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceClient.translate_text", + "fullName": "google.cloud.translate_v3.TranslationServiceClient.get_glossary_entry", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.TranslateText", + "fullName": "google.cloud.translation.v3.TranslationService.GetGlossaryEntry", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "TranslateText" + "shortName": "GetGlossaryEntry" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.TranslateTextRequest" + "type": "google.cloud.translate_v3.types.GetGlossaryEntryRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { - "name": "target_language_code", - "type": "str" + "name": "retry", + "type": "google.api_core.retry.Retry" }, { - "name": "contents", - "type": "MutableSequence[str]" + "name": "timeout", + "type": "float" }, { - "name": "model", - "type": "str" + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.GlossaryEntry", + "shortName": "get_glossary_entry" + }, + "description": "Sample for GetGlossaryEntry", + "file": "translate_v3_generated_translation_service_get_glossary_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_GetGlossaryEntry_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_get_glossary_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": 
"google.cloud.translate_v3.TranslationServiceAsyncClient.get_glossary", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.GetGlossary", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" }, + "shortName": "GetGlossary" + }, + "parameters": [ { - "name": "mime_type", - "type": "str" + "name": "request", + "type": "google.cloud.translate_v3.types.GetGlossaryRequest" }, { - "name": "source_language_code", + "name": "name", "type": "str" }, { @@ -3297,22 +3356,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.types.TranslateTextResponse", - "shortName": "translate_text" + "resultType": "google.cloud.translate_v3.types.Glossary", + "shortName": "get_glossary" }, - "description": "Sample for TranslateText", - "file": "translate_v3_generated_translation_service_translate_text_sync.py", + "description": "Sample for GetGlossary", + "file": "translate_v3_generated_translation_service_get_glossary_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_TranslateText_sync", + "regionTag": "translate_v3_generated_TranslationService_GetGlossary_async", "segments": [ { - "end": 53, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3322,22 +3381,2911 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_translate_text_sync.py" + "title": "translate_v3_generated_translation_service_get_glossary_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.get_glossary", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.GetGlossary", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "GetGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.GetGlossaryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.Glossary", + "shortName": "get_glossary" + }, + "description": "Sample for GetGlossary", + "file": "translate_v3_generated_translation_service_get_glossary_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_GetGlossary_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_get_glossary_sync.py" + }, + { + "canonical": 
true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.get_model", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.GetModel", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "GetModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.GetModelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.Model", + "shortName": "get_model" + }, + "description": "Sample for GetModel", + "file": "translate_v3_generated_translation_service_get_model_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_GetModel_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_get_model_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.get_model", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.GetModel", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "GetModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.GetModelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.Model", + "shortName": "get_model" + }, + "description": "Sample for GetModel", + "file": "translate_v3_generated_translation_service_get_model_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_GetModel_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_get_model_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": 
"google.cloud.translate_v3.TranslationServiceAsyncClient.get_supported_languages", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.GetSupportedLanguages", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "GetSupportedLanguages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.GetSupportedLanguagesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "display_language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.SupportedLanguages", + "shortName": "get_supported_languages" + }, + "description": "Sample for GetSupportedLanguages", + "file": "translate_v3_generated_translation_service_get_supported_languages_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_GetSupportedLanguages_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_get_supported_languages_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.get_supported_languages", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.GetSupportedLanguages", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "GetSupportedLanguages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.GetSupportedLanguagesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "display_language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.SupportedLanguages", + "shortName": "get_supported_languages" + }, + "description": "Sample for GetSupportedLanguages", + "file": "translate_v3_generated_translation_service_get_supported_languages_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_GetSupportedLanguages_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"translate_v3_generated_translation_service_get_supported_languages_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.import_adaptive_mt_file", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.ImportAdaptiveMtFile", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "ImportAdaptiveMtFile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.ImportAdaptiveMtFileRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.ImportAdaptiveMtFileResponse", + "shortName": "import_adaptive_mt_file" + }, + "description": "Sample for ImportAdaptiveMtFile", + "file": "translate_v3_generated_translation_service_import_adaptive_mt_file_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_ImportAdaptiveMtFile_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_import_adaptive_mt_file_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.import_adaptive_mt_file", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.ImportAdaptiveMtFile", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "ImportAdaptiveMtFile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.ImportAdaptiveMtFileRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.ImportAdaptiveMtFileResponse", + "shortName": "import_adaptive_mt_file" + }, + "description": "Sample for ImportAdaptiveMtFile", + "file": "translate_v3_generated_translation_service_import_adaptive_mt_file_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_ImportAdaptiveMtFile_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 
55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_import_adaptive_mt_file_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.import_data", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.ImportData", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "ImportData" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.ImportDataRequest" + }, + { + "name": "dataset", + "type": "str" + }, + { + "name": "input_config", + "type": "google.cloud.translate_v3.types.DatasetInputConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_data" + }, + "description": "Sample for ImportData", + "file": "translate_v3_generated_translation_service_import_data_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_ImportData_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_import_data_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.import_data", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.ImportData", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "ImportData" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.ImportDataRequest" + }, + { + "name": "dataset", + "type": "str" + }, + { + "name": "input_config", + "type": "google.cloud.translate_v3.types.DatasetInputConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_data" + }, + "description": "Sample for ImportData", + "file": "translate_v3_generated_translation_service_import_data_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_ImportData_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, 
+ { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_import_data_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.list_adaptive_mt_datasets", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.ListAdaptiveMtDatasets", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "ListAdaptiveMtDatasets" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.ListAdaptiveMtDatasetsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtDatasetsAsyncPager", + "shortName": "list_adaptive_mt_datasets" + }, + "description": "Sample for ListAdaptiveMtDatasets", + "file": "translate_v3_generated_translation_service_list_adaptive_mt_datasets_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_ListAdaptiveMtDatasets_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_list_adaptive_mt_datasets_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.list_adaptive_mt_datasets", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.ListAdaptiveMtDatasets", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "ListAdaptiveMtDatasets" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.ListAdaptiveMtDatasetsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtDatasetsPager", + "shortName": "list_adaptive_mt_datasets" + }, + "description": "Sample for ListAdaptiveMtDatasets", + "file": "translate_v3_generated_translation_service_list_adaptive_mt_datasets_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_ListAdaptiveMtDatasets_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + 
{ + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_list_adaptive_mt_datasets_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.list_adaptive_mt_files", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.ListAdaptiveMtFiles", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "ListAdaptiveMtFiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.ListAdaptiveMtFilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtFilesAsyncPager", + "shortName": "list_adaptive_mt_files" + }, + "description": "Sample for ListAdaptiveMtFiles", + "file": "translate_v3_generated_translation_service_list_adaptive_mt_files_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_ListAdaptiveMtFiles_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_list_adaptive_mt_files_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.list_adaptive_mt_files", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.ListAdaptiveMtFiles", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "ListAdaptiveMtFiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.ListAdaptiveMtFilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtFilesPager", + "shortName": "list_adaptive_mt_files" + }, + "description": "Sample for ListAdaptiveMtFiles", + "file": "translate_v3_generated_translation_service_list_adaptive_mt_files_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_ListAdaptiveMtFiles_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + 
"type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_list_adaptive_mt_files_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.list_adaptive_mt_sentences", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.ListAdaptiveMtSentences", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "ListAdaptiveMtSentences" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.ListAdaptiveMtSentencesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtSentencesAsyncPager", + "shortName": "list_adaptive_mt_sentences" + }, + "description": "Sample for ListAdaptiveMtSentences", + "file": "translate_v3_generated_translation_service_list_adaptive_mt_sentences_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_ListAdaptiveMtSentences_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_list_adaptive_mt_sentences_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.list_adaptive_mt_sentences", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.ListAdaptiveMtSentences", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "ListAdaptiveMtSentences" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.ListAdaptiveMtSentencesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtSentencesPager", + "shortName": "list_adaptive_mt_sentences" + }, + "description": "Sample for ListAdaptiveMtSentences", + "file": "translate_v3_generated_translation_service_list_adaptive_mt_sentences_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"translate_v3_generated_TranslationService_ListAdaptiveMtSentences_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_list_adaptive_mt_sentences_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.list_datasets", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.ListDatasets", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "ListDatasets" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.ListDatasetsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListDatasetsAsyncPager", + "shortName": "list_datasets" + }, + "description": "Sample for ListDatasets", + "file": "translate_v3_generated_translation_service_list_datasets_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_ListDatasets_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_list_datasets_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.list_datasets", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.ListDatasets", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "ListDatasets" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.ListDatasetsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListDatasetsPager", + "shortName": "list_datasets" + }, + "description": "Sample for ListDatasets", + "file": "translate_v3_generated_translation_service_list_datasets_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"translate_v3_generated_TranslationService_ListDatasets_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_list_datasets_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.list_examples", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.ListExamples", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "ListExamples" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.ListExamplesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListExamplesAsyncPager", + "shortName": "list_examples" + }, + "description": "Sample for ListExamples", + "file": "translate_v3_generated_translation_service_list_examples_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_ListExamples_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_list_examples_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.list_examples", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.ListExamples", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "ListExamples" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.ListExamplesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListExamplesPager", + "shortName": "list_examples" + }, + "description": "Sample for ListExamples", + "file": "translate_v3_generated_translation_service_list_examples_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_ListExamples_sync", + 
"segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_list_examples_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.list_glossaries", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.ListGlossaries", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "ListGlossaries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.ListGlossariesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListGlossariesAsyncPager", + "shortName": "list_glossaries" + }, + "description": "Sample for ListGlossaries", + "file": "translate_v3_generated_translation_service_list_glossaries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_ListGlossaries_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_list_glossaries_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.list_glossaries", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.ListGlossaries", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "ListGlossaries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.ListGlossariesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListGlossariesPager", + "shortName": "list_glossaries" + }, + "description": "Sample for ListGlossaries", + "file": "translate_v3_generated_translation_service_list_glossaries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_ListGlossaries_sync", + "segments": [ + { + "end": 
52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_list_glossaries_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.list_glossary_entries", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.ListGlossaryEntries", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "ListGlossaryEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.ListGlossaryEntriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListGlossaryEntriesAsyncPager", + "shortName": "list_glossary_entries" + }, + "description": "Sample for ListGlossaryEntries", + "file": "translate_v3_generated_translation_service_list_glossary_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_ListGlossaryEntries_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_list_glossary_entries_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.list_glossary_entries", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.ListGlossaryEntries", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "ListGlossaryEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.ListGlossaryEntriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListGlossaryEntriesPager", + "shortName": "list_glossary_entries" + }, + "description": "Sample for ListGlossaryEntries", + "file": "translate_v3_generated_translation_service_list_glossary_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"translate_v3_generated_TranslationService_ListGlossaryEntries_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_list_glossary_entries_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.list_models", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.ListModels", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "ListModels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.ListModelsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListModelsAsyncPager", + "shortName": "list_models" + }, + "description": "Sample for ListModels", + "file": "translate_v3_generated_translation_service_list_models_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_ListModels_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_list_models_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.list_models", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.ListModels", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "ListModels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.ListModelsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListModelsPager", + "shortName": "list_models" + }, + "description": "Sample for ListModels", + "file": "translate_v3_generated_translation_service_list_models_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_ListModels_sync", + "segments": [ + { + 
"end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_list_models_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.romanize_text", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.RomanizeText", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "RomanizeText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.RomanizeTextRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.RomanizeTextResponse", + "shortName": "romanize_text" + }, + "description": "Sample for RomanizeText", + "file": "translate_v3_generated_translation_service_romanize_text_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_RomanizeText_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_romanize_text_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.romanize_text", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.RomanizeText", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "RomanizeText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.RomanizeTextRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.RomanizeTextResponse", + "shortName": "romanize_text" + }, + "description": "Sample for RomanizeText", + "file": "translate_v3_generated_translation_service_romanize_text_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_RomanizeText_sync", + "segments": [ + { 
+ "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_romanize_text_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.translate_document", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.TranslateDocument", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "TranslateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.TranslateDocumentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.TranslateDocumentResponse", + "shortName": "translate_document" + }, + "description": "Sample for TranslateDocument", + "file": "translate_v3_generated_translation_service_translate_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_TranslateDocument_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_translate_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.translate_document", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.TranslateDocument", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "TranslateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.TranslateDocumentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.TranslateDocumentResponse", + "shortName": "translate_document" + }, + "description": "Sample for TranslateDocument", + "file": "translate_v3_generated_translation_service_translate_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_TranslateDocument_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 
40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_translate_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.translate_text", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.TranslateText", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "TranslateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.TranslateTextRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "target_language_code", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[str]" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "mime_type", + "type": "str" + }, + { + "name": "source_language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.TranslateTextResponse", + "shortName": "translate_text" + }, + "description": "Sample for TranslateText", + "file": "translate_v3_generated_translation_service_translate_text_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_TranslateText_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_translate_text_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.translate_text", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.TranslateText", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "TranslateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.TranslateTextRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "target_language_code", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[str]" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "mime_type", + "type": "str" + }, + { + "name": "source_language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.translate_v3.types.TranslateTextResponse", + "shortName": "translate_text" + }, + "description": "Sample for TranslateText", + "file": "translate_v3_generated_translation_service_translate_text_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_TranslateText_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_translate_text_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.update_glossary_entry", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.UpdateGlossaryEntry", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "UpdateGlossaryEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.UpdateGlossaryEntryRequest" + }, + { + "name": "glossary_entry", + "type": "google.cloud.translate_v3.types.GlossaryEntry" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.GlossaryEntry", + "shortName": "update_glossary_entry" + }, + "description": "Sample for UpdateGlossaryEntry", + "file": "translate_v3_generated_translation_service_update_glossary_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_UpdateGlossaryEntry_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_update_glossary_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.update_glossary_entry", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.UpdateGlossaryEntry", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "UpdateGlossaryEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.UpdateGlossaryEntryRequest" + }, + { + "name": "glossary_entry", + "type": "google.cloud.translate_v3.types.GlossaryEntry" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.GlossaryEntry", + "shortName": "update_glossary_entry" + }, + "description": "Sample for UpdateGlossaryEntry", + "file": "translate_v3_generated_translation_service_update_glossary_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_UpdateGlossaryEntry_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_update_glossary_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.update_glossary", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.UpdateGlossary", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "UpdateGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.UpdateGlossaryRequest" + }, + { + "name": "glossary", + "type": "google.cloud.translate_v3.types.Glossary" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_glossary" + }, + "description": "Sample for UpdateGlossary", + "file": "translate_v3_generated_translation_service_update_glossary_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_UpdateGlossary_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_update_glossary_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.update_glossary", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.UpdateGlossary", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "UpdateGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.UpdateGlossaryRequest" + }, + { + "name": "glossary", + "type": "google.cloud.translate_v3.types.Glossary" + }, + { + "name": "update_mask", + 
"type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_glossary" + }, + "description": "Sample for UpdateGlossary", + "file": "translate_v3_generated_translation_service_update_glossary_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_UpdateGlossary_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_update_glossary_sync.py" } ] } diff --git a/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3beta1.json b/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3beta1.json index 31266f429378..42d10a5032f6 100644 --- a/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3beta1.json +++ b/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-translate", - "version": "3.15.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_dataset_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_dataset_async.py new file mode 100644 index 000000000000..9df6320a9211 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_dataset_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_CreateDataset_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_create_dataset(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.CreateDatasetRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_dataset(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_CreateDataset_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_dataset_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_dataset_sync.py new file mode 100644 index 000000000000..8f2dab2667b1 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_dataset_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_CreateDataset_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_create_dataset(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.CreateDatasetRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_dataset(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_CreateDataset_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_entry_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_entry_async.py new file mode 100644 index 000000000000..5c95ee758ec8 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_entry_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGlossaryEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_CreateGlossaryEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_create_glossary_entry(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.CreateGlossaryEntryRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_glossary_entry(request=request) + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_CreateGlossaryEntry_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_entry_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_entry_sync.py new file mode 100644 index 000000000000..f02bbb04dbc1 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_entry_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGlossaryEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_CreateGlossaryEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_create_glossary_entry(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.CreateGlossaryEntryRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_glossary_entry(request=request) + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_CreateGlossaryEntry_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_model_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_model_async.py new file mode 100644 index 000000000000..1581f634b25a --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_model_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_CreateModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_create_model(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.CreateModelRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_CreateModel_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_model_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_model_sync.py new file mode 100644 index 000000000000..4fd92c8d415b --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_model_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_CreateModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_create_model(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.CreateModelRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_CreateModel_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_dataset_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_dataset_async.py new file mode 100644 index 000000000000..ee870b0704f0 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_dataset_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_DeleteDataset_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_delete_dataset(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.DeleteDatasetRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_dataset(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_DeleteDataset_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_dataset_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_dataset_sync.py new file mode 100644 index 000000000000..c75e1799ceb0 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_dataset_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_DeleteDataset_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_delete_dataset(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.DeleteDatasetRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_dataset(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_DeleteDataset_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_glossary_entry_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_glossary_entry_async.py new file mode 100644 index 000000000000..a8993ec15dcf --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_glossary_entry_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGlossaryEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_DeleteGlossaryEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_delete_glossary_entry(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.DeleteGlossaryEntryRequest( + name="name_value", + ) + + # Make the request + await client.delete_glossary_entry(request=request) + + +# [END translate_v3_generated_TranslationService_DeleteGlossaryEntry_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_glossary_entry_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_glossary_entry_sync.py new file mode 100644 index 000000000000..522cfb8f0c32 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_glossary_entry_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGlossaryEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_DeleteGlossaryEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_delete_glossary_entry(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.DeleteGlossaryEntryRequest( + name="name_value", + ) + + # Make the request + client.delete_glossary_entry(request=request) + + +# [END translate_v3_generated_TranslationService_DeleteGlossaryEntry_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_model_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_model_async.py new file mode 100644 index 000000000000..5a242054dc85 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_model_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_DeleteModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_delete_model(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.DeleteModelRequest( + name="name_value", + ) + + # Make the request + operation = await client.delete_model(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_DeleteModel_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_model_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_model_sync.py new file mode 100644 index 000000000000..03a9e044e6a2 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_model_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_DeleteModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_delete_model(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.DeleteModelRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_DeleteModel_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_export_data_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_export_data_async.py new file mode 100644 index 000000000000..54d19ae8c38f --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_export_data_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportData +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_ExportData_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_export_data(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + output_config = translate_v3.DatasetOutputConfig() + output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" + + request = translate_v3.ExportDataRequest( + dataset="dataset_value", + output_config=output_config, + ) + + # Make the request + operation = await client.export_data(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_ExportData_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_export_data_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_export_data_sync.py new file mode 100644 index 000000000000..b7520d80d1c3 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_export_data_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportData +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_ExportData_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_export_data(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + output_config = translate_v3.DatasetOutputConfig() + output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" + + request = translate_v3.ExportDataRequest( + dataset="dataset_value", + output_config=output_config, + ) + + # Make the request + operation = client.export_data(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_ExportData_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_dataset_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_dataset_async.py new file mode 100644 index 000000000000..cf7a1f998375 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_dataset_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_GetDataset_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_get_dataset(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.GetDatasetRequest( + name="name_value", + ) + + # Make the request + response = await client.get_dataset(request=request) + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_GetDataset_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_dataset_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_dataset_sync.py new file mode 100644 index 000000000000..406f0ae572f1 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_dataset_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_GetDataset_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_get_dataset(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.GetDatasetRequest( + name="name_value", + ) + + # Make the request + response = client.get_dataset(request=request) + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_GetDataset_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_entry_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_entry_async.py new file mode 100644 index 000000000000..0725609cf6a3 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_entry_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGlossaryEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_GetGlossaryEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_get_glossary_entry(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.GetGlossaryEntryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_glossary_entry(request=request) + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_GetGlossaryEntry_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_entry_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_entry_sync.py new file mode 100644 index 000000000000..fb0c3bb6568d --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_entry_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGlossaryEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_GetGlossaryEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_get_glossary_entry(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.GetGlossaryEntryRequest( + name="name_value", + ) + + # Make the request + response = client.get_glossary_entry(request=request) + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_GetGlossaryEntry_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_model_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_model_async.py new file mode 100644 index 000000000000..52e7f0b68289 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_model_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_GetModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_get_model(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.GetModelRequest( + name="name_value", + ) + + # Make the request + response = await client.get_model(request=request) + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_GetModel_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_model_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_model_sync.py new file mode 100644 index 000000000000..311fdcf1777f --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_model_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_GetModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_get_model(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.GetModelRequest( + name="name_value", + ) + + # Make the request + response = client.get_model(request=request) + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_GetModel_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_data_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_data_async.py new file mode 100644 index 000000000000..09d9390451b3 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_data_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportData +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_ImportData_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_import_data(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.ImportDataRequest( + dataset="dataset_value", + ) + + # Make the request + operation = await client.import_data(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_ImportData_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_data_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_data_sync.py new file mode 100644 index 000000000000..058caa1d7727 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_data_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportData +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_ImportData_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_import_data(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.ImportDataRequest( + dataset="dataset_value", + ) + + # Make the request + operation = client.import_data(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_ImportData_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_datasets_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_datasets_async.py new file mode 100644 index 000000000000..6f513ac864a1 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_datasets_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDatasets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_ListDatasets_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_list_datasets(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.ListDatasetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_datasets(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END translate_v3_generated_TranslationService_ListDatasets_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_datasets_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_datasets_sync.py new file mode 100644 index 000000000000..e59f10c2d85f --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_datasets_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDatasets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_ListDatasets_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_list_datasets(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.ListDatasetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_datasets(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END translate_v3_generated_TranslationService_ListDatasets_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_examples_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_examples_async.py new file mode 100644 index 000000000000..1a596c996f1e --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_examples_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListExamples +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_ListExamples_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_list_examples(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.ListExamplesRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_examples(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END translate_v3_generated_TranslationService_ListExamples_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_examples_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_examples_sync.py new file mode 100644 index 000000000000..d7b2eb1ebd1d --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_examples_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListExamples +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_ListExamples_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_list_examples(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.ListExamplesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_examples(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END translate_v3_generated_TranslationService_ListExamples_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossary_entries_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossary_entries_async.py new file mode 100644 index 000000000000..a9911bb0f8ff --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossary_entries_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGlossaryEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_ListGlossaryEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_list_glossary_entries(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.ListGlossaryEntriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_glossary_entries(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END translate_v3_generated_TranslationService_ListGlossaryEntries_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossary_entries_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossary_entries_sync.py new file mode 100644 index 000000000000..294bec2c6e96 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossary_entries_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGlossaryEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_ListGlossaryEntries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_list_glossary_entries(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.ListGlossaryEntriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossary_entries(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END translate_v3_generated_TranslationService_ListGlossaryEntries_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_models_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_models_async.py new file mode 100644 index 000000000000..c2f164cc4d90 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_models_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_ListModels_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_list_models(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.ListModelsRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_models(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END translate_v3_generated_TranslationService_ListModels_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_models_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_models_sync.py new file mode 100644 index 000000000000..f8e93748e9cb --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_models_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_ListModels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_list_models(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.ListModelsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_models(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END translate_v3_generated_TranslationService_ListModels_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_romanize_text_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_romanize_text_async.py new file mode 100644 index 000000000000..ca28ea6c9b13 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_romanize_text_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RomanizeText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_RomanizeText_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_romanize_text(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.RomanizeTextRequest( + parent="parent_value", + contents=['contents_value1', 'contents_value2'], + ) + + # Make the request + response = await client.romanize_text(request=request) + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_RomanizeText_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_romanize_text_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_romanize_text_sync.py new file mode 100644 index 000000000000..64262513c25b --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_romanize_text_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RomanizeText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_RomanizeText_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_romanize_text(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.RomanizeTextRequest( + parent="parent_value", + contents=['contents_value1', 'contents_value2'], + ) + + # Make the request + response = client.romanize_text(request=request) + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_RomanizeText_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_async.py new file mode 100644 index 000000000000..90a48e9e38fa --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGlossary +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_UpdateGlossary_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_update_glossary(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + glossary = translate_v3.Glossary() + glossary.name = "name_value" + + request = translate_v3.UpdateGlossaryRequest( + glossary=glossary, + ) + + # Make the request + operation = await client.update_glossary(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_UpdateGlossary_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_entry_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_entry_async.py new file mode 100644 index 000000000000..3b7efa460f30 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_entry_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGlossaryEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_UpdateGlossaryEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import translate_v3
+
+
+async def sample_update_glossary_entry():
+    # Create a client
+    client = translate_v3.TranslationServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = translate_v3.UpdateGlossaryEntryRequest(
+    )
+
+    # Make the request
+    response = await client.update_glossary_entry(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END translate_v3_generated_TranslationService_UpdateGlossaryEntry_async]
diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_entry_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_entry_sync.py
new file mode 100644
index 000000000000..05a8212f9866
--- /dev/null
+++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_entry_sync.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateGlossaryEntry
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-translate
+
+
+# [START translate_v3_generated_TranslationService_UpdateGlossaryEntry_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import translate_v3
+
+
+def sample_update_glossary_entry():
+    # Create a client
+    client = translate_v3.TranslationServiceClient()
+
+    # Initialize request argument(s)
+    request = translate_v3.UpdateGlossaryEntryRequest(
+    )
+
+    # Make the request
+    response = client.update_glossary_entry(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END translate_v3_generated_TranslationService_UpdateGlossaryEntry_sync]
diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_sync.py
new file mode 100644
index 000000000000..676cce918fb6
--- /dev/null
+++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_sync.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateGlossary
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-translate
+
+
+# [START translate_v3_generated_TranslationService_UpdateGlossary_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import translate_v3
+
+
+def sample_update_glossary():
+    # Create a client
+    client = translate_v3.TranslationServiceClient()
+
+    # Initialize request argument(s)
+    glossary = translate_v3.Glossary()
+    glossary.name = "name_value"
+
+    request = translate_v3.UpdateGlossaryRequest(
+        glossary=glossary,
+    )
+
+    # Make the request
+    operation = client.update_glossary(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END translate_v3_generated_TranslationService_UpdateGlossary_sync]
diff --git a/packages/google-cloud-translate/scripts/fixup_translate_v3_keywords.py b/packages/google-cloud-translate/scripts/fixup_translate_v3_keywords.py
index ae06dc3dd594..8f65400c34cd 100644
--- a/packages/google-cloud-translate/scripts/fixup_translate_v3_keywords.py
+++ b/packages/google-cloud-translate/scripts/fixup_translate_v3_keywords.py
@@ -39,26 +39,44 @@ def partition(
 class translateCallTransformer(cst.CSTTransformer):
     CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
     METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
-        'adaptive_mt_translate': ('parent', 'dataset', 'content', ),
+        'adaptive_mt_translate': ('parent', 'dataset', 'content', 'reference_sentence_config', 'glossary_config', ),
         'batch_translate_document': ('parent', 'source_language_code', 'target_language_codes', 'input_configs', 'output_config', 'models', 'glossaries', 'format_conversions', 'customized_attribution', 'enable_shadow_removal_native_pdf', 'enable_rotation_correction', ),
         'batch_translate_text': ('parent', 'source_language_code', 'target_language_codes', 'input_configs', 'output_config', 'models', 'glossaries', 'labels', ),
         'create_adaptive_mt_dataset': ('parent', 'adaptive_mt_dataset', ),
+        'create_dataset': ('parent', 'dataset', ),
         'create_glossary': ('parent', 'glossary', ),
+        'create_glossary_entry': ('parent', 'glossary_entry', ),
+        'create_model': ('parent', 'model', ),
         'delete_adaptive_mt_dataset': ('name', ),
         'delete_adaptive_mt_file': ('name', ),
+        'delete_dataset': ('name', ),
         'delete_glossary': ('name', ),
+        'delete_glossary_entry': ('name', ),
+        'delete_model': ('name', ),
         'detect_language': ('parent', 'model', 'content', 'mime_type', 'labels', ),
+        'export_data': ('dataset', 'output_config', ),
         'get_adaptive_mt_dataset': ('name', ),
         'get_adaptive_mt_file': ('name', ),
+        'get_dataset': ('name', ),
         'get_glossary': ('name', ),
+        'get_glossary_entry': ('name', ),
+        'get_model': ('name', ),
         'get_supported_languages': ('parent', 'display_language_code', 'model', ),
         'import_adaptive_mt_file': ('parent', 'file_input_source', 'gcs_input_source', ),
+        'import_data': ('dataset', 'input_config', ),
         'list_adaptive_mt_datasets': ('parent', 'page_size', 'page_token', 'filter', ),
         'list_adaptive_mt_files': ('parent', 'page_size', 'page_token', ),
         'list_adaptive_mt_sentences': ('parent', 'page_size', 'page_token', ),
+        'list_datasets': ('parent', 'page_size', 'page_token', ),
+        'list_examples': ('parent', 'filter', 'page_size', 'page_token', ),
         'list_glossaries': ('parent', 'page_size', 'page_token', 'filter', ),
+        'list_glossary_entries': ('parent', 'page_size', 'page_token', ),
+        'list_models': ('parent', 'filter', 'page_size', 'page_token', ),
+        'romanize_text': ('parent', 'contents', 'source_language_code', ),
         'translate_document': ('parent', 'target_language_code', 'document_input_config', 'source_language_code', 'document_output_config', 'model', 'glossary_config', 'labels', 'customized_attribution', 'is_translate_native_pdf_only', 'enable_shadow_removal_native_pdf', 'enable_rotation_correction', ),
-        'translate_text': ('contents', 'target_language_code', 'parent', 'mime_type', 'source_language_code', 'model', 'glossary_config', 'labels', ),
+        'translate_text': ('contents', 'target_language_code', 'parent', 'mime_type', 'source_language_code', 'model', 'glossary_config', 'transliteration_config', 'labels', ),
+        'update_glossary': ('glossary', 'update_mask', ),
+        'update_glossary_entry': ('glossary_entry', ),
     }

     def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
diff --git a/packages/google-cloud-translate/setup.py b/packages/google-cloud-translate/setup.py
index 2dfc9eb45fad..388556f63f0a 100644
--- a/packages/google-cloud-translate/setup.py
+++ b/packages/google-cloud-translate/setup.py
@@ -46,6 +46,7 @@
     "google-cloud-core >= 1.4.4, <3.0.0dev",
     "proto-plus >= 1.22.3, <2.0.0dev",
     "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
+    "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev",
 ]

 url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-translate"
diff --git a/packages/google-cloud-translate/testing/constraints-3.10.txt b/packages/google-cloud-translate/testing/constraints-3.10.txt
index ed7f9aed2559..ad3f0fa58e2d 100644
--- a/packages/google-cloud-translate/testing/constraints-3.10.txt
+++ b/packages/google-cloud-translate/testing/constraints-3.10.txt
@@ -4,3 +4,4 @@
 google-api-core
 proto-plus
 protobuf
+grpc-google-iam-v1
diff --git a/packages/google-cloud-translate/testing/constraints-3.11.txt b/packages/google-cloud-translate/testing/constraints-3.11.txt
index ed7f9aed2559..ad3f0fa58e2d 100644
--- a/packages/google-cloud-translate/testing/constraints-3.11.txt
+++ b/packages/google-cloud-translate/testing/constraints-3.11.txt
@@ -4,3 +4,4 @@
 google-api-core
 proto-plus
 protobuf
+grpc-google-iam-v1
diff --git a/packages/google-cloud-translate/testing/constraints-3.12.txt b/packages/google-cloud-translate/testing/constraints-3.12.txt
index ed7f9aed2559..ad3f0fa58e2d 100644
--- a/packages/google-cloud-translate/testing/constraints-3.12.txt
+++ b/packages/google-cloud-translate/testing/constraints-3.12.txt
@@ -4,3 +4,4 @@
 google-api-core
 proto-plus
 protobuf
+grpc-google-iam-v1
diff --git a/packages/google-cloud-translate/testing/constraints-3.7.txt b/packages/google-cloud-translate/testing/constraints-3.7.txt
index c8530684de36..6ef964d6f0d7 100644
--- a/packages/google-cloud-translate/testing/constraints-3.7.txt
+++ b/packages/google-cloud-translate/testing/constraints-3.7.txt
@@ -9,3 +9,4 @@ google-auth==2.14.1
 proto-plus==1.22.3
 google-cloud-core==1.4.4
 protobuf==3.20.2
+grpc-google-iam-v1==0.12.4
diff --git a/packages/google-cloud-translate/testing/constraints-3.8.txt b/packages/google-cloud-translate/testing/constraints-3.8.txt
index ed7f9aed2559..ad3f0fa58e2d 100644
--- a/packages/google-cloud-translate/testing/constraints-3.8.txt
+++ b/packages/google-cloud-translate/testing/constraints-3.8.txt
@@ -4,3 +4,4 @@
 google-api-core
 proto-plus
 protobuf
+grpc-google-iam-v1
diff --git a/packages/google-cloud-translate/testing/constraints-3.9.txt b/packages/google-cloud-translate/testing/constraints-3.9.txt
index ed7f9aed2559..ad3f0fa58e2d 100644
--- a/packages/google-cloud-translate/testing/constraints-3.9.txt
+++ b/packages/google-cloud-translate/testing/constraints-3.9.txt
@@ -4,3 +4,4 @@
 google-api-core
 proto-plus
 protobuf
+grpc-google-iam-v1
diff --git a/packages/google-cloud-translate/tests/unit/gapic/translate_v3/test_translation_service.py b/packages/google-cloud-translate/tests/unit/gapic/translate_v3/test_translation_service.py
index a464fc834549..afa790ddb24f 100644
--- a/packages/google-cloud-translate/tests/unit/gapic/translate_v3/test_translation_service.py
+++ b/packages/google-cloud-translate/tests/unit/gapic/translate_v3/test_translation_service.py
@@ -42,8 +42,14 @@
 import google.auth
 from google.auth import credentials as ga_credentials
 from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.location import locations_pb2
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import options_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
 from google.longrunning import operations_pb2  # type: ignore
 from google.oauth2 import service_account
+from google.protobuf import empty_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
 from google.protobuf import json_format
 from google.protobuf import timestamp_pb2  # type: ignore
 import grpc
@@ -60,7 +66,12 @@
     pagers,
     transports,
 )
-from google.cloud.translate_v3.types import adaptive_mt, common, translation_service
+from google.cloud.translate_v3.types import (
+    adaptive_mt,
+    automl_translation,
+    common,
+    translation_service,
+)


 def client_cert_source_callback():
@@ -1591,11 +1602,11 @@ async def test_translate_text_flattened_error_async():
 @pytest.mark.parametrize(
     "request_type",
     [
-        translation_service.DetectLanguageRequest,
+        translation_service.RomanizeTextRequest,
         dict,
     ],
 )
-def test_detect_language(request_type, transport: str = "grpc"):
+def test_romanize_text(request_type, transport: str = "grpc"):
     client = TranslationServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
         transport=transport,
@@ -1606,22 +1617,22 @@ def test_detect_language(request_type, transport: str = "grpc"):
     request = request_type()

     # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.detect_language), "__call__") as call: + with mock.patch.object(type(client.transport.romanize_text), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.detect_language() + client.romanize_text() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.DetectLanguageRequest() + assert args[0] == translation_service.RomanizeTextRequest() -def test_detect_language_non_empty_request_with_auto_populated_field(): +def test_romanize_text_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = TranslationServiceClient( @@ -1651,30 +1662,26 @@ def test_detect_language_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = translation_service.DetectLanguageRequest( + request = translation_service.RomanizeTextRequest( parent="parent_value", - model="model_value", - content="content_value", - mime_type="mime_type_value", + source_language_code="source_language_code_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.detect_language), "__call__") as call: + with mock.patch.object(type(client.transport.romanize_text), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.detect_language(request=request) + client.romanize_text(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.DetectLanguageRequest( + assert args[0] == translation_service.RomanizeTextRequest( parent="parent_value", - model="model_value", - content="content_value", - mime_type="mime_type_value", + source_language_code="source_language_code_value", ) -def test_detect_language_use_cached_wrapped_rpc(): +def test_romanize_text_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -1688,21 +1695,21 @@ def test_detect_language_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.detect_language in client._transport._wrapped_methods + assert client._transport.romanize_text in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.detect_language] = mock_rpc + client._transport._wrapped_methods[client._transport.romanize_text] = mock_rpc request = {} - client.detect_language(request) + client.romanize_text(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.detect_language(request) + client.romanize_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -1710,7 +1717,7 @@ def test_detect_language_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_detect_language_empty_call_async(): +async def test_romanize_text_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceAsyncClient( @@ -1719,19 +1726,19 @@ async def test_detect_language_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.detect_language), "__call__") as call: + with mock.patch.object(type(client.transport.romanize_text), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - translation_service.DetectLanguageResponse() + translation_service.RomanizeTextResponse() ) - response = await client.detect_language() + response = await client.romanize_text() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.DetectLanguageRequest() + assert args[0] == translation_service.RomanizeTextRequest() @pytest.mark.asyncio -async def test_detect_language_async_use_cached_wrapped_rpc( +async def test_romanize_text_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1748,23 +1755,23 @@ async def test_detect_language_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.detect_language + client._client._transport.romanize_text in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.detect_language + client._client._transport.romanize_text ] = mock_object request = {} - await client.detect_language(request) + await client.romanize_text(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.detect_language(request) + await client.romanize_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -1772,9 +1779,9 @@ async def test_detect_language_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_detect_language_async( +async def test_romanize_text_async( transport: str = "grpc_asyncio", - request_type=translation_service.DetectLanguageRequest, + request_type=translation_service.RomanizeTextRequest, ): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1786,43 +1793,43 @@ async def test_detect_language_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.detect_language), "__call__") as call: + with mock.patch.object(type(client.transport.romanize_text), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - translation_service.DetectLanguageResponse() + translation_service.RomanizeTextResponse() ) - response = await client.detect_language(request) + response = await client.romanize_text(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = translation_service.DetectLanguageRequest() + request = translation_service.RomanizeTextRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, translation_service.DetectLanguageResponse) + assert isinstance(response, translation_service.RomanizeTextResponse) @pytest.mark.asyncio -async def test_detect_language_async_from_dict(): - await test_detect_language_async(request_type=dict) +async def test_romanize_text_async_from_dict(): + await test_romanize_text_async(request_type=dict) -def test_detect_language_field_headers(): +def test_romanize_text_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = translation_service.DetectLanguageRequest() + request = translation_service.RomanizeTextRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.detect_language), "__call__") as call: - call.return_value = translation_service.DetectLanguageResponse() - client.detect_language(request) + with mock.patch.object(type(client.transport.romanize_text), "__call__") as call: + call.return_value = translation_service.RomanizeTextResponse() + client.romanize_text(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -1838,23 +1845,23 @@ def test_detect_language_field_headers(): @pytest.mark.asyncio -async def test_detect_language_field_headers_async(): +async def test_romanize_text_field_headers_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = translation_service.DetectLanguageRequest() + request = translation_service.RomanizeTextRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.detect_language), "__call__") as call: + with mock.patch.object(type(client.transport.romanize_text), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - translation_service.DetectLanguageResponse() + translation_service.RomanizeTextResponse() ) - await client.detect_language(request) + await client.romanize_text(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -1869,22 +1876,20 @@ async def test_detect_language_field_headers_async(): ) in kw["metadata"] -def test_detect_language_flattened(): +def test_romanize_text_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.detect_language), "__call__") as call: + with mock.patch.object(type(client.transport.romanize_text), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = translation_service.DetectLanguageResponse() + call.return_value = translation_service.RomanizeTextResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.detect_language( + client.romanize_text( parent="parent_value", - model="model_value", - mime_type="mime_type_value", - content="content_value", + contents=["contents_value"], ) # Establish that the underlying call was made with the expected @@ -1894,16 +1899,12 @@ def test_detect_language_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].model - mock_val = "model_value" - assert arg == mock_val - arg = args[0].mime_type - mock_val = "mime_type_value" + arg = args[0].contents + mock_val = ["contents_value"] assert arg == mock_val - assert args[0].content == "content_value" -def test_detect_language_flattened_error(): +def test_romanize_text_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -1911,36 +1912,32 @@ def test_detect_language_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.detect_language( - translation_service.DetectLanguageRequest(), + client.romanize_text( + translation_service.RomanizeTextRequest(), parent="parent_value", - model="model_value", - mime_type="mime_type_value", - content="content_value", + contents=["contents_value"], ) @pytest.mark.asyncio -async def test_detect_language_flattened_async(): +async def test_romanize_text_flattened_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.detect_language), "__call__") as call: + with mock.patch.object(type(client.transport.romanize_text), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = translation_service.DetectLanguageResponse() + call.return_value = translation_service.RomanizeTextResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - translation_service.DetectLanguageResponse() + translation_service.RomanizeTextResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.detect_language( + response = await client.romanize_text( parent="parent_value", - model="model_value", - mime_type="mime_type_value", - content="content_value", + contents=["contents_value"], ) # Establish that the underlying call was made with the expected @@ -1950,17 +1947,13 @@ async def test_detect_language_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].model - mock_val = "model_value" - assert arg == mock_val - arg = args[0].mime_type - mock_val = "mime_type_value" + arg = args[0].contents + mock_val = ["contents_value"] assert arg == mock_val - assert args[0].content == "content_value" @pytest.mark.asyncio -async def test_detect_language_flattened_error_async(): +async def test_romanize_text_flattened_error_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -1968,23 +1961,21 @@ async def test_detect_language_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.detect_language( - translation_service.DetectLanguageRequest(), + await client.romanize_text( + translation_service.RomanizeTextRequest(), parent="parent_value", - model="model_value", - mime_type="mime_type_value", - content="content_value", + contents=["contents_value"], ) @pytest.mark.parametrize( "request_type", [ - translation_service.GetSupportedLanguagesRequest, + translation_service.DetectLanguageRequest, dict, ], ) -def test_get_supported_languages(request_type, transport: str = "grpc"): +def test_detect_language(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1995,24 +1986,22 @@ def test_get_supported_languages(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_supported_languages), "__call__" - ) as call: + with mock.patch.object(type(client.transport.detect_language), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = translation_service.SupportedLanguages() - response = client.get_supported_languages(request) + call.return_value = translation_service.DetectLanguageResponse() + response = client.detect_language(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = translation_service.GetSupportedLanguagesRequest() + request = translation_service.DetectLanguageRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, translation_service.SupportedLanguages) + assert isinstance(response, translation_service.DetectLanguageResponse) -def test_get_supported_languages_empty_call(): +def test_detect_language_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceClient( @@ -2021,19 +2010,17 @@ def test_get_supported_languages_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_supported_languages), "__call__" - ) as call: + with mock.patch.object(type(client.transport.detect_language), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_supported_languages() + client.detect_language() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.GetSupportedLanguagesRequest() + assert args[0] == translation_service.DetectLanguageRequest() -def test_get_supported_languages_non_empty_request_with_auto_populated_field(): +def test_detect_language_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = TranslationServiceClient( @@ -2044,30 +2031,30 @@ def test_get_supported_languages_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = translation_service.GetSupportedLanguagesRequest( + request = translation_service.DetectLanguageRequest( parent="parent_value", - display_language_code="display_language_code_value", model="model_value", + content="content_value", + mime_type="mime_type_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_supported_languages), "__call__" - ) as call: + with mock.patch.object(type(client.transport.detect_language), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_supported_languages(request=request) + client.detect_language(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.GetSupportedLanguagesRequest( + assert args[0] == translation_service.DetectLanguageRequest( parent="parent_value", - display_language_code="display_language_code_value", model="model_value", + content="content_value", + mime_type="mime_type_value", ) -def test_get_supported_languages_use_cached_wrapped_rpc(): +def test_detect_language_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2081,26 +2068,21 @@ def test_get_supported_languages_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_supported_languages - in client._transport._wrapped_methods - ) + assert client._transport.detect_language in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_supported_languages - ] = mock_rpc + client._transport._wrapped_methods[client._transport.detect_language] = mock_rpc request = {} - client.get_supported_languages(request) + client.detect_language(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_supported_languages(request) + client.detect_language(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2108,7 +2090,7 @@ def test_get_supported_languages_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_supported_languages_empty_call_async(): +async def test_detect_language_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceAsyncClient( @@ -2117,21 +2099,19 @@ async def test_get_supported_languages_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_supported_languages), "__call__" - ) as call: + with mock.patch.object(type(client.transport.detect_language), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - translation_service.SupportedLanguages() + translation_service.DetectLanguageResponse() ) - response = await client.get_supported_languages() + response = await client.detect_language() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.GetSupportedLanguagesRequest() + assert args[0] == translation_service.DetectLanguageRequest() @pytest.mark.asyncio -async def test_get_supported_languages_async_use_cached_wrapped_rpc( +async def test_detect_language_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2148,23 +2128,23 @@ async def test_get_supported_languages_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_supported_languages + client._client._transport.detect_language in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.get_supported_languages + client._client._transport.detect_language ] = mock_object request = {} - await client.get_supported_languages(request) + await client.detect_language(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.get_supported_languages(request) + await client.detect_language(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2172,9 +2152,9 @@ async def test_get_supported_languages_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_supported_languages_async( +async def test_detect_language_async( transport: str = "grpc_asyncio", - request_type=translation_service.GetSupportedLanguagesRequest, + request_type=translation_service.DetectLanguageRequest, ): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2186,47 +2166,43 @@ async def test_get_supported_languages_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_supported_languages), "__call__" - ) as call: + with mock.patch.object(type(client.transport.detect_language), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - translation_service.SupportedLanguages() + translation_service.DetectLanguageResponse() ) - response = await client.get_supported_languages(request) + response = await client.detect_language(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = translation_service.GetSupportedLanguagesRequest() + request = translation_service.DetectLanguageRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, translation_service.SupportedLanguages) + assert isinstance(response, translation_service.DetectLanguageResponse) @pytest.mark.asyncio -async def test_get_supported_languages_async_from_dict(): - await test_get_supported_languages_async(request_type=dict) +async def test_detect_language_async_from_dict(): + await test_detect_language_async(request_type=dict) -def test_get_supported_languages_field_headers(): +def test_detect_language_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = translation_service.GetSupportedLanguagesRequest() + request = translation_service.DetectLanguageRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_supported_languages), "__call__" - ) as call: - call.return_value = translation_service.SupportedLanguages() - client.get_supported_languages(request) + with mock.patch.object(type(client.transport.detect_language), "__call__") as call: + call.return_value = translation_service.DetectLanguageResponse() + client.detect_language(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2242,25 +2218,23 @@ def test_get_supported_languages_field_headers(): @pytest.mark.asyncio -async def test_get_supported_languages_field_headers_async(): +async def test_detect_language_field_headers_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = translation_service.GetSupportedLanguagesRequest() + request = translation_service.DetectLanguageRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_supported_languages), "__call__" - ) as call: + with mock.patch.object(type(client.transport.detect_language), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - translation_service.SupportedLanguages() + translation_service.DetectLanguageResponse() ) - await client.get_supported_languages(request) + await client.detect_language(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2275,23 +2249,22 @@ async def test_get_supported_languages_field_headers_async(): ) in kw["metadata"] -def test_get_supported_languages_flattened(): +def test_detect_language_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_supported_languages), "__call__" - ) as call: + with mock.patch.object(type(client.transport.detect_language), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = translation_service.SupportedLanguages() + call.return_value = translation_service.DetectLanguageResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_supported_languages( + client.detect_language( parent="parent_value", model="model_value", - display_language_code="display_language_code_value", + mime_type="mime_type_value", + content="content_value", ) # Establish that the underlying call was made with the expected @@ -2304,12 +2277,13 @@ def test_get_supported_languages_flattened(): arg = args[0].model mock_val = "model_value" assert arg == mock_val - arg = args[0].display_language_code - mock_val = "display_language_code_value" + arg = args[0].mime_type + mock_val = "mime_type_value" assert arg == mock_val + assert args[0].content == "content_value" -def test_get_supported_languages_flattened_error(): +def test_detect_language_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -2317,36 +2291,36 @@ def test_get_supported_languages_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_supported_languages( - translation_service.GetSupportedLanguagesRequest(), + client.detect_language( + translation_service.DetectLanguageRequest(), parent="parent_value", model="model_value", - display_language_code="display_language_code_value", + mime_type="mime_type_value", + content="content_value", ) @pytest.mark.asyncio -async def test_get_supported_languages_flattened_async(): +async def test_detect_language_flattened_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_supported_languages), "__call__" - ) as call: + with mock.patch.object(type(client.transport.detect_language), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = translation_service.SupportedLanguages() + call.return_value = translation_service.DetectLanguageResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - translation_service.SupportedLanguages() + translation_service.DetectLanguageResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_supported_languages( + response = await client.detect_language( parent="parent_value", model="model_value", - display_language_code="display_language_code_value", + mime_type="mime_type_value", + content="content_value", ) # Establish that the underlying call was made with the expected @@ -2359,13 +2333,14 @@ async def test_get_supported_languages_flattened_async(): arg = args[0].model mock_val = "model_value" assert arg == mock_val - arg = args[0].display_language_code - mock_val = "display_language_code_value" + arg = args[0].mime_type + mock_val = "mime_type_value" assert arg == mock_val + assert args[0].content == "content_value" @pytest.mark.asyncio -async def test_get_supported_languages_flattened_error_async(): +async def test_detect_language_flattened_error_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -2373,22 +2348,23 @@ async def test_get_supported_languages_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_supported_languages( - translation_service.GetSupportedLanguagesRequest(), + await client.detect_language( + translation_service.DetectLanguageRequest(), parent="parent_value", model="model_value", - display_language_code="display_language_code_value", + mime_type="mime_type_value", + content="content_value", ) @pytest.mark.parametrize( "request_type", [ - translation_service.TranslateDocumentRequest, + translation_service.GetSupportedLanguagesRequest, dict, ], ) -def test_translate_document(request_type, transport: str = "grpc"): +def test_get_supported_languages(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2400,26 +2376,23 @@ def test_translate_document(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.translate_document), "__call__" + type(client.transport.get_supported_languages), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = translation_service.TranslateDocumentResponse( - model="model_value", - ) - response = client.translate_document(request) + call.return_value = translation_service.SupportedLanguages() + response = client.get_supported_languages(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = translation_service.TranslateDocumentRequest() + request = translation_service.GetSupportedLanguagesRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, translation_service.TranslateDocumentResponse) - assert response.model == "model_value" + assert isinstance(response, translation_service.SupportedLanguages) -def test_translate_document_empty_call(): +def test_get_supported_languages_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceClient( @@ -2429,18 +2402,18 @@ def test_translate_document_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.translate_document), "__call__" + type(client.transport.get_supported_languages), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.translate_document() + client.get_supported_languages() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.TranslateDocumentRequest() + assert args[0] == translation_service.GetSupportedLanguagesRequest() -def test_translate_document_non_empty_request_with_auto_populated_field(): +def test_get_supported_languages_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = TranslationServiceClient( @@ -2451,34 +2424,30 @@ def test_translate_document_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = translation_service.TranslateDocumentRequest( + request = translation_service.GetSupportedLanguagesRequest( parent="parent_value", - source_language_code="source_language_code_value", - target_language_code="target_language_code_value", + display_language_code="display_language_code_value", model="model_value", - customized_attribution="customized_attribution_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.translate_document), "__call__" + type(client.transport.get_supported_languages), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.translate_document(request=request) + client.get_supported_languages(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.TranslateDocumentRequest( + assert args[0] == translation_service.GetSupportedLanguagesRequest( parent="parent_value", - source_language_code="source_language_code_value", - target_language_code="target_language_code_value", + display_language_code="display_language_code_value", model="model_value", - customized_attribution="customized_attribution_value", ) -def test_translate_document_use_cached_wrapped_rpc(): +def test_get_supported_languages_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2493,7 +2462,8 @@ def test_translate_document_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.translate_document in client._transport._wrapped_methods + client._transport.get_supported_languages + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -2502,15 +2472,15 @@ def test_translate_document_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.translate_document + client._transport.get_supported_languages ] = mock_rpc request = {} - client.translate_document(request) + client.get_supported_languages(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.translate_document(request) + client.get_supported_languages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2518,7 +2488,7 @@ def test_translate_document_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_translate_document_empty_call_async(): +async def test_get_supported_languages_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceAsyncClient( @@ -2528,22 +2498,20 @@ async def test_translate_document_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.translate_document), "__call__" + type(client.transport.get_supported_languages), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - translation_service.TranslateDocumentResponse( - model="model_value", - ) + translation_service.SupportedLanguages() ) - response = await client.translate_document() + response = await client.get_supported_languages() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.TranslateDocumentRequest() + assert args[0] == translation_service.GetSupportedLanguagesRequest() @pytest.mark.asyncio -async def test_translate_document_async_use_cached_wrapped_rpc( +async def test_get_supported_languages_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2560,23 +2528,23 @@ async def test_translate_document_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.translate_document + client._client._transport.get_supported_languages in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.translate_document + client._client._transport.get_supported_languages ] = mock_object request = {} - await client.translate_document(request) + await client.get_supported_languages(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.translate_document(request) + await client.get_supported_languages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2584,9 +2552,9 @@ async def test_translate_document_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_translate_document_async( +async def test_get_supported_languages_async( transport: str = "grpc_asyncio", - request_type=translation_service.TranslateDocumentRequest, + request_type=translation_service.GetSupportedLanguagesRequest, ): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2599,49 +2567,46 @@ async def test_translate_document_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.translate_document), "__call__" + type(client.transport.get_supported_languages), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - translation_service.TranslateDocumentResponse( - model="model_value", - ) + translation_service.SupportedLanguages() ) - response = await client.translate_document(request) + response = await client.get_supported_languages(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = translation_service.TranslateDocumentRequest() + request = translation_service.GetSupportedLanguagesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, translation_service.TranslateDocumentResponse) - assert response.model == "model_value" + assert isinstance(response, translation_service.SupportedLanguages) @pytest.mark.asyncio -async def test_translate_document_async_from_dict(): - await test_translate_document_async(request_type=dict) +async def test_get_supported_languages_async_from_dict(): + await test_get_supported_languages_async(request_type=dict) -def test_translate_document_field_headers(): +def test_get_supported_languages_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = translation_service.TranslateDocumentRequest() + request = translation_service.GetSupportedLanguagesRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.translate_document), "__call__" + type(client.transport.get_supported_languages), "__call__" ) as call: - call.return_value = translation_service.TranslateDocumentResponse() - client.translate_document(request) + call.return_value = translation_service.SupportedLanguages() + client.get_supported_languages(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2657,25 +2622,25 @@ def test_translate_document_field_headers(): @pytest.mark.asyncio -async def test_translate_document_field_headers_async(): +async def test_get_supported_languages_field_headers_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = translation_service.TranslateDocumentRequest() + request = translation_service.GetSupportedLanguagesRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.translate_document), "__call__" + type(client.transport.get_supported_languages), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - translation_service.TranslateDocumentResponse() + translation_service.SupportedLanguages() ) - await client.translate_document(request) + await client.get_supported_languages(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2690,100 +2655,215 @@ async def test_translate_document_field_headers_async(): ) in kw["metadata"] -@pytest.mark.parametrize( - "request_type", - [ - translation_service.BatchTranslateTextRequest, - dict, - ], -) -def test_batch_translate_text(request_type, transport: str = "grpc"): +def test_get_supported_languages_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_translate_text), "__call__" + type(client.transport.get_supported_languages), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.batch_translate_text(request) + call.return_value = translation_service.SupportedLanguages() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_supported_languages( + parent="parent_value", + model="model_value", + display_language_code="display_language_code_value", + ) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying call was made with the expected + # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = translation_service.BatchTranslateTextRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].display_language_code + mock_val = "display_language_code_value" + assert arg == mock_val -def test_batch_translate_text_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_get_supported_languages_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_translate_text), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_supported_languages( + translation_service.GetSupportedLanguagesRequest(), + parent="parent_value", + model="model_value", + display_language_code="display_language_code_value", ) - client.batch_translate_text() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.BatchTranslateTextRequest() -def test_batch_translate_text_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = TranslationServiceClient( +@pytest.mark.asyncio +async def test_get_supported_languages_flattened_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = translation_service.BatchTranslateTextRequest( - parent="parent_value", - source_language_code="source_language_code_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_translate_text), "__call__" + type(client.transport.get_supported_languages), "__call__" ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Designate an appropriate return value for the call. 
+ call.return_value = translation_service.SupportedLanguages() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + translation_service.SupportedLanguages() ) - client.batch_translate_text(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.BatchTranslateTextRequest( + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_supported_languages( parent="parent_value", - source_language_code="source_language_code_value", + model="model_value", + display_language_code="display_language_code_value", ) - -def test_batch_translate_text_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].display_language_code + mock_val = "display_language_code_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_supported_languages_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_supported_languages( + translation_service.GetSupportedLanguagesRequest(), + parent="parent_value", + model="model_value", + display_language_code="display_language_code_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.TranslateDocumentRequest, + dict, + ], +) +def test_translate_document(request_type, transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.translate_document), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = translation_service.TranslateDocumentResponse( + model="model_value", + ) + response = client.translate_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = translation_service.TranslateDocumentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, translation_service.TranslateDocumentResponse) + assert response.model == "model_value" + + +def test_translate_document_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.translate_document), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.translate_document() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == translation_service.TranslateDocumentRequest() + + +def test_translate_document_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = translation_service.TranslateDocumentRequest( + parent="parent_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + model="model_value", + customized_attribution="customized_attribution_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.translate_document), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.translate_document(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == translation_service.TranslateDocumentRequest( + parent="parent_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + model="model_value", + customized_attribution="customized_attribution_value", + ) + + +def test_translate_document_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) @@ -2793,7 +2873,7 @@ def test_batch_translate_text_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.batch_translate_text in client._transport._wrapped_methods + client._transport.translate_document in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -2802,19 +2882,15 @@ def test_batch_translate_text_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.batch_translate_text + client._transport.translate_document ] = mock_rpc request = {} - client.batch_translate_text(request) + client.translate_document(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.batch_translate_text(request) + client.translate_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2822,7 +2898,7 @@ def test_batch_translate_text_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_batch_translate_text_empty_call_async(): +async def test_translate_document_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceAsyncClient( @@ -2832,20 +2908,22 @@ async def test_batch_translate_text_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_translate_text), "__call__" + type(client.transport.translate_document), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + translation_service.TranslateDocumentResponse( + model="model_value", + ) ) - response = await client.batch_translate_text() + response = await client.translate_document() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.BatchTranslateTextRequest() + assert args[0] == translation_service.TranslateDocumentRequest() @pytest.mark.asyncio -async def test_batch_translate_text_async_use_cached_wrapped_rpc( +async def test_translate_document_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2862,27 +2940,23 @@ async def test_batch_translate_text_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.batch_translate_text + client._client._transport.translate_document in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.batch_translate_text + client._client._transport.translate_document ] = mock_object request = {} - await client.batch_translate_text(request) + await client.translate_document(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.batch_translate_text(request) + await client.translate_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2890,9 +2964,9 @@ async def test_batch_translate_text_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_batch_translate_text_async( +async def test_translate_document_async( transport: str = "grpc_asyncio", - request_type=translation_service.BatchTranslateTextRequest, + request_type=translation_service.TranslateDocumentRequest, ): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2905,46 +2979,49 @@ async def test_batch_translate_text_async( # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.batch_translate_text), "__call__" + type(client.transport.translate_document), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + translation_service.TranslateDocumentResponse( + model="model_value", + ) ) - response = await client.batch_translate_text(request) + response = await client.translate_document(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = translation_service.BatchTranslateTextRequest() + request = translation_service.TranslateDocumentRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, translation_service.TranslateDocumentResponse) + assert response.model == "model_value" @pytest.mark.asyncio -async def test_batch_translate_text_async_from_dict(): - await test_batch_translate_text_async(request_type=dict) +async def test_translate_document_async_from_dict(): + await test_translate_document_async(request_type=dict) -def test_batch_translate_text_field_headers(): +def test_translate_document_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = translation_service.BatchTranslateTextRequest() + request = translation_service.TranslateDocumentRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_translate_text), "__call__" + type(client.transport.translate_document), "__call__" ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.batch_translate_text(request) + call.return_value = translation_service.TranslateDocumentResponse() + client.translate_document(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2960,25 +3037,25 @@ def test_batch_translate_text_field_headers(): @pytest.mark.asyncio -async def test_batch_translate_text_field_headers_async(): +async def test_translate_document_field_headers_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = translation_service.BatchTranslateTextRequest() + request = translation_service.TranslateDocumentRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_translate_text), "__call__" + type(client.transport.translate_document), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + translation_service.TranslateDocumentResponse() ) - await client.batch_translate_text(request) + await client.translate_document(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -2996,11 +3073,11 @@ async def test_batch_translate_text_field_headers_async(): @pytest.mark.parametrize( "request_type", [ - translation_service.BatchTranslateDocumentRequest, + translation_service.BatchTranslateTextRequest, dict, ], ) -def test_batch_translate_document(request_type, transport: str = "grpc"): +def test_batch_translate_text(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3012,23 +3089,23 @@ def test_batch_translate_document(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_translate_document), "__call__" + type(client.transport.batch_translate_text), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.batch_translate_document(request) + response = client.batch_translate_text(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = translation_service.BatchTranslateDocumentRequest() + request = translation_service.BatchTranslateTextRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_batch_translate_document_empty_call(): +def test_batch_translate_text_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceClient( @@ -3038,18 +3115,18 @@ def test_batch_translate_document_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_translate_document), "__call__" + type(client.transport.batch_translate_text), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.batch_translate_document() + client.batch_translate_text() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.BatchTranslateDocumentRequest() + assert args[0] == translation_service.BatchTranslateTextRequest() -def test_batch_translate_document_non_empty_request_with_auto_populated_field(): +def test_batch_translate_text_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = TranslationServiceClient( @@ -3060,30 +3137,28 @@ def test_batch_translate_document_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = translation_service.BatchTranslateDocumentRequest( + request = translation_service.BatchTranslateTextRequest( parent="parent_value", source_language_code="source_language_code_value", - customized_attribution="customized_attribution_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.batch_translate_document), "__call__" + type(client.transport.batch_translate_text), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.batch_translate_document(request=request) + client.batch_translate_text(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.BatchTranslateDocumentRequest( + assert args[0] == translation_service.BatchTranslateTextRequest( parent="parent_value", source_language_code="source_language_code_value", - customized_attribution="customized_attribution_value", ) -def test_batch_translate_document_use_cached_wrapped_rpc(): +def test_batch_translate_text_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3098,8 +3173,7 @@ def test_batch_translate_document_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.batch_translate_document - in client._transport._wrapped_methods + client._transport.batch_translate_text in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -3108,10 +3182,10 @@ def test_batch_translate_document_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.batch_translate_document + client._transport.batch_translate_text ] = mock_rpc request = {} - client.batch_translate_document(request) + client.batch_translate_text(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -3120,7 +3194,7 @@ def test_batch_translate_document_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.batch_translate_document(request) + client.batch_translate_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3128,7 +3202,7 @@ def test_batch_translate_document_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_batch_translate_document_empty_call_async(): +async def test_batch_translate_text_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceAsyncClient( @@ -3138,20 +3212,20 @@ async def test_batch_translate_document_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_translate_document), "__call__" + type(client.transport.batch_translate_text), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.batch_translate_document() + response = await client.batch_translate_text() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.BatchTranslateDocumentRequest() + assert args[0] == translation_service.BatchTranslateTextRequest() @pytest.mark.asyncio -async def test_batch_translate_document_async_use_cached_wrapped_rpc( +async def test_batch_translate_text_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3168,18 +3242,18 @@ async def test_batch_translate_document_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.batch_translate_document + client._client._transport.batch_translate_text in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.batch_translate_document + client._client._transport.batch_translate_text ] = mock_object request = {} - await client.batch_translate_document(request) + await client.batch_translate_text(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 @@ -3188,7 +3262,7 @@ async def test_batch_translate_document_async_use_cached_wrapped_rpc( # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.batch_translate_document(request) + await client.batch_translate_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3196,9 +3270,9 @@ async def test_batch_translate_document_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_batch_translate_document_async( +async def test_batch_translate_text_async( transport: str = "grpc_asyncio", - request_type=translation_service.BatchTranslateDocumentRequest, + request_type=translation_service.BatchTranslateTextRequest, ): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3211,18 +3285,18 @@ async def test_batch_translate_document_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_translate_document), "__call__" + type(client.transport.batch_translate_text), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.batch_translate_document(request) + response = await client.batch_translate_text(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = translation_service.BatchTranslateDocumentRequest() + request = translation_service.BatchTranslateTextRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -3230,27 +3304,27 @@ async def test_batch_translate_document_async( @pytest.mark.asyncio -async def test_batch_translate_document_async_from_dict(): - await test_batch_translate_document_async(request_type=dict) +async def test_batch_translate_text_async_from_dict(): + await test_batch_translate_text_async(request_type=dict) -def test_batch_translate_document_field_headers(): +def test_batch_translate_text_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = translation_service.BatchTranslateDocumentRequest() + request = translation_service.BatchTranslateTextRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_translate_document), "__call__" + type(client.transport.batch_translate_text), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.batch_translate_document(request) + client.batch_translate_text(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3266,25 +3340,25 @@ def test_batch_translate_document_field_headers(): @pytest.mark.asyncio -async def test_batch_translate_document_field_headers_async(): +async def test_batch_translate_text_field_headers_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = translation_service.BatchTranslateDocumentRequest() + request = translation_service.BatchTranslateTextRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_translate_document), "__call__" + type(client.transport.batch_translate_text), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.batch_translate_document(request) + await client.batch_translate_text(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3299,196 +3373,14 @@ async def test_batch_translate_document_field_headers_async(): ) in kw["metadata"] -def test_batch_translate_document_flattened(): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_translate_document), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.batch_translate_document( - parent="parent_value", - source_language_code="source_language_code_value", - target_language_codes=["target_language_codes_value"], - input_configs=[ - translation_service.BatchDocumentInputConfig( - gcs_source=translation_service.GcsSource( - input_uri="input_uri_value" - ) - ) - ], - output_config=translation_service.BatchDocumentOutputConfig( - gcs_destination=translation_service.GcsDestination( - output_uri_prefix="output_uri_prefix_value" - ) - ), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].source_language_code - mock_val = "source_language_code_value" - assert arg == mock_val - arg = args[0].target_language_codes - mock_val = ["target_language_codes_value"] - assert arg == mock_val - arg = args[0].input_configs - mock_val = [ - translation_service.BatchDocumentInputConfig( - gcs_source=translation_service.GcsSource(input_uri="input_uri_value") - ) - ] - assert arg == mock_val - arg = args[0].output_config - mock_val = translation_service.BatchDocumentOutputConfig( - gcs_destination=translation_service.GcsDestination( - output_uri_prefix="output_uri_prefix_value" - ) - ) - assert arg == mock_val - - -def test_batch_translate_document_flattened_error(): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.batch_translate_document( - translation_service.BatchTranslateDocumentRequest(), - parent="parent_value", - source_language_code="source_language_code_value", - target_language_codes=["target_language_codes_value"], - input_configs=[ - translation_service.BatchDocumentInputConfig( - gcs_source=translation_service.GcsSource( - input_uri="input_uri_value" - ) - ) - ], - output_config=translation_service.BatchDocumentOutputConfig( - gcs_destination=translation_service.GcsDestination( - output_uri_prefix="output_uri_prefix_value" - ) - ), - ) - - -@pytest.mark.asyncio -async def test_batch_translate_document_flattened_async(): - client = TranslationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_translate_document), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.batch_translate_document( - parent="parent_value", - source_language_code="source_language_code_value", - target_language_codes=["target_language_codes_value"], - input_configs=[ - translation_service.BatchDocumentInputConfig( - gcs_source=translation_service.GcsSource( - input_uri="input_uri_value" - ) - ) - ], - output_config=translation_service.BatchDocumentOutputConfig( - gcs_destination=translation_service.GcsDestination( - output_uri_prefix="output_uri_prefix_value" - ) - ), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].source_language_code - mock_val = "source_language_code_value" - assert arg == mock_val - arg = args[0].target_language_codes - mock_val = ["target_language_codes_value"] - assert arg == mock_val - arg = args[0].input_configs - mock_val = [ - translation_service.BatchDocumentInputConfig( - gcs_source=translation_service.GcsSource(input_uri="input_uri_value") - ) - ] - assert arg == mock_val - arg = args[0].output_config - mock_val = translation_service.BatchDocumentOutputConfig( - gcs_destination=translation_service.GcsDestination( - output_uri_prefix="output_uri_prefix_value" - ) - ) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_batch_translate_document_flattened_error_async(): - client = TranslationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.batch_translate_document( - translation_service.BatchTranslateDocumentRequest(), - parent="parent_value", - source_language_code="source_language_code_value", - target_language_codes=["target_language_codes_value"], - input_configs=[ - translation_service.BatchDocumentInputConfig( - gcs_source=translation_service.GcsSource( - input_uri="input_uri_value" - ) - ) - ], - output_config=translation_service.BatchDocumentOutputConfig( - gcs_destination=translation_service.GcsDestination( - output_uri_prefix="output_uri_prefix_value" - ) - ), - ) - - @pytest.mark.parametrize( "request_type", [ - translation_service.CreateGlossaryRequest, + translation_service.BatchTranslateDocumentRequest, dict, ], ) -def test_create_glossary(request_type, transport: str = "grpc"): +def test_batch_translate_document(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3499,22 +3391,24 @@ def test_create_glossary(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + with mock.patch.object( + type(client.transport.batch_translate_document), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_glossary(request) + response = client.batch_translate_document(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = translation_service.CreateGlossaryRequest() + request = translation_service.BatchTranslateDocumentRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_create_glossary_empty_call(): +def test_batch_translate_document_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceClient( @@ -3523,17 +3417,19 @@ def test_create_glossary_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + with mock.patch.object( + type(client.transport.batch_translate_document), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_glossary() + client.batch_translate_document() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.CreateGlossaryRequest() + assert args[0] == translation_service.BatchTranslateDocumentRequest() -def test_create_glossary_non_empty_request_with_auto_populated_field(): +def test_batch_translate_document_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = TranslationServiceClient( @@ -3544,24 +3440,30 @@ def test_create_glossary_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = translation_service.CreateGlossaryRequest( + request = translation_service.BatchTranslateDocumentRequest( parent="parent_value", - ) + source_language_code="source_language_code_value", + customized_attribution="customized_attribution_value", + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + with mock.patch.object( + type(client.transport.batch_translate_document), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_glossary(request=request) + client.batch_translate_document(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.CreateGlossaryRequest( + assert args[0] == translation_service.BatchTranslateDocumentRequest( parent="parent_value", + source_language_code="source_language_code_value", + customized_attribution="customized_attribution_value", ) -def test_create_glossary_use_cached_wrapped_rpc(): +def test_batch_translate_document_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3575,16 +3477,21 @@ def test_create_glossary_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_glossary in client._transport._wrapped_methods + assert ( + client._transport.batch_translate_document + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_glossary] = mock_rpc + client._transport._wrapped_methods[ + client._transport.batch_translate_document + ] = mock_rpc request = {} - client.create_glossary(request) + client.batch_translate_document(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -3593,7 +3500,7 @@ def test_create_glossary_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_glossary(request) + client.batch_translate_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3601,7 +3508,7 @@ def test_create_glossary_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_glossary_empty_call_async(): +async def test_batch_translate_document_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceAsyncClient( @@ -3610,19 +3517,21 @@ async def test_create_glossary_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + with mock.patch.object( + type(client.transport.batch_translate_document), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_glossary() + response = await client.batch_translate_document() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.CreateGlossaryRequest() + assert args[0] == translation_service.BatchTranslateDocumentRequest() @pytest.mark.asyncio -async def test_create_glossary_async_use_cached_wrapped_rpc( +async def test_batch_translate_document_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3639,18 +3548,18 @@ async def test_create_glossary_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_glossary + client._client._transport.batch_translate_document in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.create_glossary + client._client._transport.batch_translate_document ] = mock_object request = {} - await client.create_glossary(request) + await client.batch_translate_document(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 @@ -3659,7 +3568,7 @@ async def test_create_glossary_async_use_cached_wrapped_rpc( # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.create_glossary(request) + await client.batch_translate_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3667,9 +3576,9 @@ async def test_create_glossary_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_glossary_async( +async def test_batch_translate_document_async( transport: str = "grpc_asyncio", - request_type=translation_service.CreateGlossaryRequest, + request_type=translation_service.BatchTranslateDocumentRequest, ): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3681,17 +3590,19 @@ async def test_create_glossary_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + with mock.patch.object( + type(client.transport.batch_translate_document), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_glossary(request) + response = await client.batch_translate_document(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = translation_service.CreateGlossaryRequest() + request = translation_service.BatchTranslateDocumentRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -3699,25 +3610,27 @@ async def test_create_glossary_async( @pytest.mark.asyncio -async def test_create_glossary_async_from_dict(): - await test_create_glossary_async(request_type=dict) +async def test_batch_translate_document_async_from_dict(): + await test_batch_translate_document_async(request_type=dict) -def test_create_glossary_field_headers(): +def test_batch_translate_document_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = translation_service.CreateGlossaryRequest() + request = translation_service.BatchTranslateDocumentRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + with mock.patch.object( + type(client.transport.batch_translate_document), "__call__" + ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_glossary(request) + client.batch_translate_document(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3733,23 +3646,25 @@ def test_create_glossary_field_headers(): @pytest.mark.asyncio -async def test_create_glossary_field_headers_async(): +async def test_batch_translate_document_field_headers_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = translation_service.CreateGlossaryRequest() + request = translation_service.BatchTranslateDocumentRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + with mock.patch.object( + type(client.transport.batch_translate_document), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_glossary(request) + await client.batch_translate_document(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3764,20 +3679,35 @@ async def test_create_glossary_field_headers_async(): ) in kw["metadata"] -def test_create_glossary_flattened(): +def test_batch_translate_document_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + with mock.patch.object( + type(client.transport.batch_translate_document), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_glossary( + client.batch_translate_document( parent="parent_value", - glossary=translation_service.Glossary(name="name_value"), + source_language_code="source_language_code_value", + target_language_codes=["target_language_codes_value"], + input_configs=[ + translation_service.BatchDocumentInputConfig( + gcs_source=translation_service.GcsSource( + input_uri="input_uri_value" + ) + ) + ], + output_config=translation_service.BatchDocumentOutputConfig( + gcs_destination=translation_service.GcsDestination( + output_uri_prefix="output_uri_prefix_value" + ) + ), ) # Establish that the underlying call was made with the expected @@ -3787,12 +3717,29 @@ def test_create_glossary_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].glossary - mock_val = translation_service.Glossary(name="name_value") + arg = args[0].source_language_code + mock_val = "source_language_code_value" + assert arg == mock_val + arg = args[0].target_language_codes + mock_val = ["target_language_codes_value"] + assert arg == mock_val + arg = args[0].input_configs + mock_val = [ + translation_service.BatchDocumentInputConfig( + gcs_source=translation_service.GcsSource(input_uri="input_uri_value") + ) + ] + assert arg == mock_val + arg = args[0].output_config + mock_val = translation_service.BatchDocumentOutputConfig( + gcs_destination=translation_service.GcsDestination( + output_uri_prefix="output_uri_prefix_value" + ) + ) assert arg == mock_val -def test_create_glossary_flattened_error(): +def test_batch_translate_document_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3800,21 +3747,36 @@ def test_create_glossary_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_glossary( - translation_service.CreateGlossaryRequest(), + client.batch_translate_document( + translation_service.BatchTranslateDocumentRequest(), parent="parent_value", - glossary=translation_service.Glossary(name="name_value"), + source_language_code="source_language_code_value", + target_language_codes=["target_language_codes_value"], + input_configs=[ + translation_service.BatchDocumentInputConfig( + gcs_source=translation_service.GcsSource( + input_uri="input_uri_value" + ) + ) + ], + output_config=translation_service.BatchDocumentOutputConfig( + gcs_destination=translation_service.GcsDestination( + output_uri_prefix="output_uri_prefix_value" + ) + ), ) @pytest.mark.asyncio -async def test_create_glossary_flattened_async(): +async def test_batch_translate_document_flattened_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + with mock.patch.object( + type(client.transport.batch_translate_document), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/op") @@ -3823,9 +3785,22 @@ async def test_create_glossary_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_glossary( + response = await client.batch_translate_document( parent="parent_value", - glossary=translation_service.Glossary(name="name_value"), + source_language_code="source_language_code_value", + target_language_codes=["target_language_codes_value"], + input_configs=[ + translation_service.BatchDocumentInputConfig( + gcs_source=translation_service.GcsSource( + input_uri="input_uri_value" + ) + ) + ], + output_config=translation_service.BatchDocumentOutputConfig( + gcs_destination=translation_service.GcsDestination( + output_uri_prefix="output_uri_prefix_value" + ) + ), ) # Establish that the underlying call was made with the expected @@ -3835,13 +3810,30 @@ async def test_create_glossary_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].glossary - mock_val = translation_service.Glossary(name="name_value") + arg = args[0].source_language_code + mock_val = "source_language_code_value" + assert arg == mock_val + arg = args[0].target_language_codes + mock_val = ["target_language_codes_value"] + assert arg == mock_val + arg = args[0].input_configs + mock_val = [ + translation_service.BatchDocumentInputConfig( + gcs_source=translation_service.GcsSource(input_uri="input_uri_value") + ) + ] + assert arg == mock_val + arg = args[0].output_config + mock_val = translation_service.BatchDocumentOutputConfig( + gcs_destination=translation_service.GcsDestination( + output_uri_prefix="output_uri_prefix_value" + ) + ) assert arg == mock_val @pytest.mark.asyncio -async def test_create_glossary_flattened_error_async(): +async def test_batch_translate_document_flattened_error_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3849,21 +3841,34 @@ async def test_create_glossary_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_glossary( - translation_service.CreateGlossaryRequest(), + await client.batch_translate_document( + translation_service.BatchTranslateDocumentRequest(), parent="parent_value", - glossary=translation_service.Glossary(name="name_value"), + source_language_code="source_language_code_value", + target_language_codes=["target_language_codes_value"], + input_configs=[ + translation_service.BatchDocumentInputConfig( + gcs_source=translation_service.GcsSource( + input_uri="input_uri_value" + ) + ) + ], + output_config=translation_service.BatchDocumentOutputConfig( + gcs_destination=translation_service.GcsDestination( + output_uri_prefix="output_uri_prefix_value" + ) + ), ) @pytest.mark.parametrize( "request_type", [ - translation_service.ListGlossariesRequest, + translation_service.CreateGlossaryRequest, dict, ], ) -def test_list_glossaries(request_type, transport: str = "grpc"): +def test_create_glossary(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3874,25 +3879,22 @@ def test_list_glossaries(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = translation_service.ListGlossariesResponse( - next_page_token="next_page_token_value", - ) - response = client.list_glossaries(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_glossary(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = translation_service.ListGlossariesRequest() + request = translation_service.CreateGlossaryRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListGlossariesPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, future.Future) -def test_list_glossaries_empty_call(): +def test_create_glossary_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceClient( @@ -3901,17 +3903,17 @@ def test_list_glossaries_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_glossaries() + client.create_glossary() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.ListGlossariesRequest() + assert args[0] == translation_service.CreateGlossaryRequest() -def test_list_glossaries_non_empty_request_with_auto_populated_field(): +def test_create_glossary_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = TranslationServiceClient( @@ -3922,28 +3924,24 @@ def test_list_glossaries_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = translation_service.ListGlossariesRequest( + request = translation_service.CreateGlossaryRequest( parent="parent_value", - page_token="page_token_value", - filter="filter_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_glossaries(request=request) + client.create_glossary(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.ListGlossariesRequest( + assert args[0] == translation_service.CreateGlossaryRequest( parent="parent_value", - page_token="page_token_value", - filter="filter_value", ) -def test_list_glossaries_use_cached_wrapped_rpc(): +def test_create_glossary_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3957,21 +3955,25 @@ def test_list_glossaries_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_glossaries in client._transport._wrapped_methods + assert client._transport.create_glossary in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_glossaries] = mock_rpc + client._transport._wrapped_methods[client._transport.create_glossary] = mock_rpc request = {} - client.list_glossaries(request) + client.create_glossary(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_glossaries(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_glossary(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3979,7 +3981,7 @@ def test_list_glossaries_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_glossaries_empty_call_async(): +async def test_create_glossary_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceAsyncClient( @@ -3988,21 +3990,19 @@ async def test_list_glossaries_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - translation_service.ListGlossariesResponse( - next_page_token="next_page_token_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_glossaries() + response = await client.create_glossary() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.ListGlossariesRequest() + assert args[0] == translation_service.CreateGlossaryRequest() @pytest.mark.asyncio -async def test_list_glossaries_async_use_cached_wrapped_rpc( +async def test_create_glossary_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4019,23 +4019,27 @@ async def test_list_glossaries_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_glossaries + client._client._transport.create_glossary in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.list_glossaries + client._client._transport.create_glossary ] = mock_object request = {} - await client.list_glossaries(request) + await client.create_glossary(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.list_glossaries(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_glossary(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4043,9 +4047,9 @@ async def test_list_glossaries_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_glossaries_async( +async def test_create_glossary_async( transport: str = "grpc_asyncio", - request_type=translation_service.ListGlossariesRequest, + request_type=translation_service.CreateGlossaryRequest, ): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4057,46 +4061,43 @@ async def test_list_glossaries_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - translation_service.ListGlossariesResponse( - next_page_token="next_page_token_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_glossaries(request) + response = await client.create_glossary(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = translation_service.ListGlossariesRequest() + request = translation_service.CreateGlossaryRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListGlossariesAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_glossaries_async_from_dict(): - await test_list_glossaries_async(request_type=dict) +async def test_create_glossary_async_from_dict(): + await test_create_glossary_async(request_type=dict) -def test_list_glossaries_field_headers(): +def test_create_glossary_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = translation_service.ListGlossariesRequest() + request = translation_service.CreateGlossaryRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: - call.return_value = translation_service.ListGlossariesResponse() - client.list_glossaries(request) + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_glossary(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4112,23 +4113,23 @@ def test_list_glossaries_field_headers(): @pytest.mark.asyncio -async def test_list_glossaries_field_headers_async(): +async def test_create_glossary_field_headers_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = translation_service.ListGlossariesRequest() + request = translation_service.CreateGlossaryRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - translation_service.ListGlossariesResponse() + operations_pb2.Operation(name="operations/op") ) - await client.list_glossaries(request) + await client.create_glossary(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4143,19 +4144,20 @@ async def test_list_glossaries_field_headers_async(): ) in kw["metadata"] -def test_list_glossaries_flattened(): +def test_create_glossary_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = translation_service.ListGlossariesResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.list_glossaries( + client.create_glossary( parent="parent_value", + glossary=translation_service.Glossary(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -4165,9 +4167,12 @@ def test_list_glossaries_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val + arg = args[0].glossary + mock_val = translation_service.Glossary(name="name_value") + assert arg == mock_val -def test_list_glossaries_flattened_error(): +def test_create_glossary_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4175,30 +4180,32 @@ def test_list_glossaries_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_glossaries( - translation_service.ListGlossariesRequest(), + client.create_glossary( + translation_service.CreateGlossaryRequest(), parent="parent_value", + glossary=translation_service.Glossary(name="name_value"), ) @pytest.mark.asyncio -async def test_list_glossaries_flattened_async(): +async def test_create_glossary_flattened_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = translation_service.ListGlossariesResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - translation_service.ListGlossariesResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_glossaries( + response = await client.create_glossary( parent="parent_value", + glossary=translation_service.Glossary(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -4208,10 +4215,13 @@ async def test_list_glossaries_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val + arg = args[0].glossary + mock_val = translation_service.Glossary(name="name_value") + assert arg == mock_val @pytest.mark.asyncio -async def test_list_glossaries_flattened_error_async(): +async def test_create_glossary_flattened_error_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4219,294 +4229,90 @@ async def test_list_glossaries_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_glossaries( - translation_service.ListGlossariesRequest(), + await client.create_glossary( + translation_service.CreateGlossaryRequest(), parent="parent_value", + glossary=translation_service.Glossary(name="name_value"), ) -def test_list_glossaries_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + translation_service.UpdateGlossaryRequest, + dict, + ], +) +def test_update_glossary(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - translation_service.ListGlossariesResponse( - glossaries=[ - translation_service.Glossary(), - translation_service.Glossary(), - translation_service.Glossary(), - ], - next_page_token="abc", - ), - translation_service.ListGlossariesResponse( - glossaries=[], - next_page_token="def", - ), - translation_service.ListGlossariesResponse( - glossaries=[ - translation_service.Glossary(), - ], - next_page_token="ghi", - ), - translation_service.ListGlossariesResponse( - glossaries=[ - translation_service.Glossary(), - translation_service.Glossary(), - ], - ), - RuntimeError, - ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_glossaries(request={}, retry=retry, timeout=timeout) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_glossary(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = translation_service.UpdateGlossaryRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, translation_service.Glossary) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_list_glossaries_pages(transport_name: str = "grpc"): +def test_update_glossary_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - translation_service.ListGlossariesResponse( - glossaries=[ - translation_service.Glossary(), - translation_service.Glossary(), - translation_service.Glossary(), - ], - next_page_token="abc", - ), - translation_service.ListGlossariesResponse( - glossaries=[], - next_page_token="def", - ), - translation_service.ListGlossariesResponse( - glossaries=[ - translation_service.Glossary(), - ], - next_page_token="ghi", - ), - translation_service.ListGlossariesResponse( - glossaries=[ - translation_service.Glossary(), - translation_service.Glossary(), - ], - ), - RuntimeError, + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - pages = list(client.list_glossaries(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + client.update_glossary() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == translation_service.UpdateGlossaryRequest() -@pytest.mark.asyncio -async def test_list_glossaries_async_pager(): - client = TranslationServiceAsyncClient( +def test_update_glossary_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = translation_service.UpdateGlossaryRequest() + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_glossaries), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - translation_service.ListGlossariesResponse( - glossaries=[ - translation_service.Glossary(), - translation_service.Glossary(), - translation_service.Glossary(), - ], - next_page_token="abc", - ), - translation_service.ListGlossariesResponse( - glossaries=[], - next_page_token="def", - ), - translation_service.ListGlossariesResponse( - glossaries=[ - translation_service.Glossary(), - ], - next_page_token="ghi", - ), - translation_service.ListGlossariesResponse( - glossaries=[ - translation_service.Glossary(), - translation_service.Glossary(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_glossaries( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, translation_service.Glossary) for i in responses) - - -@pytest.mark.asyncio -async def test_list_glossaries_async_pages(): - client = TranslationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_glossaries), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - translation_service.ListGlossariesResponse( - glossaries=[ - translation_service.Glossary(), - translation_service.Glossary(), - translation_service.Glossary(), - ], - next_page_token="abc", - ), - translation_service.ListGlossariesResponse( - glossaries=[], - next_page_token="def", - ), - translation_service.ListGlossariesResponse( - glossaries=[ - translation_service.Glossary(), - ], - next_page_token="ghi", - ), - translation_service.ListGlossariesResponse( - glossaries=[ - translation_service.Glossary(), - translation_service.Glossary(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_glossaries(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - translation_service.GetGlossaryRequest, - dict, - ], -) -def test_get_glossary(request_type, transport: str = "grpc"): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = translation_service.Glossary( - name="name_value", - entry_count=1210, - display_name="display_name_value", - ) - response = client.get_glossary(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = translation_service.GetGlossaryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, translation_service.Glossary) - assert response.name == "name_value" - assert response.entry_count == 1210 - assert response.display_name == "display_name_value" - - -def test_get_glossary_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_glossary() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.GetGlossaryRequest() - - -def test_get_glossary_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = translation_service.GetGlossaryRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_glossary(request=request) + client.update_glossary(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.GetGlossaryRequest( - name="name_value", - ) + assert args[0] == translation_service.UpdateGlossaryRequest() -def test_get_glossary_use_cached_wrapped_rpc(): +def test_update_glossary_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4520,21 +4326,25 @@ def test_get_glossary_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_glossary in client._transport._wrapped_methods + assert client._transport.update_glossary in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_glossary] = mock_rpc + client._transport._wrapped_methods[client._transport.update_glossary] = mock_rpc request = {} - client.get_glossary(request) + client.update_glossary(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_glossary(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_glossary(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4542,7 +4352,7 @@ def test_get_glossary_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_glossary_empty_call_async(): +async def test_update_glossary_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceAsyncClient( @@ -4551,23 +4361,19 @@ async def test_get_glossary_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - translation_service.Glossary( - name="name_value", - entry_count=1210, - display_name="display_name_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_glossary() + response = await client.update_glossary() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.GetGlossaryRequest() + assert args[0] == translation_service.UpdateGlossaryRequest() @pytest.mark.asyncio -async def test_get_glossary_async_use_cached_wrapped_rpc( +async def test_update_glossary_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4584,23 +4390,27 @@ async def test_get_glossary_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_glossary + client._client._transport.update_glossary in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.get_glossary + client._client._transport.update_glossary ] = mock_object request = {} - await client.get_glossary(request) + await client.update_glossary(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.get_glossary(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_glossary(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4608,8 +4418,9 @@ async def test_get_glossary_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_glossary_async( - transport: str = "grpc_asyncio", request_type=translation_service.GetGlossaryRequest +async def test_update_glossary_async( + transport: str = "grpc_asyncio", + request_type=translation_service.UpdateGlossaryRequest, ): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4621,50 +4432,43 @@ async def test_get_glossary_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - translation_service.Glossary( - name="name_value", - entry_count=1210, - display_name="display_name_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_glossary(request) + response = await client.update_glossary(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = translation_service.GetGlossaryRequest() + request = translation_service.UpdateGlossaryRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, translation_service.Glossary) - assert response.name == "name_value" - assert response.entry_count == 1210 - assert response.display_name == "display_name_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_get_glossary_async_from_dict(): - await test_get_glossary_async(request_type=dict) +async def test_update_glossary_async_from_dict(): + await test_update_glossary_async(request_type=dict) -def test_get_glossary_field_headers(): +def test_update_glossary_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = translation_service.GetGlossaryRequest() + request = translation_service.UpdateGlossaryRequest() - request.name = "name_value" + request.glossary.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: - call.return_value = translation_service.Glossary() - client.get_glossary(request) + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_glossary(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4675,28 +4479,28 @@ def test_get_glossary_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "glossary.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_glossary_field_headers_async(): +async def test_update_glossary_field_headers_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = translation_service.GetGlossaryRequest() + request = translation_service.UpdateGlossaryRequest() - request.name = "name_value" + request.glossary.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - translation_service.Glossary() + operations_pb2.Operation(name="operations/op") ) - await client.get_glossary(request) + await client.update_glossary(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4707,35 +4511,39 @@ async def test_get_glossary_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "glossary.name=name_value", ) in kw["metadata"] -def test_get_glossary_flattened(): +def test_update_glossary_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = translation_service.Glossary() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_glossary( - name="name_value", + client.update_glossary( + glossary=translation_service.Glossary(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].glossary + mock_val = translation_service.Glossary(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_get_glossary_flattened_error(): +def test_update_glossary_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4743,43 +4551,48 @@ def test_get_glossary_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_glossary( - translation_service.GetGlossaryRequest(), - name="name_value", + client.update_glossary( + translation_service.UpdateGlossaryRequest(), + glossary=translation_service.Glossary(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_get_glossary_flattened_async(): +async def test_update_glossary_flattened_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = translation_service.Glossary() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - translation_service.Glossary() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_glossary( - name="name_value", + response = await client.update_glossary( + glossary=translation_service.Glossary(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].glossary + mock_val = translation_service.Glossary(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_get_glossary_flattened_error_async(): +async def test_update_glossary_flattened_error_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4787,20 +4600,21 @@ async def test_get_glossary_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_glossary( - translation_service.GetGlossaryRequest(), - name="name_value", + await client.update_glossary( + translation_service.UpdateGlossaryRequest(), + glossary=translation_service.Glossary(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - translation_service.DeleteGlossaryRequest, + translation_service.ListGlossariesRequest, dict, ], ) -def test_delete_glossary(request_type, transport: str = "grpc"): +def test_list_glossaries(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4811,22 +4625,25 @@ def test_delete_glossary(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_glossary(request) + call.return_value = translation_service.ListGlossariesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_glossaries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = translation_service.DeleteGlossaryRequest() + request = translation_service.ListGlossariesRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListGlossariesPager) + assert response.next_page_token == "next_page_token_value" -def test_delete_glossary_empty_call(): +def test_list_glossaries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceClient( @@ -4835,17 +4652,17 @@ def test_delete_glossary_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_glossary() + client.list_glossaries() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.DeleteGlossaryRequest() + assert args[0] == translation_service.ListGlossariesRequest() -def test_delete_glossary_non_empty_request_with_auto_populated_field(): +def test_list_glossaries_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = TranslationServiceClient( @@ -4856,24 +4673,28 @@ def test_delete_glossary_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = translation_service.DeleteGlossaryRequest( - name="name_value", + request = translation_service.ListGlossariesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_glossary(request=request) + client.list_glossaries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.DeleteGlossaryRequest( - name="name_value", + assert args[0] == translation_service.ListGlossariesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", ) -def test_delete_glossary_use_cached_wrapped_rpc(): +def test_list_glossaries_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4887,25 +4708,21 @@ def test_delete_glossary_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_glossary in client._transport._wrapped_methods + assert client._transport.list_glossaries in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_glossary] = mock_rpc + client._transport._wrapped_methods[client._transport.list_glossaries] = mock_rpc request = {} - client.delete_glossary(request) + client.list_glossaries(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_glossary(request) + client.list_glossaries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4913,7 +4730,7 @@ def test_delete_glossary_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_glossary_empty_call_async(): +async def test_list_glossaries_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceAsyncClient( @@ -4922,19 +4739,21 @@ async def test_delete_glossary_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + translation_service.ListGlossariesResponse( + next_page_token="next_page_token_value", + ) ) - response = await client.delete_glossary() + response = await client.list_glossaries() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.DeleteGlossaryRequest() + assert args[0] == translation_service.ListGlossariesRequest() @pytest.mark.asyncio -async def test_delete_glossary_async_use_cached_wrapped_rpc( +async def test_list_glossaries_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4951,27 +4770,23 @@ async def test_delete_glossary_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_glossary + client._client._transport.list_glossaries in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.delete_glossary + client._client._transport.list_glossaries ] = mock_object request = {} - await client.delete_glossary(request) + await client.list_glossaries(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_glossary(request) + await client.list_glossaries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4979,9 +4794,9 @@ async def test_delete_glossary_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_glossary_async( +async def test_list_glossaries_async( transport: str = "grpc_asyncio", - request_type=translation_service.DeleteGlossaryRequest, + request_type=translation_service.ListGlossariesRequest, ): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4993,43 +4808,46 @@ async def test_delete_glossary_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + translation_service.ListGlossariesResponse( + next_page_token="next_page_token_value", + ) ) - response = await client.delete_glossary(request) + response = await client.list_glossaries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = translation_service.DeleteGlossaryRequest() + request = translation_service.ListGlossariesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListGlossariesAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_delete_glossary_async_from_dict(): - await test_delete_glossary_async(request_type=dict) +async def test_list_glossaries_async_from_dict(): + await test_list_glossaries_async(request_type=dict) -def test_delete_glossary_field_headers(): +def test_list_glossaries_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = translation_service.DeleteGlossaryRequest() + request = translation_service.ListGlossariesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_glossary(request) + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + call.return_value = translation_service.ListGlossariesResponse() + client.list_glossaries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5040,28 +4858,28 @@ def test_delete_glossary_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_glossary_field_headers_async(): +async def test_list_glossaries_field_headers_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = translation_service.DeleteGlossaryRequest() + request = translation_service.ListGlossariesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + translation_service.ListGlossariesResponse() ) - await client.delete_glossary(request) + await client.list_glossaries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5072,35 +4890,35 @@ async def test_delete_glossary_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_delete_glossary_flattened(): +def test_list_glossaries_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = translation_service.ListGlossariesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_glossary( - name="name_value", + client.list_glossaries( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_delete_glossary_flattened_error(): +def test_list_glossaries_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5108,43 +4926,43 @@ def test_delete_glossary_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_glossary( - translation_service.DeleteGlossaryRequest(), - name="name_value", + client.list_glossaries( + translation_service.ListGlossariesRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_delete_glossary_flattened_async(): +async def test_list_glossaries_flattened_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = translation_service.ListGlossariesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + translation_service.ListGlossariesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_glossary( - name="name_value", + response = await client.list_glossaries( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_delete_glossary_flattened_error_async(): +async def test_list_glossaries_flattened_error_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5152,80 +4970,266 @@ async def test_delete_glossary_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_glossary( - translation_service.DeleteGlossaryRequest(), - name="name_value", + await client.list_glossaries( + translation_service.ListGlossariesRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - adaptive_mt.CreateAdaptiveMtDatasetRequest, - dict, - ], -) -def test_create_adaptive_mt_dataset(request_type, transport: str = "grpc"): +def test_list_glossaries_pager(transport_name: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_adaptive_mt_dataset), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = adaptive_mt.AdaptiveMtDataset( - name="name_value", - display_name="display_name_value", - source_language_code="source_language_code_value", - target_language_code="target_language_code_value", - example_count=1396, + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + translation_service.ListGlossariesResponse( + glossaries=[ + translation_service.Glossary(), + translation_service.Glossary(), + translation_service.Glossary(), + ], + next_page_token="abc", + ), + translation_service.ListGlossariesResponse( + glossaries=[], + next_page_token="def", + ), + translation_service.ListGlossariesResponse( + glossaries=[ + translation_service.Glossary(), + ], + next_page_token="ghi", + ), + translation_service.ListGlossariesResponse( + glossaries=[ + translation_service.Glossary(), + translation_service.Glossary(), + ], + ), + RuntimeError, ) - response = client.create_adaptive_mt_dataset(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = adaptive_mt.CreateAdaptiveMtDatasetRequest() - assert args[0] == request + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_glossaries(request={}, retry=retry, timeout=timeout) - # Establish that the response is the type that we expect. - assert isinstance(response, adaptive_mt.AdaptiveMtDataset) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_language_code == "source_language_code_value" - assert response.target_language_code == "target_language_code_value" - assert response.example_count == 1396 + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, translation_service.Glossary) for i in results) -def test_create_adaptive_mt_dataset_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
+def test_list_glossaries_pages(transport_name: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + translation_service.ListGlossariesResponse( + glossaries=[ + translation_service.Glossary(), + translation_service.Glossary(), + translation_service.Glossary(), + ], + next_page_token="abc", + ), + translation_service.ListGlossariesResponse( + glossaries=[], + next_page_token="def", + ), + translation_service.ListGlossariesResponse( + glossaries=[ + translation_service.Glossary(), + ], + next_page_token="ghi", + ), + translation_service.ListGlossariesResponse( + glossaries=[ + translation_service.Glossary(), + translation_service.Glossary(), + ], + ), + RuntimeError, + ) + pages = list(client.list_glossaries(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_glossaries_async_pager(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_adaptive_mt_dataset), "__call__" + type(client.transport.list_glossaries), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + translation_service.ListGlossariesResponse( + glossaries=[ + translation_service.Glossary(), + translation_service.Glossary(), + translation_service.Glossary(), + ], + next_page_token="abc", + ), + translation_service.ListGlossariesResponse( + glossaries=[], + next_page_token="def", + ), + translation_service.ListGlossariesResponse( + glossaries=[ + translation_service.Glossary(), + ], + next_page_token="ghi", + ), + translation_service.ListGlossariesResponse( + glossaries=[ + translation_service.Glossary(), + translation_service.Glossary(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_glossaries( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, translation_service.Glossary) for i in responses) + + +@pytest.mark.asyncio +async def test_list_glossaries_async_pages(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossaries), "__call__", new_callable=mock.AsyncMock ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + translation_service.ListGlossariesResponse( + glossaries=[ + translation_service.Glossary(), + translation_service.Glossary(), + translation_service.Glossary(), + ], + next_page_token="abc", + ), + translation_service.ListGlossariesResponse( + glossaries=[], + next_page_token="def", + ), + translation_service.ListGlossariesResponse( + glossaries=[ + translation_service.Glossary(), + ], + next_page_token="ghi", + ), + translation_service.ListGlossariesResponse( + glossaries=[ + translation_service.Glossary(), + translation_service.Glossary(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_glossaries(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.GetGlossaryRequest, + dict, + ], +) +def test_get_glossary(request_type, transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = translation_service.Glossary( + name="name_value", + entry_count=1210, + display_name="display_name_value", + ) + response = client.get_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = translation_service.GetGlossaryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, translation_service.Glossary) + assert response.name == "name_value" + assert response.entry_count == 1210 + assert response.display_name == "display_name_value" + + +def test_get_glossary_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_adaptive_mt_dataset() + client.get_glossary() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.CreateAdaptiveMtDatasetRequest() + assert args[0] == translation_service.GetGlossaryRequest() -def test_create_adaptive_mt_dataset_non_empty_request_with_auto_populated_field(): +def test_get_glossary_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = TranslationServiceClient( @@ -5236,26 +5240,24 @@ def test_create_adaptive_mt_dataset_non_empty_request_with_auto_populated_field( # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = adaptive_mt.CreateAdaptiveMtDatasetRequest( - parent="parent_value", + request = translation_service.GetGlossaryRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_adaptive_mt_dataset), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_adaptive_mt_dataset(request=request) + client.get_glossary(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.CreateAdaptiveMtDatasetRequest( - parent="parent_value", + assert args[0] == translation_service.GetGlossaryRequest( + name="name_value", ) -def test_create_adaptive_mt_dataset_use_cached_wrapped_rpc(): +def test_get_glossary_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5269,26 +5271,21 @@ def test_create_adaptive_mt_dataset_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_adaptive_mt_dataset - in client._transport._wrapped_methods - ) + assert client._transport.get_glossary in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_adaptive_mt_dataset - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_glossary] = mock_rpc request = {} - client.create_adaptive_mt_dataset(request) + client.get_glossary(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_adaptive_mt_dataset(request) + client.get_glossary(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5296,7 +5293,7 @@ def test_create_adaptive_mt_dataset_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_adaptive_mt_dataset_empty_call_async(): +async def test_get_glossary_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceAsyncClient( @@ -5305,27 +5302,23 @@ async def test_create_adaptive_mt_dataset_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_adaptive_mt_dataset), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.AdaptiveMtDataset( + translation_service.Glossary( name="name_value", + entry_count=1210, display_name="display_name_value", - source_language_code="source_language_code_value", - target_language_code="target_language_code_value", - example_count=1396, ) ) - response = await client.create_adaptive_mt_dataset() + response = await client.get_glossary() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.CreateAdaptiveMtDatasetRequest() + assert args[0] == translation_service.GetGlossaryRequest() @pytest.mark.asyncio -async def test_create_adaptive_mt_dataset_async_use_cached_wrapped_rpc( +async def test_get_glossary_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5342,23 +5335,23 @@ async def test_create_adaptive_mt_dataset_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_adaptive_mt_dataset + client._client._transport.get_glossary in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.create_adaptive_mt_dataset + client._client._transport.get_glossary ] = mock_object request = {} - await client.create_adaptive_mt_dataset(request) + await client.get_glossary(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.create_adaptive_mt_dataset(request) + await client.get_glossary(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5366,9 +5359,8 @@ async def test_create_adaptive_mt_dataset_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_adaptive_mt_dataset_async( - transport: str = "grpc_asyncio", - request_type=adaptive_mt.CreateAdaptiveMtDatasetRequest, +async def test_get_glossary_async( + transport: str = "grpc_asyncio", request_type=translation_service.GetGlossaryRequest ): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5380,58 +5372,50 @@ async def test_create_adaptive_mt_dataset_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_adaptive_mt_dataset), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.AdaptiveMtDataset( + translation_service.Glossary( name="name_value", + entry_count=1210, display_name="display_name_value", - source_language_code="source_language_code_value", - target_language_code="target_language_code_value", - example_count=1396, ) ) - response = await client.create_adaptive_mt_dataset(request) + response = await client.get_glossary(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = adaptive_mt.CreateAdaptiveMtDatasetRequest() + request = translation_service.GetGlossaryRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, adaptive_mt.AdaptiveMtDataset) + assert isinstance(response, translation_service.Glossary) assert response.name == "name_value" + assert response.entry_count == 1210 assert response.display_name == "display_name_value" - assert response.source_language_code == "source_language_code_value" - assert response.target_language_code == "target_language_code_value" - assert response.example_count == 1396 @pytest.mark.asyncio -async def test_create_adaptive_mt_dataset_async_from_dict(): - await test_create_adaptive_mt_dataset_async(request_type=dict) +async def test_get_glossary_async_from_dict(): + await test_get_glossary_async(request_type=dict) -def test_create_adaptive_mt_dataset_field_headers(): +def test_get_glossary_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = adaptive_mt.CreateAdaptiveMtDatasetRequest() + request = translation_service.GetGlossaryRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_adaptive_mt_dataset), "__call__" - ) as call: - call.return_value = adaptive_mt.AdaptiveMtDataset() - client.create_adaptive_mt_dataset(request) + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: + call.return_value = translation_service.Glossary() + client.get_glossary(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5442,30 +5426,28 @@ def test_create_adaptive_mt_dataset_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_adaptive_mt_dataset_field_headers_async(): +async def test_get_glossary_field_headers_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = adaptive_mt.CreateAdaptiveMtDatasetRequest() + request = translation_service.GetGlossaryRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_adaptive_mt_dataset), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.AdaptiveMtDataset() + translation_service.Glossary() ) - await client.create_adaptive_mt_dataset(request) + await client.get_glossary(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5476,41 +5458,35 @@ async def test_create_adaptive_mt_dataset_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_adaptive_mt_dataset_flattened(): +def test_get_glossary_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_adaptive_mt_dataset), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = adaptive_mt.AdaptiveMtDataset() + call.return_value = translation_service.Glossary() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_adaptive_mt_dataset( - parent="parent_value", - adaptive_mt_dataset=adaptive_mt.AdaptiveMtDataset(name="name_value"), + client.get_glossary( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].adaptive_mt_dataset - mock_val = adaptive_mt.AdaptiveMtDataset(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_adaptive_mt_dataset_flattened_error(): +def test_get_glossary_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5518,50 +5494,43 @@ def test_create_adaptive_mt_dataset_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_adaptive_mt_dataset( - adaptive_mt.CreateAdaptiveMtDatasetRequest(), - parent="parent_value", - adaptive_mt_dataset=adaptive_mt.AdaptiveMtDataset(name="name_value"), + client.get_glossary( + translation_service.GetGlossaryRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_adaptive_mt_dataset_flattened_async(): +async def test_get_glossary_flattened_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_adaptive_mt_dataset), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = adaptive_mt.AdaptiveMtDataset() + call.return_value = translation_service.Glossary() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.AdaptiveMtDataset() + translation_service.Glossary() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_adaptive_mt_dataset( - parent="parent_value", - adaptive_mt_dataset=adaptive_mt.AdaptiveMtDataset(name="name_value"), + response = await client.get_glossary( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].adaptive_mt_dataset - mock_val = adaptive_mt.AdaptiveMtDataset(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_adaptive_mt_dataset_flattened_error_async(): +async def test_get_glossary_flattened_error_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5569,21 +5538,20 @@ async def test_create_adaptive_mt_dataset_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_adaptive_mt_dataset( - adaptive_mt.CreateAdaptiveMtDatasetRequest(), - parent="parent_value", - adaptive_mt_dataset=adaptive_mt.AdaptiveMtDataset(name="name_value"), + await client.get_glossary( + translation_service.GetGlossaryRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - adaptive_mt.DeleteAdaptiveMtDatasetRequest, + translation_service.DeleteGlossaryRequest, dict, ], ) -def test_delete_adaptive_mt_dataset(request_type, transport: str = "grpc"): +def test_delete_glossary(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5594,24 +5562,22 @@ def test_delete_adaptive_mt_dataset(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_adaptive_mt_dataset), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_adaptive_mt_dataset(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_glossary(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = adaptive_mt.DeleteAdaptiveMtDatasetRequest() + request = translation_service.DeleteGlossaryRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, future.Future) -def test_delete_adaptive_mt_dataset_empty_call(): +def test_delete_glossary_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceClient( @@ -5620,19 +5586,17 @@ def test_delete_adaptive_mt_dataset_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_adaptive_mt_dataset), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_adaptive_mt_dataset() + client.delete_glossary() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.DeleteAdaptiveMtDatasetRequest() + assert args[0] == translation_service.DeleteGlossaryRequest() -def test_delete_adaptive_mt_dataset_non_empty_request_with_auto_populated_field(): +def test_delete_glossary_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = TranslationServiceClient( @@ -5643,26 +5607,24 @@ def test_delete_adaptive_mt_dataset_non_empty_request_with_auto_populated_field( # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = adaptive_mt.DeleteAdaptiveMtDatasetRequest( + request = translation_service.DeleteGlossaryRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_adaptive_mt_dataset), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_adaptive_mt_dataset(request=request) + client.delete_glossary(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.DeleteAdaptiveMtDatasetRequest( + assert args[0] == translation_service.DeleteGlossaryRequest( name="name_value", ) -def test_delete_adaptive_mt_dataset_use_cached_wrapped_rpc(): +def test_delete_glossary_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5676,26 +5638,25 @@ def test_delete_adaptive_mt_dataset_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_adaptive_mt_dataset - in client._transport._wrapped_methods - ) + assert client._transport.delete_glossary in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_adaptive_mt_dataset - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_glossary] = mock_rpc request = {} - client.delete_adaptive_mt_dataset(request) + client.delete_glossary(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_adaptive_mt_dataset(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_glossary(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5703,7 +5664,7 @@ def test_delete_adaptive_mt_dataset_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_adaptive_mt_dataset_empty_call_async(): +async def test_delete_glossary_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = TranslationServiceAsyncClient( @@ -5712,19 +5673,19 @@ async def test_delete_adaptive_mt_dataset_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_adaptive_mt_dataset), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_adaptive_mt_dataset() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_glossary() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.DeleteAdaptiveMtDatasetRequest() + assert args[0] == translation_service.DeleteGlossaryRequest() @pytest.mark.asyncio -async def test_delete_adaptive_mt_dataset_async_use_cached_wrapped_rpc( +async def test_delete_glossary_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5741,23 +5702,27 @@ async def test_delete_adaptive_mt_dataset_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_adaptive_mt_dataset + client._client._transport.delete_glossary in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.delete_adaptive_mt_dataset + client._client._transport.delete_glossary ] = mock_object request = {} - await client.delete_adaptive_mt_dataset(request) + await client.delete_glossary(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.delete_adaptive_mt_dataset(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_glossary(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5765,9 +5730,9 @@ async def test_delete_adaptive_mt_dataset_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_adaptive_mt_dataset_async( +async def test_delete_glossary_async( transport: str = "grpc_asyncio", - request_type=adaptive_mt.DeleteAdaptiveMtDatasetRequest, + request_type=translation_service.DeleteGlossaryRequest, ): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5779,45 +5744,43 @@ async def test_delete_adaptive_mt_dataset_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_adaptive_mt_dataset), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_adaptive_mt_dataset(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_glossary(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = adaptive_mt.DeleteAdaptiveMtDatasetRequest() + request = translation_service.DeleteGlossaryRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_delete_adaptive_mt_dataset_async_from_dict(): - await test_delete_adaptive_mt_dataset_async(request_type=dict) +async def test_delete_glossary_async_from_dict(): + await test_delete_glossary_async(request_type=dict) -def test_delete_adaptive_mt_dataset_field_headers(): +def test_delete_glossary_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = adaptive_mt.DeleteAdaptiveMtDatasetRequest() + request = translation_service.DeleteGlossaryRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_adaptive_mt_dataset), "__call__" - ) as call: - call.return_value = None - client.delete_adaptive_mt_dataset(request) + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_glossary(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5833,23 +5796,23 @@ def test_delete_adaptive_mt_dataset_field_headers(): @pytest.mark.asyncio -async def test_delete_adaptive_mt_dataset_field_headers_async(): +async def test_delete_glossary_field_headers_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = adaptive_mt.DeleteAdaptiveMtDatasetRequest() + request = translation_service.DeleteGlossaryRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_adaptive_mt_dataset), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_adaptive_mt_dataset(request) + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_glossary(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5864,20 +5827,18 @@ async def test_delete_adaptive_mt_dataset_field_headers_async(): ) in kw["metadata"] -def test_delete_adaptive_mt_dataset_flattened(): +def test_delete_glossary_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_adaptive_mt_dataset), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.delete_adaptive_mt_dataset( + client.delete_glossary( name="name_value", ) @@ -5890,7 +5851,7 @@ def test_delete_adaptive_mt_dataset_flattened(): assert arg == mock_val -def test_delete_adaptive_mt_dataset_flattened_error(): +def test_delete_glossary_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5898,29 +5859,29 @@ def test_delete_adaptive_mt_dataset_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_adaptive_mt_dataset( - adaptive_mt.DeleteAdaptiveMtDatasetRequest(), - name="name_value", + client.delete_glossary( + translation_service.DeleteGlossaryRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_delete_adaptive_mt_dataset_flattened_async(): +async def test_delete_glossary_flattened_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_adaptive_mt_dataset), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = operations_pb2.Operation(name="operations/op") - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_adaptive_mt_dataset( + response = await client.delete_glossary( name="name_value", ) @@ -5934,7 +5895,7 @@ async def test_delete_adaptive_mt_dataset_flattened_async(): @pytest.mark.asyncio -async def test_delete_adaptive_mt_dataset_flattened_error_async(): +async def test_delete_glossary_flattened_error_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5942,8 +5903,8 @@ async def test_delete_adaptive_mt_dataset_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_adaptive_mt_dataset( - adaptive_mt.DeleteAdaptiveMtDatasetRequest(), + await client.delete_glossary( + translation_service.DeleteGlossaryRequest(), name="name_value", ) @@ -5951,11 +5912,11 @@ async def test_delete_adaptive_mt_dataset_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - adaptive_mt.GetAdaptiveMtDatasetRequest, + translation_service.GetGlossaryEntryRequest, dict, ], ) -def test_get_adaptive_mt_dataset(request_type, transport: str = "grpc"): +def test_get_glossary_entry(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5967,34 +5928,28 @@ def test_get_adaptive_mt_dataset(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_adaptive_mt_dataset), "__call__" + type(client.transport.get_glossary_entry), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = adaptive_mt.AdaptiveMtDataset( + call.return_value = common.GlossaryEntry( name="name_value", - display_name="display_name_value", - source_language_code="source_language_code_value", - target_language_code="target_language_code_value", - example_count=1396, + description="description_value", ) - response = client.get_adaptive_mt_dataset(request) + response = client.get_glossary_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = adaptive_mt.GetAdaptiveMtDatasetRequest() + request = translation_service.GetGlossaryEntryRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, adaptive_mt.AdaptiveMtDataset) + assert isinstance(response, common.GlossaryEntry) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_language_code == "source_language_code_value" - assert response.target_language_code == "target_language_code_value" - assert response.example_count == 1396 + assert response.description == "description_value" -def test_get_adaptive_mt_dataset_empty_call(): +def test_get_glossary_entry_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceClient( @@ -6004,18 +5959,18 @@ def test_get_adaptive_mt_dataset_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_adaptive_mt_dataset), "__call__" + type(client.transport.get_glossary_entry), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_adaptive_mt_dataset() + client.get_glossary_entry() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.GetAdaptiveMtDatasetRequest() + assert args[0] == translation_service.GetGlossaryEntryRequest() -def test_get_adaptive_mt_dataset_non_empty_request_with_auto_populated_field(): +def test_get_glossary_entry_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = TranslationServiceClient( @@ -6026,26 +5981,26 @@ def test_get_adaptive_mt_dataset_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = adaptive_mt.GetAdaptiveMtDatasetRequest( + request = translation_service.GetGlossaryEntryRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_adaptive_mt_dataset), "__call__" + type(client.transport.get_glossary_entry), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_adaptive_mt_dataset(request=request) + client.get_glossary_entry(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.GetAdaptiveMtDatasetRequest( + assert args[0] == translation_service.GetGlossaryEntryRequest( name="name_value", ) -def test_get_adaptive_mt_dataset_use_cached_wrapped_rpc(): +def test_get_glossary_entry_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6060,8 +6015,7 @@ def test_get_adaptive_mt_dataset_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_adaptive_mt_dataset - in client._transport._wrapped_methods + client._transport.get_glossary_entry in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -6070,15 +6024,15 @@ def test_get_adaptive_mt_dataset_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_adaptive_mt_dataset + client._transport.get_glossary_entry ] = mock_rpc request = {} - client.get_adaptive_mt_dataset(request) + client.get_glossary_entry(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_adaptive_mt_dataset(request) + client.get_glossary_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6086,7 +6040,7 @@ def test_get_adaptive_mt_dataset_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_adaptive_mt_dataset_empty_call_async(): +async def test_get_glossary_entry_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceAsyncClient( @@ -6096,26 +6050,23 @@ async def test_get_adaptive_mt_dataset_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_adaptive_mt_dataset), "__call__" + type(client.transport.get_glossary_entry), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.AdaptiveMtDataset( + common.GlossaryEntry( name="name_value", - display_name="display_name_value", - source_language_code="source_language_code_value", - target_language_code="target_language_code_value", - example_count=1396, + description="description_value", ) ) - response = await client.get_adaptive_mt_dataset() + response = await client.get_glossary_entry() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.GetAdaptiveMtDatasetRequest() + assert args[0] == translation_service.GetGlossaryEntryRequest() @pytest.mark.asyncio -async def test_get_adaptive_mt_dataset_async_use_cached_wrapped_rpc( +async def test_get_glossary_entry_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6132,23 +6083,23 @@ async def test_get_adaptive_mt_dataset_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_adaptive_mt_dataset + client._client._transport.get_glossary_entry in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.get_adaptive_mt_dataset + client._client._transport.get_glossary_entry ] = mock_object request = {} - await client.get_adaptive_mt_dataset(request) + await client.get_glossary_entry(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.get_adaptive_mt_dataset(request) + await client.get_glossary_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6156,9 +6107,9 @@ async def test_get_adaptive_mt_dataset_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_adaptive_mt_dataset_async( +async def test_get_glossary_entry_async( transport: str = "grpc_asyncio", - request_type=adaptive_mt.GetAdaptiveMtDatasetRequest, + request_type=translation_service.GetGlossaryEntryRequest, ): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6171,57 +6122,51 @@ async def test_get_adaptive_mt_dataset_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_adaptive_mt_dataset), "__call__" + type(client.transport.get_glossary_entry), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.AdaptiveMtDataset( + common.GlossaryEntry( name="name_value", - display_name="display_name_value", - source_language_code="source_language_code_value", - target_language_code="target_language_code_value", - example_count=1396, + description="description_value", ) ) - response = await client.get_adaptive_mt_dataset(request) + response = await client.get_glossary_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = adaptive_mt.GetAdaptiveMtDatasetRequest() + request = translation_service.GetGlossaryEntryRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, adaptive_mt.AdaptiveMtDataset) + assert isinstance(response, common.GlossaryEntry) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_language_code == "source_language_code_value" - assert response.target_language_code == "target_language_code_value" - assert response.example_count == 1396 + assert response.description == "description_value" @pytest.mark.asyncio -async def test_get_adaptive_mt_dataset_async_from_dict(): - await test_get_adaptive_mt_dataset_async(request_type=dict) +async def test_get_glossary_entry_async_from_dict(): + await test_get_glossary_entry_async(request_type=dict) -def test_get_adaptive_mt_dataset_field_headers(): +def test_get_glossary_entry_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = adaptive_mt.GetAdaptiveMtDatasetRequest() + request = translation_service.GetGlossaryEntryRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_adaptive_mt_dataset), "__call__" + type(client.transport.get_glossary_entry), "__call__" ) as call: - call.return_value = adaptive_mt.AdaptiveMtDataset() - client.get_adaptive_mt_dataset(request) + call.return_value = common.GlossaryEntry() + client.get_glossary_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -6237,25 +6182,25 @@ def test_get_adaptive_mt_dataset_field_headers(): @pytest.mark.asyncio -async def test_get_adaptive_mt_dataset_field_headers_async(): +async def test_get_glossary_entry_field_headers_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = adaptive_mt.GetAdaptiveMtDatasetRequest() + request = translation_service.GetGlossaryEntryRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_adaptive_mt_dataset), "__call__" + type(client.transport.get_glossary_entry), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.AdaptiveMtDataset() + common.GlossaryEntry() ) - await client.get_adaptive_mt_dataset(request) + await client.get_glossary_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6270,20 +6215,20 @@ async def test_get_adaptive_mt_dataset_field_headers_async(): ) in kw["metadata"] -def test_get_adaptive_mt_dataset_flattened(): +def test_get_glossary_entry_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_adaptive_mt_dataset), "__call__" + type(client.transport.get_glossary_entry), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = adaptive_mt.AdaptiveMtDataset() + call.return_value = common.GlossaryEntry() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_adaptive_mt_dataset( + client.get_glossary_entry( name="name_value", ) @@ -6296,7 +6241,7 @@ def test_get_adaptive_mt_dataset_flattened(): assert arg == mock_val -def test_get_adaptive_mt_dataset_flattened_error(): +def test_get_glossary_entry_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6304,31 +6249,31 @@ def test_get_adaptive_mt_dataset_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_adaptive_mt_dataset( - adaptive_mt.GetAdaptiveMtDatasetRequest(), + client.get_glossary_entry( + translation_service.GetGlossaryEntryRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_adaptive_mt_dataset_flattened_async(): +async def test_get_glossary_entry_flattened_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_adaptive_mt_dataset), "__call__" + type(client.transport.get_glossary_entry), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = adaptive_mt.AdaptiveMtDataset() + call.return_value = common.GlossaryEntry() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.AdaptiveMtDataset() + common.GlossaryEntry() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_adaptive_mt_dataset( + response = await client.get_glossary_entry( name="name_value", ) @@ -6342,7 +6287,7 @@ async def test_get_adaptive_mt_dataset_flattened_async(): @pytest.mark.asyncio -async def test_get_adaptive_mt_dataset_flattened_error_async(): +async def test_get_glossary_entry_flattened_error_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6350,8 +6295,8 @@ async def test_get_adaptive_mt_dataset_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_adaptive_mt_dataset( - adaptive_mt.GetAdaptiveMtDatasetRequest(), + await client.get_glossary_entry( + translation_service.GetGlossaryEntryRequest(), name="name_value", ) @@ -6359,11 +6304,11 @@ async def test_get_adaptive_mt_dataset_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - adaptive_mt.ListAdaptiveMtDatasetsRequest, + translation_service.ListGlossaryEntriesRequest, dict, ], ) -def test_list_adaptive_mt_datasets(request_type, transport: str = "grpc"): +def test_list_glossary_entries(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6375,26 +6320,26 @@ def test_list_adaptive_mt_datasets(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_adaptive_mt_datasets), "__call__" + type(client.transport.list_glossary_entries), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse( + call.return_value = translation_service.ListGlossaryEntriesResponse( next_page_token="next_page_token_value", ) - response = client.list_adaptive_mt_datasets(request) + response = client.list_glossary_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = adaptive_mt.ListAdaptiveMtDatasetsRequest() + request = translation_service.ListGlossaryEntriesRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAdaptiveMtDatasetsPager) + assert isinstance(response, pagers.ListGlossaryEntriesPager) assert response.next_page_token == "next_page_token_value" -def test_list_adaptive_mt_datasets_empty_call(): +def test_list_glossary_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceClient( @@ -6404,18 +6349,18 @@ def test_list_adaptive_mt_datasets_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_adaptive_mt_datasets), "__call__" + type(client.transport.list_glossary_entries), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_adaptive_mt_datasets() + client.list_glossary_entries() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.ListAdaptiveMtDatasetsRequest() + assert args[0] == translation_service.ListGlossaryEntriesRequest() -def test_list_adaptive_mt_datasets_non_empty_request_with_auto_populated_field(): +def test_list_glossary_entries_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = TranslationServiceClient( @@ -6426,30 +6371,28 @@ def test_list_adaptive_mt_datasets_non_empty_request_with_auto_populated_field() # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = adaptive_mt.ListAdaptiveMtDatasetsRequest( + request = translation_service.ListGlossaryEntriesRequest( parent="parent_value", page_token="page_token_value", - filter="filter_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_adaptive_mt_datasets), "__call__" + type(client.transport.list_glossary_entries), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_adaptive_mt_datasets(request=request) + client.list_glossary_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.ListAdaptiveMtDatasetsRequest( + assert args[0] == translation_service.ListGlossaryEntriesRequest( parent="parent_value", page_token="page_token_value", - filter="filter_value", ) -def test_list_adaptive_mt_datasets_use_cached_wrapped_rpc(): +def test_list_glossary_entries_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6464,7 +6407,7 @@ def test_list_adaptive_mt_datasets_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_adaptive_mt_datasets + client._transport.list_glossary_entries in client._transport._wrapped_methods ) @@ -6474,15 +6417,15 @@ def test_list_adaptive_mt_datasets_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_adaptive_mt_datasets + client._transport.list_glossary_entries ] = mock_rpc request = {} - client.list_adaptive_mt_datasets(request) + client.list_glossary_entries(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_adaptive_mt_datasets(request) + client.list_glossary_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6490,7 +6433,7 @@ def test_list_adaptive_mt_datasets_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_adaptive_mt_datasets_empty_call_async(): +async def test_list_glossary_entries_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceAsyncClient( @@ -6500,22 +6443,22 @@ async def test_list_adaptive_mt_datasets_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_adaptive_mt_datasets), "__call__" + type(client.transport.list_glossary_entries), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.ListAdaptiveMtDatasetsResponse( + translation_service.ListGlossaryEntriesResponse( next_page_token="next_page_token_value", ) ) - response = await client.list_adaptive_mt_datasets() + response = await client.list_glossary_entries() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.ListAdaptiveMtDatasetsRequest() + assert args[0] == translation_service.ListGlossaryEntriesRequest() @pytest.mark.asyncio -async def test_list_adaptive_mt_datasets_async_use_cached_wrapped_rpc( +async def test_list_glossary_entries_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6532,23 +6475,23 @@ async def test_list_adaptive_mt_datasets_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_adaptive_mt_datasets + client._client._transport.list_glossary_entries in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.list_adaptive_mt_datasets + client._client._transport.list_glossary_entries ] = mock_object request = {} - await client.list_adaptive_mt_datasets(request) + await client.list_glossary_entries(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.list_adaptive_mt_datasets(request) + await client.list_glossary_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6556,9 +6499,9 @@ async def test_list_adaptive_mt_datasets_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_adaptive_mt_datasets_async( +async def test_list_glossary_entries_async( transport: str = "grpc_asyncio", - request_type=adaptive_mt.ListAdaptiveMtDatasetsRequest, + request_type=translation_service.ListGlossaryEntriesRequest, ): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6571,49 +6514,49 @@ async def test_list_adaptive_mt_datasets_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_adaptive_mt_datasets), "__call__" + type(client.transport.list_glossary_entries), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.ListAdaptiveMtDatasetsResponse( + translation_service.ListGlossaryEntriesResponse( next_page_token="next_page_token_value", ) ) - response = await client.list_adaptive_mt_datasets(request) + response = await client.list_glossary_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = adaptive_mt.ListAdaptiveMtDatasetsRequest() + request = translation_service.ListGlossaryEntriesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAdaptiveMtDatasetsAsyncPager) + assert isinstance(response, pagers.ListGlossaryEntriesAsyncPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_list_adaptive_mt_datasets_async_from_dict(): - await test_list_adaptive_mt_datasets_async(request_type=dict) +async def test_list_glossary_entries_async_from_dict(): + await test_list_glossary_entries_async(request_type=dict) -def test_list_adaptive_mt_datasets_field_headers(): +def test_list_glossary_entries_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = adaptive_mt.ListAdaptiveMtDatasetsRequest() + request = translation_service.ListGlossaryEntriesRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_adaptive_mt_datasets), "__call__" + type(client.transport.list_glossary_entries), "__call__" ) as call: - call.return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse() - client.list_adaptive_mt_datasets(request) + call.return_value = translation_service.ListGlossaryEntriesResponse() + client.list_glossary_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -6629,25 +6572,25 @@ def test_list_adaptive_mt_datasets_field_headers(): @pytest.mark.asyncio -async def test_list_adaptive_mt_datasets_field_headers_async(): +async def test_list_glossary_entries_field_headers_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = adaptive_mt.ListAdaptiveMtDatasetsRequest() + request = translation_service.ListGlossaryEntriesRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_adaptive_mt_datasets), "__call__" + type(client.transport.list_glossary_entries), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.ListAdaptiveMtDatasetsResponse() + translation_service.ListGlossaryEntriesResponse() ) - await client.list_adaptive_mt_datasets(request) + await client.list_glossary_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6662,20 +6605,20 @@ async def test_list_adaptive_mt_datasets_field_headers_async(): ) in kw["metadata"] -def test_list_adaptive_mt_datasets_flattened(): +def test_list_glossary_entries_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_adaptive_mt_datasets), "__call__" + type(client.transport.list_glossary_entries), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse() + call.return_value = translation_service.ListGlossaryEntriesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.list_adaptive_mt_datasets( + client.list_glossary_entries( parent="parent_value", ) @@ -6688,7 +6631,7 @@ def test_list_adaptive_mt_datasets_flattened(): assert arg == mock_val -def test_list_adaptive_mt_datasets_flattened_error(): +def test_list_glossary_entries_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6696,31 +6639,31 @@ def test_list_adaptive_mt_datasets_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_adaptive_mt_datasets( - adaptive_mt.ListAdaptiveMtDatasetsRequest(), + client.list_glossary_entries( + translation_service.ListGlossaryEntriesRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_adaptive_mt_datasets_flattened_async(): +async def test_list_glossary_entries_flattened_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_adaptive_mt_datasets), "__call__" + type(client.transport.list_glossary_entries), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse() + call.return_value = translation_service.ListGlossaryEntriesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.ListAdaptiveMtDatasetsResponse() + translation_service.ListGlossaryEntriesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_adaptive_mt_datasets( + response = await client.list_glossary_entries( parent="parent_value", ) @@ -6734,7 +6677,7 @@ async def test_list_adaptive_mt_datasets_flattened_async(): @pytest.mark.asyncio -async def test_list_adaptive_mt_datasets_flattened_error_async(): +async def test_list_glossary_entries_flattened_error_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6742,13 +6685,13 @@ async def test_list_adaptive_mt_datasets_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_adaptive_mt_datasets( - adaptive_mt.ListAdaptiveMtDatasetsRequest(), + await client.list_glossary_entries( + translation_service.ListGlossaryEntriesRequest(), parent="parent_value", ) -def test_list_adaptive_mt_datasets_pager(transport_name: str = "grpc"): +def test_list_glossary_entries_pager(transport_name: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, @@ -6756,32 +6699,32 @@ def test_list_adaptive_mt_datasets_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_adaptive_mt_datasets), "__call__" + type(client.transport.list_glossary_entries), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( - adaptive_mt.ListAdaptiveMtDatasetsResponse( - adaptive_mt_datasets=[ - adaptive_mt.AdaptiveMtDataset(), - adaptive_mt.AdaptiveMtDataset(), - adaptive_mt.AdaptiveMtDataset(), + translation_service.ListGlossaryEntriesResponse( + glossary_entries=[ + common.GlossaryEntry(), + common.GlossaryEntry(), + common.GlossaryEntry(), ], next_page_token="abc", ), - adaptive_mt.ListAdaptiveMtDatasetsResponse( - adaptive_mt_datasets=[], + translation_service.ListGlossaryEntriesResponse( + glossary_entries=[], next_page_token="def", ), - adaptive_mt.ListAdaptiveMtDatasetsResponse( - adaptive_mt_datasets=[ - adaptive_mt.AdaptiveMtDataset(), + translation_service.ListGlossaryEntriesResponse( + glossary_entries=[ + common.GlossaryEntry(), ], next_page_token="ghi", ), - adaptive_mt.ListAdaptiveMtDatasetsResponse( - adaptive_mt_datasets=[ - adaptive_mt.AdaptiveMtDataset(), - adaptive_mt.AdaptiveMtDataset(), + translation_service.ListGlossaryEntriesResponse( + glossary_entries=[ + common.GlossaryEntry(), + common.GlossaryEntry(), ], ), RuntimeError, @@ -6793,9 +6736,7 @@ def test_list_adaptive_mt_datasets_pager(transport_name: str = "grpc"): expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_adaptive_mt_datasets( - request={}, retry=retry, timeout=timeout - ) + pager = client.list_glossary_entries(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -6803,10 +6744,10 @@ def test_list_adaptive_mt_datasets_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, adaptive_mt.AdaptiveMtDataset) for i in results) + assert all(isinstance(i, common.GlossaryEntry) for i in results) -def test_list_adaptive_mt_datasets_pages(transport_name: str = "grpc"): +def test_list_glossary_entries_pages(transport_name: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, @@ -6814,82 +6755,82 @@ def test_list_adaptive_mt_datasets_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_adaptive_mt_datasets), "__call__" + type(client.transport.list_glossary_entries), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( - adaptive_mt.ListAdaptiveMtDatasetsResponse( - adaptive_mt_datasets=[ - adaptive_mt.AdaptiveMtDataset(), - adaptive_mt.AdaptiveMtDataset(), - adaptive_mt.AdaptiveMtDataset(), + translation_service.ListGlossaryEntriesResponse( + glossary_entries=[ + common.GlossaryEntry(), + common.GlossaryEntry(), + common.GlossaryEntry(), ], next_page_token="abc", ), - adaptive_mt.ListAdaptiveMtDatasetsResponse( - adaptive_mt_datasets=[], + translation_service.ListGlossaryEntriesResponse( + glossary_entries=[], next_page_token="def", ), - adaptive_mt.ListAdaptiveMtDatasetsResponse( - adaptive_mt_datasets=[ - adaptive_mt.AdaptiveMtDataset(), + translation_service.ListGlossaryEntriesResponse( + glossary_entries=[ + common.GlossaryEntry(), ], next_page_token="ghi", ), - adaptive_mt.ListAdaptiveMtDatasetsResponse( - adaptive_mt_datasets=[ - adaptive_mt.AdaptiveMtDataset(), - adaptive_mt.AdaptiveMtDataset(), + translation_service.ListGlossaryEntriesResponse( + glossary_entries=[ + common.GlossaryEntry(), + common.GlossaryEntry(), ], ), RuntimeError, ) - pages = list(client.list_adaptive_mt_datasets(request={}).pages) + pages = list(client.list_glossary_entries(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_adaptive_mt_datasets_async_pager(): +async def test_list_glossary_entries_async_pager(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_adaptive_mt_datasets), + type(client.transport.list_glossary_entries), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( - adaptive_mt.ListAdaptiveMtDatasetsResponse( - adaptive_mt_datasets=[ - adaptive_mt.AdaptiveMtDataset(), - adaptive_mt.AdaptiveMtDataset(), - adaptive_mt.AdaptiveMtDataset(), + translation_service.ListGlossaryEntriesResponse( + glossary_entries=[ + common.GlossaryEntry(), + common.GlossaryEntry(), + common.GlossaryEntry(), ], next_page_token="abc", ), - adaptive_mt.ListAdaptiveMtDatasetsResponse( - adaptive_mt_datasets=[], + translation_service.ListGlossaryEntriesResponse( + glossary_entries=[], next_page_token="def", ), - adaptive_mt.ListAdaptiveMtDatasetsResponse( - adaptive_mt_datasets=[ - adaptive_mt.AdaptiveMtDataset(), + translation_service.ListGlossaryEntriesResponse( + glossary_entries=[ + common.GlossaryEntry(), ], next_page_token="ghi", ), - adaptive_mt.ListAdaptiveMtDatasetsResponse( - adaptive_mt_datasets=[ - adaptive_mt.AdaptiveMtDataset(), - adaptive_mt.AdaptiveMtDataset(), + translation_service.ListGlossaryEntriesResponse( + glossary_entries=[ + common.GlossaryEntry(), + common.GlossaryEntry(), ], ), RuntimeError, ) - async_pager = await client.list_adaptive_mt_datasets( + async_pager = await client.list_glossary_entries( request={}, ) assert async_pager.next_page_token == "abc" @@ -6898,45 +6839,45 @@ async def test_list_adaptive_mt_datasets_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, adaptive_mt.AdaptiveMtDataset) for i in responses) + assert all(isinstance(i, common.GlossaryEntry) for i in responses) @pytest.mark.asyncio -async def test_list_adaptive_mt_datasets_async_pages(): +async def test_list_glossary_entries_async_pages(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_adaptive_mt_datasets), + type(client.transport.list_glossary_entries), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
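# Editor's note (not part of the generated patch): the surrounding tests exercise the
# new list_glossary_entries RPC and its sync/async pagers. A minimal usage sketch,
# assuming the standard package-level client export and a placeholder glossary
# resource path (neither is taken verbatim from this diff):
from google.cloud import translate_v3

def sketch_list_glossary_entries():
    client = translate_v3.TranslationServiceClient()
    # Parent is the glossary whose entries are listed (path format assumed).
    parent = "projects/my-project/locations/us-central1/glossaries/my-glossary"
    # The method returns a pager; iterating it fetches successive pages, matching
    # the pager/pages behaviour asserted in the tests above.
    for entry in client.list_glossary_entries(parent=parent):
        print(entry.name, entry.description)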
call.side_effect = ( - adaptive_mt.ListAdaptiveMtDatasetsResponse( - adaptive_mt_datasets=[ - adaptive_mt.AdaptiveMtDataset(), - adaptive_mt.AdaptiveMtDataset(), - adaptive_mt.AdaptiveMtDataset(), + translation_service.ListGlossaryEntriesResponse( + glossary_entries=[ + common.GlossaryEntry(), + common.GlossaryEntry(), + common.GlossaryEntry(), ], next_page_token="abc", ), - adaptive_mt.ListAdaptiveMtDatasetsResponse( - adaptive_mt_datasets=[], + translation_service.ListGlossaryEntriesResponse( + glossary_entries=[], next_page_token="def", ), - adaptive_mt.ListAdaptiveMtDatasetsResponse( - adaptive_mt_datasets=[ - adaptive_mt.AdaptiveMtDataset(), + translation_service.ListGlossaryEntriesResponse( + glossary_entries=[ + common.GlossaryEntry(), ], next_page_token="ghi", ), - adaptive_mt.ListAdaptiveMtDatasetsResponse( - adaptive_mt_datasets=[ - adaptive_mt.AdaptiveMtDataset(), - adaptive_mt.AdaptiveMtDataset(), + translation_service.ListGlossaryEntriesResponse( + glossary_entries=[ + common.GlossaryEntry(), + common.GlossaryEntry(), ], ), RuntimeError, @@ -6945,7 +6886,7 @@ async def test_list_adaptive_mt_datasets_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_adaptive_mt_datasets(request={}) + await client.list_glossary_entries(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -6955,11 +6896,11 @@ async def test_list_adaptive_mt_datasets_async_pages(): @pytest.mark.parametrize( "request_type", [ - adaptive_mt.AdaptiveMtTranslateRequest, + translation_service.CreateGlossaryEntryRequest, dict, ], ) -def test_adaptive_mt_translate(request_type, transport: str = "grpc"): +def test_create_glossary_entry(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6971,26 +6912,28 @@ def test_adaptive_mt_translate(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.adaptive_mt_translate), "__call__" + type(client.transport.create_glossary_entry), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = adaptive_mt.AdaptiveMtTranslateResponse( - language_code="language_code_value", + call.return_value = common.GlossaryEntry( + name="name_value", + description="description_value", ) - response = client.adaptive_mt_translate(request) + response = client.create_glossary_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = adaptive_mt.AdaptiveMtTranslateRequest() + request = translation_service.CreateGlossaryEntryRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, adaptive_mt.AdaptiveMtTranslateResponse) - assert response.language_code == "language_code_value" + assert isinstance(response, common.GlossaryEntry) + assert response.name == "name_value" + assert response.description == "description_value" -def test_adaptive_mt_translate_empty_call(): +def test_create_glossary_entry_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = TranslationServiceClient( @@ -7000,18 +6943,18 @@ def test_adaptive_mt_translate_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.adaptive_mt_translate), "__call__" + type(client.transport.create_glossary_entry), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.adaptive_mt_translate() + client.create_glossary_entry() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.AdaptiveMtTranslateRequest() + assert args[0] == translation_service.CreateGlossaryEntryRequest() -def test_adaptive_mt_translate_non_empty_request_with_auto_populated_field(): +def test_create_glossary_entry_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = TranslationServiceClient( @@ -7022,28 +6965,26 @@ def test_adaptive_mt_translate_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = adaptive_mt.AdaptiveMtTranslateRequest( + request = translation_service.CreateGlossaryEntryRequest( parent="parent_value", - dataset="dataset_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.adaptive_mt_translate), "__call__" + type(client.transport.create_glossary_entry), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.adaptive_mt_translate(request=request) + client.create_glossary_entry(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.AdaptiveMtTranslateRequest( + assert args[0] == translation_service.CreateGlossaryEntryRequest( parent="parent_value", - dataset="dataset_value", ) -def test_adaptive_mt_translate_use_cached_wrapped_rpc(): +def test_create_glossary_entry_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7058,7 +6999,7 @@ def test_adaptive_mt_translate_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.adaptive_mt_translate + client._transport.create_glossary_entry in client._transport._wrapped_methods ) @@ -7068,15 +7009,15 @@ def test_adaptive_mt_translate_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.adaptive_mt_translate + client._transport.create_glossary_entry ] = mock_rpc request = {} - client.adaptive_mt_translate(request) + client.create_glossary_entry(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.adaptive_mt_translate(request) + client.create_glossary_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -7084,7 +7025,7 @@ def test_adaptive_mt_translate_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_adaptive_mt_translate_empty_call_async(): +async def test_create_glossary_entry_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceAsyncClient( @@ -7094,22 +7035,23 @@ async def test_adaptive_mt_translate_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.adaptive_mt_translate), "__call__" + type(client.transport.create_glossary_entry), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.AdaptiveMtTranslateResponse( - language_code="language_code_value", + common.GlossaryEntry( + name="name_value", + description="description_value", ) ) - response = await client.adaptive_mt_translate() + response = await client.create_glossary_entry() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.AdaptiveMtTranslateRequest() + assert args[0] == translation_service.CreateGlossaryEntryRequest() @pytest.mark.asyncio -async def test_adaptive_mt_translate_async_use_cached_wrapped_rpc( +async def test_create_glossary_entry_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7126,23 +7068,23 @@ async def test_adaptive_mt_translate_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.adaptive_mt_translate + client._client._transport.create_glossary_entry in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.adaptive_mt_translate + client._client._transport.create_glossary_entry ] = mock_object request = {} - await client.adaptive_mt_translate(request) + await client.create_glossary_entry(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.adaptive_mt_translate(request) + await client.create_glossary_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -7150,8 +7092,9 @@ async def test_adaptive_mt_translate_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_adaptive_mt_translate_async( - transport: str = "grpc_asyncio", request_type=adaptive_mt.AdaptiveMtTranslateRequest +async def test_create_glossary_entry_async( + transport: str = "grpc_asyncio", + request_type=translation_service.CreateGlossaryEntryRequest, ): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7164,49 +7107,51 @@ async def test_adaptive_mt_translate_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.adaptive_mt_translate), "__call__" + type(client.transport.create_glossary_entry), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.AdaptiveMtTranslateResponse( - language_code="language_code_value", + common.GlossaryEntry( + name="name_value", + description="description_value", ) ) - response = await client.adaptive_mt_translate(request) + response = await client.create_glossary_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = adaptive_mt.AdaptiveMtTranslateRequest() + request = translation_service.CreateGlossaryEntryRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, adaptive_mt.AdaptiveMtTranslateResponse) - assert response.language_code == "language_code_value" + assert isinstance(response, common.GlossaryEntry) + assert response.name == "name_value" + assert response.description == "description_value" @pytest.mark.asyncio -async def test_adaptive_mt_translate_async_from_dict(): - await test_adaptive_mt_translate_async(request_type=dict) +async def test_create_glossary_entry_async_from_dict(): + await test_create_glossary_entry_async(request_type=dict) -def test_adaptive_mt_translate_field_headers(): +def test_create_glossary_entry_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = adaptive_mt.AdaptiveMtTranslateRequest() + request = translation_service.CreateGlossaryEntryRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.adaptive_mt_translate), "__call__" + type(client.transport.create_glossary_entry), "__call__" ) as call: - call.return_value = adaptive_mt.AdaptiveMtTranslateResponse() - client.adaptive_mt_translate(request) + call.return_value = common.GlossaryEntry() + client.create_glossary_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -7222,25 +7167,25 @@ def test_adaptive_mt_translate_field_headers(): @pytest.mark.asyncio -async def test_adaptive_mt_translate_field_headers_async(): +async def test_create_glossary_entry_field_headers_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = adaptive_mt.AdaptiveMtTranslateRequest() + request = translation_service.CreateGlossaryEntryRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.adaptive_mt_translate), "__call__" + type(client.transport.create_glossary_entry), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.AdaptiveMtTranslateResponse() + common.GlossaryEntry() ) - await client.adaptive_mt_translate(request) + await client.create_glossary_entry(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -7255,22 +7200,22 @@ async def test_adaptive_mt_translate_field_headers_async(): ) in kw["metadata"] -def test_adaptive_mt_translate_flattened(): +def test_create_glossary_entry_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.adaptive_mt_translate), "__call__" + type(client.transport.create_glossary_entry), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = adaptive_mt.AdaptiveMtTranslateResponse() + call.return_value = common.GlossaryEntry() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.adaptive_mt_translate( + client.create_glossary_entry( parent="parent_value", - content=["content_value"], + glossary_entry=common.GlossaryEntry(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -7280,12 +7225,12 @@ def test_adaptive_mt_translate_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].content - mock_val = ["content_value"] + arg = args[0].glossary_entry + mock_val = common.GlossaryEntry(name="name_value") assert arg == mock_val -def test_adaptive_mt_translate_flattened_error(): +def test_create_glossary_entry_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -7293,34 +7238,34 @@ def test_adaptive_mt_translate_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.adaptive_mt_translate( - adaptive_mt.AdaptiveMtTranslateRequest(), + client.create_glossary_entry( + translation_service.CreateGlossaryEntryRequest(), parent="parent_value", - content=["content_value"], + glossary_entry=common.GlossaryEntry(name="name_value"), ) @pytest.mark.asyncio -async def test_adaptive_mt_translate_flattened_async(): +async def test_create_glossary_entry_flattened_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.adaptive_mt_translate), "__call__" + type(client.transport.create_glossary_entry), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = adaptive_mt.AdaptiveMtTranslateResponse() + call.return_value = common.GlossaryEntry() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.AdaptiveMtTranslateResponse() + common.GlossaryEntry() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
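# Editor's note (not part of the generated patch): the create_glossary_entry tests above
# show the flattened call shape (parent plus a GlossaryEntry). A hedged sketch follows;
# it assumes GlossaryEntry is re-exported at the translate_v3 package level and leaves
# out the entry's term fields, which these tests do not touch.
from google.cloud import translate_v3

def sketch_create_glossary_entry():
    client = translate_v3.TranslationServiceClient()
    parent = "projects/my-project/locations/us-central1/glossaries/my-glossary"
    entry = translate_v3.GlossaryEntry(description="example entry (fields assumed)")
    # The RPC returns the created GlossaryEntry, including its server-assigned name.
    created = client.create_glossary_entry(parent=parent, glossary_entry=entry)
    print(created.name)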
- response = await client.adaptive_mt_translate( + response = await client.create_glossary_entry( parent="parent_value", - content=["content_value"], + glossary_entry=common.GlossaryEntry(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -7330,13 +7275,13 @@ async def test_adaptive_mt_translate_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].content - mock_val = ["content_value"] + arg = args[0].glossary_entry + mock_val = common.GlossaryEntry(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_adaptive_mt_translate_flattened_error_async(): +async def test_create_glossary_entry_flattened_error_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -7344,21 +7289,21 @@ async def test_adaptive_mt_translate_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.adaptive_mt_translate( - adaptive_mt.AdaptiveMtTranslateRequest(), + await client.create_glossary_entry( + translation_service.CreateGlossaryEntryRequest(), parent="parent_value", - content=["content_value"], + glossary_entry=common.GlossaryEntry(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - adaptive_mt.GetAdaptiveMtFileRequest, + translation_service.UpdateGlossaryEntryRequest, dict, ], ) -def test_get_adaptive_mt_file(request_type, transport: str = "grpc"): +def test_update_glossary_entry(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7370,30 +7315,28 @@ def test_get_adaptive_mt_file(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_adaptive_mt_file), "__call__" + type(client.transport.update_glossary_entry), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = adaptive_mt.AdaptiveMtFile( + call.return_value = common.GlossaryEntry( name="name_value", - display_name="display_name_value", - entry_count=1210, + description="description_value", ) - response = client.get_adaptive_mt_file(request) + response = client.update_glossary_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = adaptive_mt.GetAdaptiveMtFileRequest() + request = translation_service.UpdateGlossaryEntryRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, adaptive_mt.AdaptiveMtFile) + assert isinstance(response, common.GlossaryEntry) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.entry_count == 1210 + assert response.description == "description_value" -def test_get_adaptive_mt_file_empty_call(): +def test_update_glossary_entry_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceClient( @@ -7403,18 +7346,18 @@ def test_get_adaptive_mt_file_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_adaptive_mt_file), "__call__" + type(client.transport.update_glossary_entry), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_adaptive_mt_file() + client.update_glossary_entry() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.GetAdaptiveMtFileRequest() + assert args[0] == translation_service.UpdateGlossaryEntryRequest() -def test_get_adaptive_mt_file_non_empty_request_with_auto_populated_field(): +def test_update_glossary_entry_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = TranslationServiceClient( @@ -7425,26 +7368,22 @@ def test_get_adaptive_mt_file_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = adaptive_mt.GetAdaptiveMtFileRequest( - name="name_value", - ) + request = translation_service.UpdateGlossaryEntryRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_adaptive_mt_file), "__call__" + type(client.transport.update_glossary_entry), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_adaptive_mt_file(request=request) + client.update_glossary_entry(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.GetAdaptiveMtFileRequest( - name="name_value", - ) + assert args[0] == translation_service.UpdateGlossaryEntryRequest() -def test_get_adaptive_mt_file_use_cached_wrapped_rpc(): +def test_update_glossary_entry_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7459,7 +7398,8 @@ def test_get_adaptive_mt_file_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_adaptive_mt_file in client._transport._wrapped_methods + client._transport.update_glossary_entry + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -7468,15 +7408,15 @@ def test_get_adaptive_mt_file_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_adaptive_mt_file + client._transport.update_glossary_entry ] = mock_rpc request = {} - client.get_adaptive_mt_file(request) + client.update_glossary_entry(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_adaptive_mt_file(request) + client.update_glossary_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -7484,7 +7424,7 @@ def test_get_adaptive_mt_file_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_adaptive_mt_file_empty_call_async(): +async def test_update_glossary_entry_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = TranslationServiceAsyncClient( @@ -7494,24 +7434,23 @@ async def test_get_adaptive_mt_file_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_adaptive_mt_file), "__call__" + type(client.transport.update_glossary_entry), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.AdaptiveMtFile( + common.GlossaryEntry( name="name_value", - display_name="display_name_value", - entry_count=1210, + description="description_value", ) ) - response = await client.get_adaptive_mt_file() + response = await client.update_glossary_entry() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.GetAdaptiveMtFileRequest() + assert args[0] == translation_service.UpdateGlossaryEntryRequest() @pytest.mark.asyncio -async def test_get_adaptive_mt_file_async_use_cached_wrapped_rpc( +async def test_update_glossary_entry_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7528,23 +7467,23 @@ async def test_get_adaptive_mt_file_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_adaptive_mt_file + client._client._transport.update_glossary_entry in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.get_adaptive_mt_file + client._client._transport.update_glossary_entry ] = mock_object request = {} - await client.get_adaptive_mt_file(request) + await client.update_glossary_entry(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.get_adaptive_mt_file(request) + await client.update_glossary_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -7552,8 +7491,9 @@ async def test_get_adaptive_mt_file_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_adaptive_mt_file_async( - transport: str = "grpc_asyncio", request_type=adaptive_mt.GetAdaptiveMtFileRequest +async def test_update_glossary_entry_async( + transport: str = "grpc_asyncio", + request_type=translation_service.UpdateGlossaryEntryRequest, ): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7566,53 +7506,51 @@ async def test_get_adaptive_mt_file_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_adaptive_mt_file), "__call__" + type(client.transport.update_glossary_entry), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.AdaptiveMtFile( + common.GlossaryEntry( name="name_value", - display_name="display_name_value", - entry_count=1210, + description="description_value", ) ) - response = await client.get_adaptive_mt_file(request) + response = await client.update_glossary_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = adaptive_mt.GetAdaptiveMtFileRequest() + request = translation_service.UpdateGlossaryEntryRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, adaptive_mt.AdaptiveMtFile) + assert isinstance(response, common.GlossaryEntry) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.entry_count == 1210 + assert response.description == "description_value" @pytest.mark.asyncio -async def test_get_adaptive_mt_file_async_from_dict(): - await test_get_adaptive_mt_file_async(request_type=dict) +async def test_update_glossary_entry_async_from_dict(): + await test_update_glossary_entry_async(request_type=dict) -def test_get_adaptive_mt_file_field_headers(): +def test_update_glossary_entry_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = adaptive_mt.GetAdaptiveMtFileRequest() + request = translation_service.UpdateGlossaryEntryRequest() - request.name = "name_value" + request.glossary_entry.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_adaptive_mt_file), "__call__" + type(client.transport.update_glossary_entry), "__call__" ) as call: - call.return_value = adaptive_mt.AdaptiveMtFile() - client.get_adaptive_mt_file(request) + call.return_value = common.GlossaryEntry() + client.update_glossary_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -7623,30 +7561,30 @@ def test_get_adaptive_mt_file_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "glossary_entry.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_adaptive_mt_file_field_headers_async(): +async def test_update_glossary_entry_field_headers_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = adaptive_mt.GetAdaptiveMtFileRequest() + request = translation_service.UpdateGlossaryEntryRequest() - request.name = "name_value" + request.glossary_entry.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_adaptive_mt_file), "__call__" + type(client.transport.update_glossary_entry), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.AdaptiveMtFile() + common.GlossaryEntry() ) - await client.get_adaptive_mt_file(request) + await client.update_glossary_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -7657,37 +7595,37 @@ async def test_get_adaptive_mt_file_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "glossary_entry.name=name_value", ) in kw["metadata"] -def test_get_adaptive_mt_file_flattened(): +def test_update_glossary_entry_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_adaptive_mt_file), "__call__" + type(client.transport.update_glossary_entry), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = adaptive_mt.AdaptiveMtFile() + call.return_value = common.GlossaryEntry() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_adaptive_mt_file( - name="name_value", + client.update_glossary_entry( + glossary_entry=common.GlossaryEntry(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].glossary_entry + mock_val = common.GlossaryEntry(name="name_value") assert arg == mock_val -def test_get_adaptive_mt_file_flattened_error(): +def test_update_glossary_entry_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -7695,45 +7633,45 @@ def test_get_adaptive_mt_file_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_adaptive_mt_file( - adaptive_mt.GetAdaptiveMtFileRequest(), - name="name_value", + client.update_glossary_entry( + translation_service.UpdateGlossaryEntryRequest(), + glossary_entry=common.GlossaryEntry(name="name_value"), ) @pytest.mark.asyncio -async def test_get_adaptive_mt_file_flattened_async(): +async def test_update_glossary_entry_flattened_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_adaptive_mt_file), "__call__" + type(client.transport.update_glossary_entry), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = adaptive_mt.AdaptiveMtFile() + call.return_value = common.GlossaryEntry() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.AdaptiveMtFile() + common.GlossaryEntry() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_adaptive_mt_file( - name="name_value", + response = await client.update_glossary_entry( + glossary_entry=common.GlossaryEntry(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].glossary_entry + mock_val = common.GlossaryEntry(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_get_adaptive_mt_file_flattened_error_async(): +async def test_update_glossary_entry_flattened_error_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -7741,20 +7679,20 @@ async def test_get_adaptive_mt_file_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
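# Editor's note (not part of the generated patch): the field-header tests above show that
# update_glossary_entry routes on glossary_entry.name, so the entry passed in must carry
# its full resource name. A sketch under that reading; the path format is an assumption.
from google.cloud import translate_v3

def sketch_update_glossary_entry():
    client = translate_v3.TranslationServiceClient()
    entry = translate_v3.GlossaryEntry(
        name=(
            "projects/my-project/locations/us-central1/"
            "glossaries/my-glossary/glossaryEntries/my-entry"
        ),
        description="updated description",
    )
    updated = client.update_glossary_entry(glossary_entry=entry)
    print(updated.description)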
with pytest.raises(ValueError): - await client.get_adaptive_mt_file( - adaptive_mt.GetAdaptiveMtFileRequest(), - name="name_value", + await client.update_glossary_entry( + translation_service.UpdateGlossaryEntryRequest(), + glossary_entry=common.GlossaryEntry(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - adaptive_mt.DeleteAdaptiveMtFileRequest, + translation_service.DeleteGlossaryEntryRequest, dict, ], ) -def test_delete_adaptive_mt_file(request_type, transport: str = "grpc"): +def test_delete_glossary_entry(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7766,23 +7704,23 @@ def test_delete_adaptive_mt_file(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_adaptive_mt_file), "__call__" + type(client.transport.delete_glossary_entry), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_adaptive_mt_file(request) + response = client.delete_glossary_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = adaptive_mt.DeleteAdaptiveMtFileRequest() + request = translation_service.DeleteGlossaryEntryRequest() assert args[0] == request # Establish that the response is the type that we expect. assert response is None -def test_delete_adaptive_mt_file_empty_call(): +def test_delete_glossary_entry_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceClient( @@ -7792,18 +7730,18 @@ def test_delete_adaptive_mt_file_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_adaptive_mt_file), "__call__" + type(client.transport.delete_glossary_entry), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_adaptive_mt_file() + client.delete_glossary_entry() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.DeleteAdaptiveMtFileRequest() + assert args[0] == translation_service.DeleteGlossaryEntryRequest() -def test_delete_adaptive_mt_file_non_empty_request_with_auto_populated_field(): +def test_delete_glossary_entry_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = TranslationServiceClient( @@ -7814,26 +7752,26 @@ def test_delete_adaptive_mt_file_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = adaptive_mt.DeleteAdaptiveMtFileRequest( + request = translation_service.DeleteGlossaryEntryRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_adaptive_mt_file), "__call__" + type(client.transport.delete_glossary_entry), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_adaptive_mt_file(request=request) + client.delete_glossary_entry(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.DeleteAdaptiveMtFileRequest( + assert args[0] == translation_service.DeleteGlossaryEntryRequest( name="name_value", ) -def test_delete_adaptive_mt_file_use_cached_wrapped_rpc(): +def test_delete_glossary_entry_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7848,7 +7786,7 @@ def test_delete_adaptive_mt_file_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_adaptive_mt_file + client._transport.delete_glossary_entry in client._transport._wrapped_methods ) @@ -7858,15 +7796,15 @@ def test_delete_adaptive_mt_file_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_adaptive_mt_file + client._transport.delete_glossary_entry ] = mock_rpc request = {} - client.delete_adaptive_mt_file(request) + client.delete_glossary_entry(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_adaptive_mt_file(request) + client.delete_glossary_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -7874,7 +7812,7 @@ def test_delete_adaptive_mt_file_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_adaptive_mt_file_empty_call_async(): +async def test_delete_glossary_entry_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceAsyncClient( @@ -7884,18 +7822,18 @@ async def test_delete_adaptive_mt_file_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_adaptive_mt_file), "__call__" + type(client.transport.delete_glossary_entry), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_adaptive_mt_file() + response = await client.delete_glossary_entry() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.DeleteAdaptiveMtFileRequest() + assert args[0] == translation_service.DeleteGlossaryEntryRequest() @pytest.mark.asyncio -async def test_delete_adaptive_mt_file_async_use_cached_wrapped_rpc( +async def test_delete_glossary_entry_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7912,23 +7850,23 @@ async def test_delete_adaptive_mt_file_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_adaptive_mt_file + client._client._transport.delete_glossary_entry in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.delete_adaptive_mt_file + client._client._transport.delete_glossary_entry ] = mock_object request = {} - await client.delete_adaptive_mt_file(request) + await client.delete_glossary_entry(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.delete_adaptive_mt_file(request) + await client.delete_glossary_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -7936,9 +7874,9 @@ async def test_delete_adaptive_mt_file_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_adaptive_mt_file_async( +async def test_delete_glossary_entry_async( transport: str = "grpc_asyncio", - request_type=adaptive_mt.DeleteAdaptiveMtFileRequest, + request_type=translation_service.DeleteGlossaryEntryRequest, ): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7951,16 +7889,16 @@ async def test_delete_adaptive_mt_file_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_adaptive_mt_file), "__call__" + type(client.transport.delete_glossary_entry), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_adaptive_mt_file(request) + response = await client.delete_glossary_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = adaptive_mt.DeleteAdaptiveMtFileRequest() + request = translation_service.DeleteGlossaryEntryRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -7968,27 +7906,27 @@ async def test_delete_adaptive_mt_file_async( @pytest.mark.asyncio -async def test_delete_adaptive_mt_file_async_from_dict(): - await test_delete_adaptive_mt_file_async(request_type=dict) +async def test_delete_glossary_entry_async_from_dict(): + await test_delete_glossary_entry_async(request_type=dict) -def test_delete_adaptive_mt_file_field_headers(): +def test_delete_glossary_entry_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = adaptive_mt.DeleteAdaptiveMtFileRequest() + request = translation_service.DeleteGlossaryEntryRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_adaptive_mt_file), "__call__" + type(client.transport.delete_glossary_entry), "__call__" ) as call: call.return_value = None - client.delete_adaptive_mt_file(request) + client.delete_glossary_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -8004,23 +7942,23 @@ def test_delete_adaptive_mt_file_field_headers(): @pytest.mark.asyncio -async def test_delete_adaptive_mt_file_field_headers_async(): +async def test_delete_glossary_entry_field_headers_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = adaptive_mt.DeleteAdaptiveMtFileRequest() + request = translation_service.DeleteGlossaryEntryRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_adaptive_mt_file), "__call__" + type(client.transport.delete_glossary_entry), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_adaptive_mt_file(request) + await client.delete_glossary_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -8035,20 +7973,20 @@ async def test_delete_adaptive_mt_file_field_headers_async(): ) in kw["metadata"] -def test_delete_adaptive_mt_file_flattened(): +def test_delete_glossary_entry_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_adaptive_mt_file), "__call__" + type(client.transport.delete_glossary_entry), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_adaptive_mt_file( + client.delete_glossary_entry( name="name_value", ) @@ -8061,7 +7999,7 @@ def test_delete_adaptive_mt_file_flattened(): assert arg == mock_val -def test_delete_adaptive_mt_file_flattened_error(): +def test_delete_glossary_entry_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -8069,21 +8007,21 @@ def test_delete_adaptive_mt_file_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_adaptive_mt_file( - adaptive_mt.DeleteAdaptiveMtFileRequest(), + client.delete_glossary_entry( + translation_service.DeleteGlossaryEntryRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_adaptive_mt_file_flattened_async(): +async def test_delete_glossary_entry_flattened_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
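# Editor's note (not part of the generated patch): per the tests above, delete_glossary_entry
# takes only the entry's resource name and returns None. Minimal sketch; the path format is
# an assumption rather than something stated in this diff.
from google.cloud import translate_v3

def sketch_delete_glossary_entry():
    client = translate_v3.TranslationServiceClient()
    client.delete_glossary_entry(
        name=(
            "projects/my-project/locations/us-central1/"
            "glossaries/my-glossary/glossaryEntries/my-entry"
        )
    )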
with mock.patch.object( - type(client.transport.delete_adaptive_mt_file), "__call__" + type(client.transport.delete_glossary_entry), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -8091,7 +8029,7 @@ async def test_delete_adaptive_mt_file_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_adaptive_mt_file( + response = await client.delete_glossary_entry( name="name_value", ) @@ -8105,7 +8043,7 @@ async def test_delete_adaptive_mt_file_flattened_async(): @pytest.mark.asyncio -async def test_delete_adaptive_mt_file_flattened_error_async(): +async def test_delete_glossary_entry_flattened_error_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -8113,8 +8051,8 @@ async def test_delete_adaptive_mt_file_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_adaptive_mt_file( - adaptive_mt.DeleteAdaptiveMtFileRequest(), + await client.delete_glossary_entry( + translation_service.DeleteGlossaryEntryRequest(), name="name_value", ) @@ -8122,11 +8060,11 @@ async def test_delete_adaptive_mt_file_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - adaptive_mt.ImportAdaptiveMtFileRequest, + automl_translation.CreateDatasetRequest, dict, ], ) -def test_import_adaptive_mt_file(request_type, transport: str = "grpc"): +def test_create_dataset(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8137,24 +8075,22 @@ def test_import_adaptive_mt_file(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_adaptive_mt_file), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = adaptive_mt.ImportAdaptiveMtFileResponse() - response = client.import_adaptive_mt_file(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_dataset(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = adaptive_mt.ImportAdaptiveMtFileRequest() + request = automl_translation.CreateDatasetRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, adaptive_mt.ImportAdaptiveMtFileResponse) + assert isinstance(response, future.Future) -def test_import_adaptive_mt_file_empty_call(): +def test_create_dataset_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceClient( @@ -8163,19 +8099,17 @@ def test_import_adaptive_mt_file_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.import_adaptive_mt_file), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.import_adaptive_mt_file() + client.create_dataset() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.ImportAdaptiveMtFileRequest() + assert args[0] == automl_translation.CreateDatasetRequest() -def test_import_adaptive_mt_file_non_empty_request_with_auto_populated_field(): +def test_create_dataset_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = TranslationServiceClient( @@ -8186,26 +8120,24 @@ def test_import_adaptive_mt_file_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = adaptive_mt.ImportAdaptiveMtFileRequest( + request = automl_translation.CreateDatasetRequest( parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_adaptive_mt_file), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.import_adaptive_mt_file(request=request) + client.create_dataset(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.ImportAdaptiveMtFileRequest( + assert args[0] == automl_translation.CreateDatasetRequest( parent="parent_value", ) -def test_import_adaptive_mt_file_use_cached_wrapped_rpc(): +def test_create_dataset_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8219,26 +8151,25 @@ def test_import_adaptive_mt_file_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.import_adaptive_mt_file - in client._transport._wrapped_methods - ) + assert client._transport.create_dataset in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.import_adaptive_mt_file - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_dataset] = mock_rpc request = {} - client.import_adaptive_mt_file(request) + client.create_dataset(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.import_adaptive_mt_file(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8246,7 +8177,7 @@ def test_import_adaptive_mt_file_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_import_adaptive_mt_file_empty_call_async(): +async def test_create_dataset_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceAsyncClient( @@ -8255,21 +8186,19 @@ async def test_import_adaptive_mt_file_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_adaptive_mt_file), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.ImportAdaptiveMtFileResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.import_adaptive_mt_file() + response = await client.create_dataset() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.ImportAdaptiveMtFileRequest() + assert args[0] == automl_translation.CreateDatasetRequest() @pytest.mark.asyncio -async def test_import_adaptive_mt_file_async_use_cached_wrapped_rpc( +async def test_create_dataset_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8286,23 +8215,27 @@ async def test_import_adaptive_mt_file_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.import_adaptive_mt_file + client._client._transport.create_dataset in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.import_adaptive_mt_file + client._client._transport.create_dataset ] = mock_object request = {} - await client.import_adaptive_mt_file(request) + await client.create_dataset(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.import_adaptive_mt_file(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8310,9 +8243,9 @@ async def test_import_adaptive_mt_file_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_import_adaptive_mt_file_async( +async def test_create_dataset_async( transport: str = "grpc_asyncio", - request_type=adaptive_mt.ImportAdaptiveMtFileRequest, + request_type=automl_translation.CreateDatasetRequest, ): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8324,47 +8257,43 @@ async def test_import_adaptive_mt_file_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.import_adaptive_mt_file), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.ImportAdaptiveMtFileResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.import_adaptive_mt_file(request) + response = await client.create_dataset(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = adaptive_mt.ImportAdaptiveMtFileRequest() + request = automl_translation.CreateDatasetRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, adaptive_mt.ImportAdaptiveMtFileResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_import_adaptive_mt_file_async_from_dict(): - await test_import_adaptive_mt_file_async(request_type=dict) +async def test_create_dataset_async_from_dict(): + await test_create_dataset_async(request_type=dict) -def test_import_adaptive_mt_file_field_headers(): +def test_create_dataset_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = adaptive_mt.ImportAdaptiveMtFileRequest() + request = automl_translation.CreateDatasetRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_adaptive_mt_file), "__call__" - ) as call: - call.return_value = adaptive_mt.ImportAdaptiveMtFileResponse() - client.import_adaptive_mt_file(request) + with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_dataset(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -8380,25 +8309,23 @@ def test_import_adaptive_mt_file_field_headers(): @pytest.mark.asyncio -async def test_import_adaptive_mt_file_field_headers_async(): +async def test_create_dataset_field_headers_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = adaptive_mt.ImportAdaptiveMtFileRequest() + request = automl_translation.CreateDatasetRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_adaptive_mt_file), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.ImportAdaptiveMtFileResponse() + operations_pb2.Operation(name="operations/op") ) - await client.import_adaptive_mt_file(request) + await client.create_dataset(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -8413,21 +8340,20 @@ async def test_import_adaptive_mt_file_field_headers_async(): ) in kw["metadata"] -def test_import_adaptive_mt_file_flattened(): +def test_create_dataset_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_adaptive_mt_file), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = adaptive_mt.ImportAdaptiveMtFileResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.import_adaptive_mt_file( + client.create_dataset( parent="parent_value", + dataset=automl_translation.Dataset(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -8437,9 +8363,12 @@ def test_import_adaptive_mt_file_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val + arg = args[0].dataset + mock_val = automl_translation.Dataset(name="name_value") + assert arg == mock_val -def test_import_adaptive_mt_file_flattened_error(): +def test_create_dataset_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -8447,32 +8376,32 @@ def test_import_adaptive_mt_file_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.import_adaptive_mt_file( - adaptive_mt.ImportAdaptiveMtFileRequest(), + client.create_dataset( + automl_translation.CreateDatasetRequest(), parent="parent_value", + dataset=automl_translation.Dataset(name="name_value"), ) @pytest.mark.asyncio -async def test_import_adaptive_mt_file_flattened_async(): +async def test_create_dataset_flattened_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_adaptive_mt_file), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = adaptive_mt.ImportAdaptiveMtFileResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.ImportAdaptiveMtFileResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.import_adaptive_mt_file( + response = await client.create_dataset( parent="parent_value", + dataset=automl_translation.Dataset(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -8482,10 +8411,13 @@ async def test_import_adaptive_mt_file_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val + arg = args[0].dataset + mock_val = automl_translation.Dataset(name="name_value") + assert arg == mock_val @pytest.mark.asyncio -async def test_import_adaptive_mt_file_flattened_error_async(): +async def test_create_dataset_flattened_error_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -8493,20 +8425,21 @@ async def test_import_adaptive_mt_file_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.import_adaptive_mt_file( - adaptive_mt.ImportAdaptiveMtFileRequest(), + await client.create_dataset( + automl_translation.CreateDatasetRequest(), parent="parent_value", + dataset=automl_translation.Dataset(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - adaptive_mt.ListAdaptiveMtFilesRequest, + automl_translation.GetDatasetRequest, dict, ], ) -def test_list_adaptive_mt_files(request_type, transport: str = "grpc"): +def test_get_dataset(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8517,27 +8450,39 @@ def test_list_adaptive_mt_files(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_files), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = adaptive_mt.ListAdaptiveMtFilesResponse( - next_page_token="next_page_token_value", + call.return_value = automl_translation.Dataset( + name="name_value", + display_name="display_name_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + example_count=1396, + train_example_count=2033, + validate_example_count=2333, + test_example_count=1939, ) - response = client.list_adaptive_mt_files(request) + response = client.get_dataset(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = adaptive_mt.ListAdaptiveMtFilesRequest() + request = automl_translation.GetDatasetRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAdaptiveMtFilesPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, automl_translation.Dataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.source_language_code == "source_language_code_value" + assert response.target_language_code == "target_language_code_value" + assert response.example_count == 1396 + assert response.train_example_count == 2033 + assert response.validate_example_count == 2333 + assert response.test_example_count == 1939 -def test_list_adaptive_mt_files_empty_call(): +def test_get_dataset_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceClient( @@ -8546,19 +8491,17 @@ def test_list_adaptive_mt_files_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_files), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_adaptive_mt_files() + client.get_dataset() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.ListAdaptiveMtFilesRequest() + assert args[0] == automl_translation.GetDatasetRequest() -def test_list_adaptive_mt_files_non_empty_request_with_auto_populated_field(): +def test_get_dataset_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = TranslationServiceClient( @@ -8569,28 +8512,24 @@ def test_list_adaptive_mt_files_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = adaptive_mt.ListAdaptiveMtFilesRequest( - parent="parent_value", - page_token="page_token_value", + request = automl_translation.GetDatasetRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_files), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_adaptive_mt_files(request=request) + client.get_dataset(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.ListAdaptiveMtFilesRequest( - parent="parent_value", - page_token="page_token_value", + assert args[0] == automl_translation.GetDatasetRequest( + name="name_value", ) -def test_list_adaptive_mt_files_use_cached_wrapped_rpc(): +def test_get_dataset_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8604,26 +8543,21 @@ def test_list_adaptive_mt_files_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_adaptive_mt_files - in client._transport._wrapped_methods - ) + assert client._transport.get_dataset in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_adaptive_mt_files - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_dataset] = mock_rpc request = {} - client.list_adaptive_mt_files(request) + client.get_dataset(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_adaptive_mt_files(request) + client.get_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8631,7 +8565,7 @@ def test_list_adaptive_mt_files_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_adaptive_mt_files_empty_call_async(): +async def test_get_dataset_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceAsyncClient( @@ -8640,23 +8574,28 @@ async def test_list_adaptive_mt_files_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_files), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.ListAdaptiveMtFilesResponse( - next_page_token="next_page_token_value", + automl_translation.Dataset( + name="name_value", + display_name="display_name_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + example_count=1396, + train_example_count=2033, + validate_example_count=2333, + test_example_count=1939, ) ) - response = await client.list_adaptive_mt_files() + response = await client.get_dataset() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.ListAdaptiveMtFilesRequest() + assert args[0] == automl_translation.GetDatasetRequest() @pytest.mark.asyncio -async def test_list_adaptive_mt_files_async_use_cached_wrapped_rpc( +async def test_get_dataset_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8673,23 +8612,23 @@ async def test_list_adaptive_mt_files_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_adaptive_mt_files + client._client._transport.get_dataset in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.list_adaptive_mt_files + client._client._transport.get_dataset ] = mock_object request = {} - await client.list_adaptive_mt_files(request) + await client.get_dataset(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.list_adaptive_mt_files(request) + await client.get_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8697,8 +8636,8 @@ async def test_list_adaptive_mt_files_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_adaptive_mt_files_async( - transport: str = "grpc_asyncio", request_type=adaptive_mt.ListAdaptiveMtFilesRequest +async def test_get_dataset_async( + transport: str = "grpc_asyncio", request_type=automl_translation.GetDatasetRequest ): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8710,50 +8649,60 @@ async def test_list_adaptive_mt_files_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_files), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.ListAdaptiveMtFilesResponse( - next_page_token="next_page_token_value", + automl_translation.Dataset( + name="name_value", + display_name="display_name_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + example_count=1396, + train_example_count=2033, + validate_example_count=2333, + test_example_count=1939, ) ) - response = await client.list_adaptive_mt_files(request) + response = await client.get_dataset(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = adaptive_mt.ListAdaptiveMtFilesRequest() + request = automl_translation.GetDatasetRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAdaptiveMtFilesAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, automl_translation.Dataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.source_language_code == "source_language_code_value" + assert response.target_language_code == "target_language_code_value" + assert response.example_count == 1396 + assert response.train_example_count == 2033 + assert response.validate_example_count == 2333 + assert response.test_example_count == 1939 @pytest.mark.asyncio -async def test_list_adaptive_mt_files_async_from_dict(): - await test_list_adaptive_mt_files_async(request_type=dict) +async def test_get_dataset_async_from_dict(): + await test_get_dataset_async(request_type=dict) -def test_list_adaptive_mt_files_field_headers(): +def test_get_dataset_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = adaptive_mt.ListAdaptiveMtFilesRequest() + request = automl_translation.GetDatasetRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_files), "__call__" - ) as call: - call.return_value = adaptive_mt.ListAdaptiveMtFilesResponse() - client.list_adaptive_mt_files(request) + with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: + call.return_value = automl_translation.Dataset() + client.get_dataset(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -8764,30 +8713,28 @@ def test_list_adaptive_mt_files_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_adaptive_mt_files_field_headers_async(): +async def test_get_dataset_field_headers_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = adaptive_mt.ListAdaptiveMtFilesRequest() + request = automl_translation.GetDatasetRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_files), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.ListAdaptiveMtFilesResponse() + automl_translation.Dataset() ) - await client.list_adaptive_mt_files(request) + await client.get_dataset(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -8798,37 +8745,35 @@ async def test_list_adaptive_mt_files_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_adaptive_mt_files_flattened(): +def test_get_dataset_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_files), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = adaptive_mt.ListAdaptiveMtFilesResponse() + call.return_value = automl_translation.Dataset() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_adaptive_mt_files( - parent="parent_value", + client.get_dataset( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_adaptive_mt_files_flattened_error(): +def test_get_dataset_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -8836,45 +8781,43 @@ def test_list_adaptive_mt_files_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_adaptive_mt_files( - adaptive_mt.ListAdaptiveMtFilesRequest(), - parent="parent_value", + client.get_dataset( + automl_translation.GetDatasetRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_adaptive_mt_files_flattened_async(): +async def test_get_dataset_flattened_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_files), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = adaptive_mt.ListAdaptiveMtFilesResponse() + call.return_value = automl_translation.Dataset() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.ListAdaptiveMtFilesResponse() + automl_translation.Dataset() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_adaptive_mt_files( - parent="parent_value", + response = await client.get_dataset( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_adaptive_mt_files_flattened_error_async(): +async def test_get_dataset_flattened_error_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -8882,253 +8825,49 @@ async def test_list_adaptive_mt_files_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_adaptive_mt_files( - adaptive_mt.ListAdaptiveMtFilesRequest(), - parent="parent_value", + await client.get_dataset( + automl_translation.GetDatasetRequest(), + name="name_value", ) -def test_list_adaptive_mt_files_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + automl_translation.ListDatasetsRequest, + dict, + ], +) +def test_list_datasets(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_files), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - adaptive_mt.ListAdaptiveMtFilesResponse( - adaptive_mt_files=[ - adaptive_mt.AdaptiveMtFile(), - adaptive_mt.AdaptiveMtFile(), - adaptive_mt.AdaptiveMtFile(), - ], - next_page_token="abc", - ), - adaptive_mt.ListAdaptiveMtFilesResponse( - adaptive_mt_files=[], - next_page_token="def", - ), - adaptive_mt.ListAdaptiveMtFilesResponse( - adaptive_mt_files=[ - adaptive_mt.AdaptiveMtFile(), - ], - next_page_token="ghi", - ), - adaptive_mt.ListAdaptiveMtFilesResponse( - adaptive_mt_files=[ - adaptive_mt.AdaptiveMtFile(), - adaptive_mt.AdaptiveMtFile(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_adaptive_mt_files(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, adaptive_mt.AdaptiveMtFile) for i in results) - - -def test_list_adaptive_mt_files_pages(transport_name: str = "grpc"): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_files), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - adaptive_mt.ListAdaptiveMtFilesResponse( - adaptive_mt_files=[ - adaptive_mt.AdaptiveMtFile(), - adaptive_mt.AdaptiveMtFile(), - adaptive_mt.AdaptiveMtFile(), - ], - next_page_token="abc", - ), - adaptive_mt.ListAdaptiveMtFilesResponse( - adaptive_mt_files=[], - next_page_token="def", - ), - adaptive_mt.ListAdaptiveMtFilesResponse( - adaptive_mt_files=[ - adaptive_mt.AdaptiveMtFile(), - ], - next_page_token="ghi", - ), - adaptive_mt.ListAdaptiveMtFilesResponse( - adaptive_mt_files=[ - adaptive_mt.AdaptiveMtFile(), - adaptive_mt.AdaptiveMtFile(), - ], - ), - RuntimeError, - ) - pages = list(client.list_adaptive_mt_files(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_adaptive_mt_files_async_pager(): - client = TranslationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_files), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - adaptive_mt.ListAdaptiveMtFilesResponse( - adaptive_mt_files=[ - adaptive_mt.AdaptiveMtFile(), - adaptive_mt.AdaptiveMtFile(), - adaptive_mt.AdaptiveMtFile(), - ], - next_page_token="abc", - ), - adaptive_mt.ListAdaptiveMtFilesResponse( - adaptive_mt_files=[], - next_page_token="def", - ), - adaptive_mt.ListAdaptiveMtFilesResponse( - adaptive_mt_files=[ - adaptive_mt.AdaptiveMtFile(), - ], - next_page_token="ghi", - ), - adaptive_mt.ListAdaptiveMtFilesResponse( - adaptive_mt_files=[ - adaptive_mt.AdaptiveMtFile(), - adaptive_mt.AdaptiveMtFile(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_adaptive_mt_files( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, adaptive_mt.AdaptiveMtFile) for i in responses) - - -@pytest.mark.asyncio -async def test_list_adaptive_mt_files_async_pages(): - client = TranslationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_files), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - adaptive_mt.ListAdaptiveMtFilesResponse( - adaptive_mt_files=[ - adaptive_mt.AdaptiveMtFile(), - adaptive_mt.AdaptiveMtFile(), - adaptive_mt.AdaptiveMtFile(), - ], - next_page_token="abc", - ), - adaptive_mt.ListAdaptiveMtFilesResponse( - adaptive_mt_files=[], - next_page_token="def", - ), - adaptive_mt.ListAdaptiveMtFilesResponse( - adaptive_mt_files=[ - adaptive_mt.AdaptiveMtFile(), - ], - next_page_token="ghi", - ), - adaptive_mt.ListAdaptiveMtFilesResponse( - adaptive_mt_files=[ - adaptive_mt.AdaptiveMtFile(), - adaptive_mt.AdaptiveMtFile(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_adaptive_mt_files(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - adaptive_mt.ListAdaptiveMtSentencesRequest, - dict, - ], -) -def test_list_adaptive_mt_sentences(request_type, transport: str = "grpc"): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_sentences), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = adaptive_mt.ListAdaptiveMtSentencesResponse( + call.return_value = automl_translation.ListDatasetsResponse( next_page_token="next_page_token_value", ) - response = client.list_adaptive_mt_sentences(request) + response = client.list_datasets(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = adaptive_mt.ListAdaptiveMtSentencesRequest() + request = automl_translation.ListDatasetsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAdaptiveMtSentencesPager) + assert isinstance(response, pagers.ListDatasetsPager) assert response.next_page_token == "next_page_token_value" -def test_list_adaptive_mt_sentences_empty_call(): +def test_list_datasets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranslationServiceClient( @@ -9137,19 +8876,17 @@ def test_list_adaptive_mt_sentences_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_sentences), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_adaptive_mt_sentences() + client.list_datasets() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.ListAdaptiveMtSentencesRequest() + assert args[0] == automl_translation.ListDatasetsRequest() -def test_list_adaptive_mt_sentences_non_empty_request_with_auto_populated_field(): +def test_list_datasets_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = TranslationServiceClient( @@ -9160,28 +8897,26 @@ def test_list_adaptive_mt_sentences_non_empty_request_with_auto_populated_field( # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = adaptive_mt.ListAdaptiveMtSentencesRequest( + request = automl_translation.ListDatasetsRequest( parent="parent_value", page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_sentences), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_adaptive_mt_sentences(request=request) + client.list_datasets(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.ListAdaptiveMtSentencesRequest( + assert args[0] == automl_translation.ListDatasetsRequest( parent="parent_value", page_token="page_token_value", ) -def test_list_adaptive_mt_sentences_use_cached_wrapped_rpc(): +def test_list_datasets_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9195,26 +8930,21 @@ def test_list_adaptive_mt_sentences_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_adaptive_mt_sentences - in client._transport._wrapped_methods - ) + assert client._transport.list_datasets in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_adaptive_mt_sentences - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_datasets] = mock_rpc request = {} - client.list_adaptive_mt_sentences(request) + client.list_datasets(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_adaptive_mt_sentences(request) + client.list_datasets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9222,7 +8952,7 @@ def test_list_adaptive_mt_sentences_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_adaptive_mt_sentences_empty_call_async(): +async def test_list_datasets_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = TranslationServiceAsyncClient( @@ -9231,23 +8961,21 @@ async def test_list_adaptive_mt_sentences_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_sentences), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.ListAdaptiveMtSentencesResponse( + automl_translation.ListDatasetsResponse( next_page_token="next_page_token_value", ) ) - response = await client.list_adaptive_mt_sentences() + response = await client.list_datasets() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == adaptive_mt.ListAdaptiveMtSentencesRequest() + assert args[0] == automl_translation.ListDatasetsRequest() @pytest.mark.asyncio -async def test_list_adaptive_mt_sentences_async_use_cached_wrapped_rpc( +async def test_list_datasets_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9264,23 +8992,23 @@ async def test_list_adaptive_mt_sentences_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_adaptive_mt_sentences + client._client._transport.list_datasets in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.list_adaptive_mt_sentences + client._client._transport.list_datasets ] = mock_object request = {} - await client.list_adaptive_mt_sentences(request) + await client.list_datasets(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.list_adaptive_mt_sentences(request) + await client.list_datasets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9288,9 +9016,8 @@ async def test_list_adaptive_mt_sentences_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_adaptive_mt_sentences_async( - transport: str = "grpc_asyncio", - request_type=adaptive_mt.ListAdaptiveMtSentencesRequest, +async def test_list_datasets_async( + transport: str = "grpc_asyncio", request_type=automl_translation.ListDatasetsRequest ): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9302,50 +9029,46 @@ async def test_list_adaptive_mt_sentences_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_sentences), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.ListAdaptiveMtSentencesResponse( + automl_translation.ListDatasetsResponse( next_page_token="next_page_token_value", ) ) - response = await client.list_adaptive_mt_sentences(request) + response = await client.list_datasets(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = adaptive_mt.ListAdaptiveMtSentencesRequest() + request = automl_translation.ListDatasetsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAdaptiveMtSentencesAsyncPager) + assert isinstance(response, pagers.ListDatasetsAsyncPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_list_adaptive_mt_sentences_async_from_dict(): - await test_list_adaptive_mt_sentences_async(request_type=dict) +async def test_list_datasets_async_from_dict(): + await test_list_datasets_async(request_type=dict) -def test_list_adaptive_mt_sentences_field_headers(): +def test_list_datasets_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = adaptive_mt.ListAdaptiveMtSentencesRequest() + request = automl_translation.ListDatasetsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_sentences), "__call__" - ) as call: - call.return_value = adaptive_mt.ListAdaptiveMtSentencesResponse() - client.list_adaptive_mt_sentences(request) + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: + call.return_value = automl_translation.ListDatasetsResponse() + client.list_datasets(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -9361,25 +9084,23 @@ def test_list_adaptive_mt_sentences_field_headers(): @pytest.mark.asyncio -async def test_list_adaptive_mt_sentences_field_headers_async(): +async def test_list_datasets_field_headers_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = adaptive_mt.ListAdaptiveMtSentencesRequest() + request = automl_translation.ListDatasetsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_sentences), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.ListAdaptiveMtSentencesResponse() + automl_translation.ListDatasetsResponse() ) - await client.list_adaptive_mt_sentences(request) + await client.list_datasets(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -9394,20 +9115,18 @@ async def test_list_adaptive_mt_sentences_field_headers_async(): ) in kw["metadata"] -def test_list_adaptive_mt_sentences_flattened(): +def test_list_datasets_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_sentences), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = adaptive_mt.ListAdaptiveMtSentencesResponse() + call.return_value = automl_translation.ListDatasetsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_adaptive_mt_sentences( + client.list_datasets( parent="parent_value", ) @@ -9420,7 +9139,7 @@ def test_list_adaptive_mt_sentences_flattened(): assert arg == mock_val -def test_list_adaptive_mt_sentences_flattened_error(): +def test_list_datasets_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9428,31 +9147,29 @@ def test_list_adaptive_mt_sentences_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_adaptive_mt_sentences( - adaptive_mt.ListAdaptiveMtSentencesRequest(), + client.list_datasets( + automl_translation.ListDatasetsRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_adaptive_mt_sentences_flattened_async(): +async def test_list_datasets_flattened_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_sentences), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = adaptive_mt.ListAdaptiveMtSentencesResponse() + call.return_value = automl_translation.ListDatasetsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - adaptive_mt.ListAdaptiveMtSentencesResponse() + automl_translation.ListDatasetsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_adaptive_mt_sentences( + response = await client.list_datasets( parent="parent_value", ) @@ -9466,7 +9183,7 @@ async def test_list_adaptive_mt_sentences_flattened_async(): @pytest.mark.asyncio -async def test_list_adaptive_mt_sentences_flattened_error_async(): +async def test_list_datasets_flattened_error_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9474,46 +9191,44 @@ async def test_list_adaptive_mt_sentences_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_adaptive_mt_sentences( - adaptive_mt.ListAdaptiveMtSentencesRequest(), + await client.list_datasets( + automl_translation.ListDatasetsRequest(), parent="parent_value", ) -def test_list_adaptive_mt_sentences_pager(transport_name: str = "grpc"): +def test_list_datasets_pager(transport_name: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_sentences), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( - adaptive_mt.ListAdaptiveMtSentencesResponse( - adaptive_mt_sentences=[ - adaptive_mt.AdaptiveMtSentence(), - adaptive_mt.AdaptiveMtSentence(), - adaptive_mt.AdaptiveMtSentence(), + automl_translation.ListDatasetsResponse( + datasets=[ + automl_translation.Dataset(), + automl_translation.Dataset(), + automl_translation.Dataset(), ], next_page_token="abc", ), - adaptive_mt.ListAdaptiveMtSentencesResponse( - adaptive_mt_sentences=[], + automl_translation.ListDatasetsResponse( + datasets=[], next_page_token="def", ), - adaptive_mt.ListAdaptiveMtSentencesResponse( - adaptive_mt_sentences=[ - adaptive_mt.AdaptiveMtSentence(), + automl_translation.ListDatasetsResponse( + datasets=[ + automl_translation.Dataset(), ], next_page_token="ghi", ), - adaptive_mt.ListAdaptiveMtSentencesResponse( - adaptive_mt_sentences=[ - adaptive_mt.AdaptiveMtSentence(), - adaptive_mt.AdaptiveMtSentence(), + automl_translation.ListDatasetsResponse( + datasets=[ + automl_translation.Dataset(), + automl_translation.Dataset(), ], ), RuntimeError, @@ -9525,9 +9240,7 @@ def test_list_adaptive_mt_sentences_pager(transport_name: str = "grpc"): expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_adaptive_mt_sentences( - request={}, retry=retry, timeout=timeout - ) + pager = client.list_datasets(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -9535,93 +9248,89 @@ def test_list_adaptive_mt_sentences_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, adaptive_mt.AdaptiveMtSentence) for i in results) + assert all(isinstance(i, automl_translation.Dataset) for i in results) -def test_list_adaptive_mt_sentences_pages(transport_name: str = "grpc"): +def test_list_datasets_pages(transport_name: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_adaptive_mt_sentences), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( - adaptive_mt.ListAdaptiveMtSentencesResponse( - adaptive_mt_sentences=[ - adaptive_mt.AdaptiveMtSentence(), - adaptive_mt.AdaptiveMtSentence(), - adaptive_mt.AdaptiveMtSentence(), + automl_translation.ListDatasetsResponse( + datasets=[ + automl_translation.Dataset(), + automl_translation.Dataset(), + automl_translation.Dataset(), ], next_page_token="abc", ), - adaptive_mt.ListAdaptiveMtSentencesResponse( - adaptive_mt_sentences=[], + automl_translation.ListDatasetsResponse( + datasets=[], next_page_token="def", ), - adaptive_mt.ListAdaptiveMtSentencesResponse( - adaptive_mt_sentences=[ - adaptive_mt.AdaptiveMtSentence(), + automl_translation.ListDatasetsResponse( + datasets=[ + automl_translation.Dataset(), ], next_page_token="ghi", ), - adaptive_mt.ListAdaptiveMtSentencesResponse( - adaptive_mt_sentences=[ - adaptive_mt.AdaptiveMtSentence(), - adaptive_mt.AdaptiveMtSentence(), + automl_translation.ListDatasetsResponse( + datasets=[ + automl_translation.Dataset(), + automl_translation.Dataset(), ], ), RuntimeError, ) - pages = list(client.list_adaptive_mt_sentences(request={}).pages) + pages = list(client.list_datasets(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_adaptive_mt_sentences_async_pager(): +async def test_list_datasets_async_pager(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_adaptive_mt_sentences), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_datasets), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( - adaptive_mt.ListAdaptiveMtSentencesResponse( - adaptive_mt_sentences=[ - adaptive_mt.AdaptiveMtSentence(), - adaptive_mt.AdaptiveMtSentence(), - adaptive_mt.AdaptiveMtSentence(), + automl_translation.ListDatasetsResponse( + datasets=[ + automl_translation.Dataset(), + automl_translation.Dataset(), + automl_translation.Dataset(), ], next_page_token="abc", ), - adaptive_mt.ListAdaptiveMtSentencesResponse( - adaptive_mt_sentences=[], + automl_translation.ListDatasetsResponse( + datasets=[], next_page_token="def", ), - adaptive_mt.ListAdaptiveMtSentencesResponse( - adaptive_mt_sentences=[ - adaptive_mt.AdaptiveMtSentence(), + automl_translation.ListDatasetsResponse( + datasets=[ + automl_translation.Dataset(), ], next_page_token="ghi", ), - adaptive_mt.ListAdaptiveMtSentencesResponse( - adaptive_mt_sentences=[ - adaptive_mt.AdaptiveMtSentence(), - adaptive_mt.AdaptiveMtSentence(), + automl_translation.ListDatasetsResponse( + datasets=[ + automl_translation.Dataset(), + automl_translation.Dataset(), ], ), RuntimeError, ) - async_pager = await client.list_adaptive_mt_sentences( + async_pager = await client.list_datasets( request={}, ) assert async_pager.next_page_token == "abc" @@ -9630,45 +9339,43 @@ async def test_list_adaptive_mt_sentences_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, adaptive_mt.AdaptiveMtSentence) for i in responses) + assert all(isinstance(i, automl_translation.Dataset) for i in responses) @pytest.mark.asyncio -async def test_list_adaptive_mt_sentences_async_pages(): +async def test_list_datasets_async_pages(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_adaptive_mt_sentences), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_datasets), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( - adaptive_mt.ListAdaptiveMtSentencesResponse( - adaptive_mt_sentences=[ - adaptive_mt.AdaptiveMtSentence(), - adaptive_mt.AdaptiveMtSentence(), - adaptive_mt.AdaptiveMtSentence(), + automl_translation.ListDatasetsResponse( + datasets=[ + automl_translation.Dataset(), + automl_translation.Dataset(), + automl_translation.Dataset(), ], next_page_token="abc", ), - adaptive_mt.ListAdaptiveMtSentencesResponse( - adaptive_mt_sentences=[], + automl_translation.ListDatasetsResponse( + datasets=[], next_page_token="def", ), - adaptive_mt.ListAdaptiveMtSentencesResponse( - adaptive_mt_sentences=[ - adaptive_mt.AdaptiveMtSentence(), + automl_translation.ListDatasetsResponse( + datasets=[ + automl_translation.Dataset(), ], next_page_token="ghi", ), - adaptive_mt.ListAdaptiveMtSentencesResponse( - adaptive_mt_sentences=[ - adaptive_mt.AdaptiveMtSentence(), - adaptive_mt.AdaptiveMtSentence(), + automl_translation.ListDatasetsResponse( + datasets=[ + automl_translation.Dataset(), + automl_translation.Dataset(), ], ), RuntimeError, @@ -9677,7 +9384,7 @@ async def test_list_adaptive_mt_sentences_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_adaptive_mt_sentences(request={}) + await client.list_datasets(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -9687,47 +9394,90 @@ async def test_list_adaptive_mt_sentences_async_pages(): @pytest.mark.parametrize( "request_type", [ - translation_service.TranslateTextRequest, + automl_translation.DeleteDatasetRequest, dict, ], ) -def test_translate_text_rest(request_type): +def test_delete_dataset(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = translation_service.TranslateTextResponse() + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = translation_service.TranslateTextResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_dataset(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.translate_text(request) + # Establish that the underlying gRPC stub method was called. 
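# The pager tests above cover the new ListDatasets surface. A minimal consumption
# sketch, assuming application-default credentials and a hypothetical
# project/location path (not taken from this patch), could look like this:
from google.cloud import translate_v3

client = translate_v3.TranslationServiceClient()
parent = "projects/my-project/locations/us-central1"  # hypothetical resource path
for dataset in client.list_datasets(parent=parent):
    # The pager follows next_page_token automatically, which is what the
    # "abc"/"def"/"ghi" tokens in the mocked responses simulate.
    print(dataset.name, dataset.display_name)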
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = automl_translation.DeleteDatasetRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, translation_service.TranslateTextResponse) + assert isinstance(response, future.Future) -def test_translate_text_rest_use_cached_wrapped_rpc(): +def test_delete_dataset_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_dataset() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.DeleteDatasetRequest() + + +def test_delete_dataset_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = automl_translation.DeleteDatasetRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_dataset(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.DeleteDatasetRequest( + name="name_value", + ) + + +def test_delete_dataset_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -9735,327 +9485,381 @@ def test_translate_text_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.translate_text in client._transport._wrapped_methods + assert client._transport.delete_dataset in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.translate_text] = mock_rpc - + client._transport._wrapped_methods[client._transport.delete_dataset] = mock_rpc request = {} - client.translate_text(request) + client.delete_dataset(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.translate_text(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_translate_text_rest_required_fields( - request_type=translation_service.TranslateTextRequest, -): - transport_class = transports.TranslationServiceRestTransport - - request_init = {} - request_init["contents"] = "" - request_init["target_language_code"] = "" - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_delete_dataset_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).translate_text._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_dataset() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.DeleteDatasetRequest() - # verify required fields with default values are now present - jsonified_request["contents"] = "contents_value" - jsonified_request["targetLanguageCode"] = "target_language_code_value" - jsonified_request["parent"] = "parent_value" +@pytest.mark.asyncio +async def test_delete_dataset_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).translate_text._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify required fields with non-default values are left alone - assert "contents" in jsonified_request - assert jsonified_request["contents"] == "contents_value" - assert "targetLanguageCode" in jsonified_request - assert jsonified_request["targetLanguageCode"] == "target_language_code_value" - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Ensure method has been cached + assert ( + client._client._transport.delete_dataset + in client._client._transport._wrapped_methods + ) - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - 
request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = translation_service.TranslateTextResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_dataset + ] = mock_object - # Convert return value to protobuf type - return_value = translation_service.TranslateTextResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + request = {} + await client.delete_dataset(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 - response = client.translate_text(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + await client.delete_dataset(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 -def test_translate_text_rest_unset_required_fields(): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - unset_fields = transport.translate_text._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "contents", - "targetLanguageCode", - "parent", - ) - ) +@pytest.mark.asyncio +async def test_delete_dataset_async( + transport: str = "grpc_asyncio", + request_type=automl_translation.DeleteDatasetRequest, +): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_translate_text_rest_interceptors(null_interceptor): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TranslationServiceRestInterceptor(), - ) - client = TranslationServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_translate_text" - ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_translate_text" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = translation_service.TranslateTextRequest.pb( - translation_service.TranslateTextRequest() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + response = await client.delete_dataset(request) - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = translation_service.TranslateTextResponse.to_json( - translation_service.TranslateTextResponse() - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = automl_translation.DeleteDatasetRequest() + assert args[0] == request - request = translation_service.TranslateTextRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = translation_service.TranslateTextResponse() + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) - client.translate_text( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - pre.assert_called_once() - post.assert_called_once() +@pytest.mark.asyncio +async def test_delete_dataset_async_from_dict(): + await test_delete_dataset_async(request_type=dict) -def test_translate_text_rest_bad_request( - transport: str = "rest", request_type=translation_service.TranslateTextRequest -): +def test_delete_dataset_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = automl_translation.DeleteDatasetRequest() - # Mock the http request call within the method and fake a BadRequest error. 
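# DeleteDataset is exposed as a long-running operation, which is why the tests above
# return operations_pb2.Operation from the stub and assert the response is a future.
# A hedged usage sketch, assuming a configured client and a hypothetical dataset path:
from google.cloud import translate_v3

client = translate_v3.TranslationServiceClient()
operation = client.delete_dataset(
    name="projects/my-project/locations/us-central1/datasets/my-dataset"  # hypothetical
)
operation.result()  # block until the server finishes the deletion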
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.translate_text(request) + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_dataset(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_translate_text_rest_flattened(): - client = TranslationServiceClient( + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_dataset_field_headers_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = translation_service.TranslateTextResponse() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = automl_translation.DeleteDatasetRequest() - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + request.name = "name_value" - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - target_language_code="target_language_code_value", - contents=["contents_value"], - model="model_value", - mime_type="mime_type_value", - source_language_code="source_language_code_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") ) - mock_args.update(sample_request) + await client.delete_dataset(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = translation_service.TranslateTextResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.translate_text(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_dataset_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_dataset( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v3/{parent=projects/*/locations/*}:translateText" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_translate_text_rest_flattened_error(transport: str = "rest"): +def test_delete_dataset_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.translate_text( - translation_service.TranslateTextRequest(), - parent="parent_value", - target_language_code="target_language_code_value", - contents=["contents_value"], - model="model_value", - mime_type="mime_type_value", - source_language_code="source_language_code_value", + client.delete_dataset( + automl_translation.DeleteDatasetRequest(), + name="name_value", ) -def test_translate_text_rest_error(): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" +@pytest.mark.asyncio +async def test_delete_dataset_flattened_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_dataset( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_dataset_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_dataset( + automl_translation.DeleteDatasetRequest(), + name="name_value", + ) + @pytest.mark.parametrize( "request_type", [ - translation_service.DetectLanguageRequest, + adaptive_mt.CreateAdaptiveMtDatasetRequest, dict, ], ) -def test_detect_language_rest(request_type): +def test_create_adaptive_mt_dataset(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = translation_service.DetectLanguageResponse() + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = translation_service.DetectLanguageResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.AdaptiveMtDataset( + name="name_value", + display_name="display_name_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + example_count=1396, + ) + response = client.create_adaptive_mt_dataset(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.detect_language(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = adaptive_mt.CreateAdaptiveMtDatasetRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, translation_service.DetectLanguageResponse) + assert isinstance(response, adaptive_mt.AdaptiveMtDataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.source_language_code == "source_language_code_value" + assert response.target_language_code == "target_language_code_value" + assert response.example_count == 1396 -def test_detect_language_rest_use_cached_wrapped_rpc(): +def test_create_adaptive_mt_dataset_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_adaptive_mt_dataset), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_adaptive_mt_dataset() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.CreateAdaptiveMtDatasetRequest() + + +def test_create_adaptive_mt_dataset_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = adaptive_mt.CreateAdaptiveMtDatasetRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_adaptive_mt_dataset), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_adaptive_mt_dataset(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.CreateAdaptiveMtDatasetRequest( + parent="parent_value", + ) + + +def test_create_adaptive_mt_dataset_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -10063,305 +9867,406 @@ def test_detect_language_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.detect_language in client._transport._wrapped_methods + assert ( + client._transport.create_adaptive_mt_dataset + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.detect_language] = mock_rpc - + client._transport._wrapped_methods[ + client._transport.create_adaptive_mt_dataset + ] = mock_rpc request = {} - client.detect_language(request) + client.create_adaptive_mt_dataset(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.detect_language(request) + client.create_adaptive_mt_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_detect_language_rest_required_fields( - request_type=translation_service.DetectLanguageRequest, +@pytest.mark.asyncio +async def test_create_adaptive_mt_dataset_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtDataset( + name="name_value", + display_name="display_name_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + example_count=1396, + ) + ) + response = await client.create_adaptive_mt_dataset() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.CreateAdaptiveMtDatasetRequest() + + +@pytest.mark.asyncio +async def test_create_adaptive_mt_dataset_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.TranslationServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.create_adaptive_mt_dataset + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).detect_language._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_adaptive_mt_dataset + ] = mock_object - # verify required fields with default values are now present + request = {} + await client.create_adaptive_mt_dataset(request) - jsonified_request["parent"] = "parent_value" + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).detect_language._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + await client.create_adaptive_mt_dataset(request) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 - client = TranslationServiceClient( + +@pytest.mark.asyncio +async def test_create_adaptive_mt_dataset_async( + transport: str = "grpc_asyncio", + request_type=adaptive_mt.CreateAdaptiveMtDatasetRequest, +): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = translation_service.DetectLanguageResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtDataset( + name="name_value", + display_name="display_name_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + example_count=1396, + ) + ) + response = await client.create_adaptive_mt_dataset(request) - # Convert return value to protobuf type - return_value = translation_service.DetectLanguageResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = adaptive_mt.CreateAdaptiveMtDatasetRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the response is the type that we expect. + assert isinstance(response, adaptive_mt.AdaptiveMtDataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.source_language_code == "source_language_code_value" + assert response.target_language_code == "target_language_code_value" + assert response.example_count == 1396 - response = client.detect_language(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_create_adaptive_mt_dataset_async_from_dict(): + await test_create_adaptive_mt_dataset_async(request_type=dict) -def test_detect_language_rest_unset_required_fields(): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_create_adaptive_mt_dataset_field_headers(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.detect_language._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = adaptive_mt.CreateAdaptiveMtDatasetRequest() + request.parent = "parent_value" -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_detect_language_rest_interceptors(null_interceptor): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TranslationServiceRestInterceptor(), - ) - client = TranslationServiceClient(transport=transport) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_detect_language" - ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_detect_language" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = translation_service.DetectLanguageRequest.pb( - translation_service.DetectLanguageRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = translation_service.DetectLanguageResponse.to_json( - translation_service.DetectLanguageResponse() - ) - - request = translation_service.DetectLanguageRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = translation_service.DetectLanguageResponse() + type(client.transport.create_adaptive_mt_dataset), "__call__" + ) as call: + call.return_value = adaptive_mt.AdaptiveMtDataset() + client.create_adaptive_mt_dataset(request) - client.detect_language( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - pre.assert_called_once() - post.assert_called_once() + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_detect_language_rest_bad_request( - transport: str = "rest", request_type=translation_service.DetectLanguageRequest -): - client = TranslationServiceClient( +@pytest.mark.asyncio +async def test_create_adaptive_mt_dataset_field_headers_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.CreateAdaptiveMtDatasetRequest() - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.detect_language(request) + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_adaptive_mt_dataset), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtDataset() + ) + await client.create_adaptive_mt_dataset(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_detect_language_rest_flattened(): + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_adaptive_mt_dataset_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = translation_service.DetectLanguageResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.AdaptiveMtDataset() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_adaptive_mt_dataset( parent="parent_value", - model="model_value", - mime_type="mime_type_value", + adaptive_mt_dataset=adaptive_mt.AdaptiveMtDataset(name="name_value"), ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = translation_service.DetectLanguageResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.detect_language(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v3/{parent=projects/*/locations/*}:detectLanguage" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].adaptive_mt_dataset + mock_val = adaptive_mt.AdaptiveMtDataset(name="name_value") + assert arg == mock_val -def test_detect_language_rest_flattened_error(transport: str = "rest"): +def test_create_adaptive_mt_dataset_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.detect_language( - translation_service.DetectLanguageRequest(), + client.create_adaptive_mt_dataset( + adaptive_mt.CreateAdaptiveMtDatasetRequest(), parent="parent_value", - model="model_value", - mime_type="mime_type_value", - content="content_value", + adaptive_mt_dataset=adaptive_mt.AdaptiveMtDataset(name="name_value"), ) -def test_detect_language_rest_error(): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" +@pytest.mark.asyncio +async def test_create_adaptive_mt_dataset_flattened_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.AdaptiveMtDataset() -@pytest.mark.parametrize( - "request_type", - [ - translation_service.GetSupportedLanguagesRequest, - dict, + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtDataset() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_adaptive_mt_dataset( + parent="parent_value", + adaptive_mt_dataset=adaptive_mt.AdaptiveMtDataset(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].adaptive_mt_dataset + mock_val = adaptive_mt.AdaptiveMtDataset(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_adaptive_mt_dataset_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_adaptive_mt_dataset( + adaptive_mt.CreateAdaptiveMtDatasetRequest(), + parent="parent_value", + adaptive_mt_dataset=adaptive_mt.AdaptiveMtDataset(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.DeleteAdaptiveMtDatasetRequest, + dict, ], ) -def test_get_supported_languages_rest(request_type): +def test_delete_adaptive_mt_dataset(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = translation_service.SupportedLanguages() + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
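# The flattened-argument tests above show that CreateAdaptiveMtDataset accepts either a
# request object or keyword arguments, but not both (ValueError). A hedged sketch using
# the flattened form with hypothetical resource names and language codes:
from google.cloud import translate_v3

client = translate_v3.TranslationServiceClient()
dataset = client.create_adaptive_mt_dataset(
    parent="projects/my-project/locations/us-central1",  # hypothetical
    adaptive_mt_dataset=translate_v3.AdaptiveMtDataset(
        name="projects/my-project/locations/us-central1/adaptiveMtDatasets/my-ds",
        source_language_code="en",
        target_language_code="es",
    ),
)
print(dataset.name)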
+ request = request_type() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = translation_service.SupportedLanguages.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_adaptive_mt_dataset(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_supported_languages(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = adaptive_mt.DeleteAdaptiveMtDatasetRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, translation_service.SupportedLanguages) + assert response is None -def test_get_supported_languages_rest_use_cached_wrapped_rpc(): +def test_delete_adaptive_mt_dataset_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_dataset), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_adaptive_mt_dataset() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.DeleteAdaptiveMtDatasetRequest() + + +def test_delete_adaptive_mt_dataset_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = adaptive_mt.DeleteAdaptiveMtDatasetRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_dataset), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_adaptive_mt_dataset(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.DeleteAdaptiveMtDatasetRequest( + name="name_value", + ) + + +def test_delete_adaptive_mt_dataset_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -10370,7 +10275,7 @@ def test_get_supported_languages_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_supported_languages + client._transport.delete_adaptive_mt_dataset in client._transport._wrapped_methods ) @@ -10380,316 +10285,371 @@ def test_get_supported_languages_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_supported_languages + client._transport.delete_adaptive_mt_dataset ] = mock_rpc - request = {} - client.get_supported_languages(request) + client.delete_adaptive_mt_dataset(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_supported_languages(request) + client.delete_adaptive_mt_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_supported_languages_rest_required_fields( - request_type=translation_service.GetSupportedLanguagesRequest, -): - transport_class = transports.TranslationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_delete_adaptive_mt_dataset_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_adaptive_mt_dataset() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.DeleteAdaptiveMtDatasetRequest() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_supported_languages._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with default values are now present +@pytest.mark.asyncio +async def test_delete_adaptive_mt_dataset_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - jsonified_request["parent"] = "parent_value" + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_supported_languages._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "display_language_code", - "model", + # Ensure method has been cached + assert ( + client._client._transport.delete_adaptive_mt_dataset + in client._client._transport._wrapped_methods ) - ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_adaptive_mt_dataset + ] = mock_object - # Designate an appropriate value for the returned response. - return_value = translation_service.SupportedLanguages() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + request = {} + await client.delete_adaptive_mt_dataset(request) - response_value = Response() - response_value.status_code = 200 + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 - # Convert return value to protobuf type - return_value = translation_service.SupportedLanguages.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + await client.delete_adaptive_mt_dataset(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 - response = client.get_supported_languages(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_supported_languages_rest_unset_required_fields(): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_supported_languages._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "displayLanguageCode", - "model", - ) - ) - & set(("parent",)) +@pytest.mark.asyncio +async def test_delete_adaptive_mt_dataset_async( + transport: str = "grpc_asyncio", + request_type=adaptive_mt.DeleteAdaptiveMtDatasetRequest, +): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_supported_languages_rest_interceptors(null_interceptor): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TranslationServiceRestInterceptor(), - ) - client = TranslationServiceClient(transport=transport) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_get_supported_languages" - ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_get_supported_languages" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = translation_service.GetSupportedLanguagesRequest.pb( - translation_service.GetSupportedLanguagesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + type(client.transport.delete_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_adaptive_mt_dataset(request) - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = translation_service.SupportedLanguages.to_json( - translation_service.SupportedLanguages() - ) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = adaptive_mt.DeleteAdaptiveMtDatasetRequest() + assert args[0] == request - request = translation_service.GetSupportedLanguagesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = translation_service.SupportedLanguages() + # Establish that the response is the type that we expect. + assert response is None - client.get_supported_languages( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - pre.assert_called_once() - post.assert_called_once() +@pytest.mark.asyncio +async def test_delete_adaptive_mt_dataset_async_from_dict(): + await test_delete_adaptive_mt_dataset_async(request_type=dict) -def test_get_supported_languages_rest_bad_request( - transport: str = "rest", - request_type=translation_service.GetSupportedLanguagesRequest, -): +def test_delete_adaptive_mt_dataset_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.DeleteAdaptiveMtDatasetRequest() - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_supported_languages(request) + request.name = "name_value" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_dataset), "__call__" + ) as call: + call.return_value = None + client.delete_adaptive_mt_dataset(request) -def test_get_supported_languages_rest_flattened(): - client = TranslationServiceClient( + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_adaptive_mt_dataset_field_headers_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = translation_service.SupportedLanguages() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = adaptive_mt.DeleteAdaptiveMtDatasetRequest() - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + request.name = "name_value" - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - model="model_value", - display_language_code="display_language_code_value", - ) - mock_args.update(sample_request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_dataset), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_adaptive_mt_dataset(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = translation_service.SupportedLanguages.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.get_supported_languages(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_adaptive_mt_dataset_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_adaptive_mt_dataset( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v3/{parent=projects/*/locations/*}/supportedLanguages" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_get_supported_languages_rest_flattened_error(transport: str = "rest"): +def test_delete_adaptive_mt_dataset_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_supported_languages( - translation_service.GetSupportedLanguagesRequest(), - parent="parent_value", - model="model_value", - display_language_code="display_language_code_value", + client.delete_adaptive_mt_dataset( + adaptive_mt.DeleteAdaptiveMtDatasetRequest(), + name="name_value", ) -def test_get_supported_languages_rest_error(): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - translation_service.TranslateDocumentRequest, - dict, - ], -) -def test_translate_document_rest(request_type): - client = TranslationServiceClient( +@pytest.mark.asyncio +async def test_delete_adaptive_mt_dataset_flattened_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = translation_service.TranslateDocumentResponse( - model="model_value", + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_adaptive_mt_dataset( + name="name_value", ) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = translation_service.TranslateDocumentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.translate_document(request) + +@pytest.mark.asyncio +async def test_delete_adaptive_mt_dataset_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_adaptive_mt_dataset( + adaptive_mt.DeleteAdaptiveMtDatasetRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.GetAdaptiveMtDatasetRequest, + dict, + ], +) +def test_get_adaptive_mt_dataset(request_type, transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.AdaptiveMtDataset( + name="name_value", + display_name="display_name_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + example_count=1396, + ) + response = client.get_adaptive_mt_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = adaptive_mt.GetAdaptiveMtDatasetRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, translation_service.TranslateDocumentResponse) - assert response.model == "model_value" + assert isinstance(response, adaptive_mt.AdaptiveMtDataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.source_language_code == "source_language_code_value" + assert response.target_language_code == "target_language_code_value" + assert response.example_count == 1396 -def test_translate_document_rest_use_cached_wrapped_rpc(): +def test_get_adaptive_mt_dataset_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_dataset), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_adaptive_mt_dataset() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.GetAdaptiveMtDatasetRequest() + + +def test_get_adaptive_mt_dataset_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = adaptive_mt.GetAdaptiveMtDatasetRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_dataset), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_adaptive_mt_dataset(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.GetAdaptiveMtDatasetRequest( + name="name_value", + ) + + +def test_get_adaptive_mt_dataset_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -10698,7 +10658,8 @@ def test_translate_document_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.translate_document in client._transport._wrapped_methods + client._transport.get_adaptive_mt_dataset + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -10707,252 +10668,392 @@ def test_translate_document_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.translate_document + client._transport.get_adaptive_mt_dataset ] = mock_rpc - request = {} - client.translate_document(request) + client.get_adaptive_mt_dataset(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.translate_document(request) + client.get_adaptive_mt_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_translate_document_rest_required_fields( - request_type=translation_service.TranslateDocumentRequest, +@pytest.mark.asyncio +async def test_get_adaptive_mt_dataset_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtDataset( + name="name_value", + display_name="display_name_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + example_count=1396, + ) + ) + response = await client.get_adaptive_mt_dataset() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.GetAdaptiveMtDatasetRequest() + + +@pytest.mark.asyncio +async def test_get_adaptive_mt_dataset_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.TranslationServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request_init["target_language_code"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.get_adaptive_mt_dataset + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).translate_document._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_adaptive_mt_dataset + ] = mock_object - # verify required fields with default values are now present + request = {} + await client.get_adaptive_mt_dataset(request) - jsonified_request["parent"] = "parent_value" - jsonified_request["targetLanguageCode"] = "target_language_code_value" + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).translate_document._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + await client.get_adaptive_mt_dataset(request) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "targetLanguageCode" in jsonified_request - assert jsonified_request["targetLanguageCode"] == "target_language_code_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 - client = TranslationServiceClient( + +@pytest.mark.asyncio +async def test_get_adaptive_mt_dataset_async( + transport: str = "grpc_asyncio", + request_type=adaptive_mt.GetAdaptiveMtDatasetRequest, +): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. 
- return_value = translation_service.TranslateDocumentResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Convert return value to protobuf type - return_value = translation_service.TranslateDocumentResponse.pb( - return_value + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtDataset( + name="name_value", + display_name="display_name_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + example_count=1396, ) - json_return_value = json_format.MessageToJson(return_value) + ) + response = await client.get_adaptive_mt_dataset(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = adaptive_mt.GetAdaptiveMtDatasetRequest() + assert args[0] == request - response = client.translate_document(request) + # Establish that the response is the type that we expect. + assert isinstance(response, adaptive_mt.AdaptiveMtDataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.source_language_code == "source_language_code_value" + assert response.target_language_code == "target_language_code_value" + assert response.example_count == 1396 - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_get_adaptive_mt_dataset_async_from_dict(): + await test_get_adaptive_mt_dataset_async(request_type=dict) -def test_translate_document_rest_unset_required_fields(): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - unset_fields = transport.translate_document._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "targetLanguageCode", - "documentInputConfig", - ) - ) +def test_get_adaptive_mt_dataset_field_headers(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = adaptive_mt.GetAdaptiveMtDatasetRequest() -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_translate_document_rest_interceptors(null_interceptor): - transport = transports.TranslationServiceRestTransport( + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_dataset), "__call__" + ) as call: + call.return_value = adaptive_mt.AdaptiveMtDataset() + client.get_adaptive_mt_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_adaptive_mt_dataset_field_headers_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TranslationServiceRestInterceptor(), ) - client = TranslationServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_translate_document" - ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_translate_document" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = translation_service.TranslateDocumentRequest.pb( - translation_service.TranslateDocumentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = ( - translation_service.TranslateDocumentResponse.to_json( - translation_service.TranslateDocumentResponse() - ) - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.GetAdaptiveMtDatasetRequest() - request = translation_service.TranslateDocumentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = translation_service.TranslateDocumentResponse() + request.name = "name_value" - client.translate_document( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_dataset), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtDataset() ) + await client.get_adaptive_mt_dataset(request) - pre.assert_called_once() - post.assert_called_once() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_translate_document_rest_bad_request( - transport: str = "rest", request_type=translation_service.TranslateDocumentRequest -): +def test_get_adaptive_mt_dataset_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.AdaptiveMtDataset() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_adaptive_mt_dataset( + name="name_value", + ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.translate_document(request) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_translate_document_rest_error(): +def test_get_adaptive_mt_dataset_flattened_error(): client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_adaptive_mt_dataset( + adaptive_mt.GetAdaptiveMtDatasetRequest(), + name="name_value", + ) -@pytest.mark.parametrize( - "request_type", - [ - translation_service.BatchTranslateTextRequest, - dict, - ], -) -def test_batch_translate_text_rest(request_type): - client = TranslationServiceClient( + +@pytest.mark.asyncio +async def test_get_adaptive_mt_dataset_flattened_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.AdaptiveMtDataset() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtDataset() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_adaptive_mt_dataset( + name="name_value", + ) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.batch_translate_text(request) + +@pytest.mark.asyncio +async def test_get_adaptive_mt_dataset_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_adaptive_mt_dataset( + adaptive_mt.GetAdaptiveMtDatasetRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.ListAdaptiveMtDatasetsRequest, + dict, + ], +) +def test_list_adaptive_mt_datasets(request_type, transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_datasets), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_adaptive_mt_datasets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = adaptive_mt.ListAdaptiveMtDatasetsRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListAdaptiveMtDatasetsPager) + assert response.next_page_token == "next_page_token_value" -def test_batch_translate_text_rest_use_cached_wrapped_rpc(): +def test_list_adaptive_mt_datasets_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_datasets), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_adaptive_mt_datasets() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ListAdaptiveMtDatasetsRequest() + + +def test_list_adaptive_mt_datasets_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = adaptive_mt.ListAdaptiveMtDatasetsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_datasets), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_adaptive_mt_datasets(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ListAdaptiveMtDatasetsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + +def test_list_adaptive_mt_datasets_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -10961,7 +11062,8 @@ def test_batch_translate_text_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.batch_translate_text in client._transport._wrapped_methods + client._transport.list_adaptive_mt_datasets + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -10970,692 +11072,582 @@ def test_batch_translate_text_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.batch_translate_text + client._transport.list_adaptive_mt_datasets ] = mock_rpc - request = {} - client.batch_translate_text(request) + client.list_adaptive_mt_datasets(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.batch_translate_text(request) + client.list_adaptive_mt_datasets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_batch_translate_text_rest_required_fields( - request_type=translation_service.BatchTranslateTextRequest, -): - transport_class = transports.TranslationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["source_language_code"] = "" - request_init["target_language_codes"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_list_adaptive_mt_datasets_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).batch_translate_text._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_datasets), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ListAdaptiveMtDatasetsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_adaptive_mt_datasets() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ListAdaptiveMtDatasetsRequest() - jsonified_request["parent"] = "parent_value" - jsonified_request["sourceLanguageCode"] = "source_language_code_value" - jsonified_request["targetLanguageCodes"] = "target_language_codes_value" - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).batch_translate_text._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) +@pytest.mark.asyncio +async def test_list_adaptive_mt_datasets_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "sourceLanguageCode" in jsonified_request - assert jsonified_request["sourceLanguageCode"] == "source_language_code_value" - assert "targetLanguageCodes" in jsonified_request - assert jsonified_request["targetLanguageCodes"] == "target_language_codes_value" + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + # Ensure method has been cached + assert ( + client._client._transport.list_adaptive_mt_datasets + in client._client._transport._wrapped_methods + ) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_adaptive_mt_datasets + ] = mock_object - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + request = {} + await client.list_adaptive_mt_datasets(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 - response = client.batch_translate_text(request) + await client.list_adaptive_mt_datasets(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 -def test_batch_translate_text_rest_unset_required_fields(): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.batch_translate_text._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "sourceLanguageCode", - "targetLanguageCodes", - "inputConfigs", - "outputConfig", - ) - ) +@pytest.mark.asyncio +async def test_list_adaptive_mt_datasets_async( + transport: str = "grpc_asyncio", + request_type=adaptive_mt.ListAdaptiveMtDatasetsRequest, +): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_translate_text_rest_interceptors(null_interceptor): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TranslationServiceRestInterceptor(), - ) - client = TranslationServiceClient(transport=transport) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_batch_translate_text" - ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_batch_translate_text" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = translation_service.BatchTranslateTextRequest.pb( - translation_service.BatchTranslateTextRequest() + type(client.transport.list_adaptive_mt_datasets), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ListAdaptiveMtDatasetsResponse( + next_page_token="next_page_token_value", + ) ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + response = await client.list_adaptive_mt_datasets(request) - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = adaptive_mt.ListAdaptiveMtDatasetsRequest() + assert args[0] == request - request = translation_service.BatchTranslateTextRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAdaptiveMtDatasetsAsyncPager) + assert response.next_page_token == "next_page_token_value" - client.batch_translate_text( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - pre.assert_called_once() - post.assert_called_once() +@pytest.mark.asyncio +async def test_list_adaptive_mt_datasets_async_from_dict(): + await test_list_adaptive_mt_datasets_async(request_type=dict) -def test_batch_translate_text_rest_bad_request( - transport: str = "rest", request_type=translation_service.BatchTranslateTextRequest -): +def test_list_adaptive_mt_datasets_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.ListAdaptiveMtDatasetsRequest() - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.batch_translate_text(request) + request.parent = "parent_value" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_datasets), "__call__" + ) as call: + call.return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse() + client.list_adaptive_mt_datasets(request) -def test_batch_translate_text_rest_error(): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -@pytest.mark.parametrize( - "request_type", - [ - translation_service.BatchTranslateDocumentRequest, - dict, - ], -) -def test_batch_translate_document_rest(request_type): - client = TranslationServiceClient( + +@pytest.mark.asyncio +async def test_list_adaptive_mt_datasets_field_headers_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.ListAdaptiveMtDatasetsRequest() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + request.parent = "parent_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.batch_translate_document(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_datasets), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ListAdaptiveMtDatasetsResponse() + ) + await client.list_adaptive_mt_datasets(request) - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_batch_translate_document_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() +def test_list_adaptive_mt_datasets_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # Ensure method has been cached - assert ( - client._transport.batch_translate_document - in client._transport._wrapped_methods + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_datasets), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.list_adaptive_mt_datasets( + parent="parent_value", ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.batch_translate_document - ] = mock_rpc + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val - request = {} - client.batch_translate_document(request) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 +def test_list_adaptive_mt_datasets_flattened_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_adaptive_mt_datasets( + adaptive_mt.ListAdaptiveMtDatasetsRequest(), + parent="parent_value", + ) - client.batch_translate_document(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_batch_translate_document_rest_required_fields( - request_type=translation_service.BatchTranslateDocumentRequest, -): - transport_class = transports.TranslationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["source_language_code"] = "" - request_init["target_language_codes"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).batch_translate_document._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - jsonified_request["sourceLanguageCode"] = "source_language_code_value" - jsonified_request["targetLanguageCodes"] = "target_language_codes_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).batch_translate_document._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "sourceLanguageCode" in jsonified_request - assert jsonified_request["sourceLanguageCode"] == "source_language_code_value" - assert "targetLanguageCodes" in jsonified_request - assert jsonified_request["targetLanguageCodes"] == "target_language_codes_value" - client = TranslationServiceClient( +@pytest.mark.asyncio +async def test_list_adaptive_mt_datasets_flattened_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_datasets), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse() - response = client.batch_translate_document(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ListAdaptiveMtDatasetsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_adaptive_mt_datasets( + parent="parent_value", + ) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -def test_batch_translate_document_rest_unset_required_fields(): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_list_adaptive_mt_datasets_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.batch_translate_document._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "sourceLanguageCode", - "targetLanguageCodes", - "inputConfigs", - "outputConfig", - ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_adaptive_mt_datasets( + adaptive_mt.ListAdaptiveMtDatasetsRequest(), + parent="parent_value", ) - ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_translate_document_rest_interceptors(null_interceptor): - transport = transports.TranslationServiceRestTransport( +def test_list_adaptive_mt_datasets_pager(transport_name: str = "grpc"): + client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TranslationServiceRestInterceptor(), + transport=transport_name, ) - client = TranslationServiceClient(transport=transport) + + # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_batch_translate_document" - ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_batch_translate_document" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = translation_service.BatchTranslateDocumentRequest.pb( - translation_service.BatchTranslateDocumentRequest() + type(client.transport.list_adaptive_mt_datasets), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + ], + ), + RuntimeError, ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - - request = translation_service.BatchTranslateDocumentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.batch_translate_document( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + pager = client.list_adaptive_mt_datasets( + request={}, retry=retry, timeout=timeout ) - pre.assert_called_once() - post.assert_called_once() - - -def test_batch_translate_document_rest_bad_request( - transport: str = "rest", - request_type=translation_service.BatchTranslateDocumentRequest, -): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.batch_translate_document(request) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, adaptive_mt.AdaptiveMtDataset) for i in results) -def test_batch_translate_document_rest_flattened(): +def test_list_adaptive_mt_datasets_pages(transport_name: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport_name, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - source_language_code="source_language_code_value", - target_language_codes=["target_language_codes_value"], - input_configs=[ - translation_service.BatchDocumentInputConfig( - gcs_source=translation_service.GcsSource( - input_uri="input_uri_value" - ) - ) - ], - output_config=translation_service.BatchDocumentOutputConfig( - gcs_destination=translation_service.GcsDestination( - output_uri_prefix="output_uri_prefix_value" - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_datasets), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + ], ), + RuntimeError, ) - mock_args.update(sample_request) + pages = list(client.list_adaptive_mt_datasets(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - client.batch_translate_document(**mock_args) +@pytest.mark.asyncio +async def test_list_adaptive_mt_datasets_async_pager(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v3/{parent=projects/*/locations/*}:batchTranslateDocument" - % client.transport._host, - args[1], + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_datasets), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_adaptive_mt_datasets( + request={}, ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, adaptive_mt.AdaptiveMtDataset) for i in responses) -def test_batch_translate_document_rest_flattened_error(transport: str = "rest"): - client = TranslationServiceClient( +@pytest.mark.asyncio +async def test_list_adaptive_mt_datasets_async_pages(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.batch_translate_document( - translation_service.BatchTranslateDocumentRequest(), - parent="parent_value", - source_language_code="source_language_code_value", - target_language_codes=["target_language_codes_value"], - input_configs=[ - translation_service.BatchDocumentInputConfig( - gcs_source=translation_service.GcsSource( - input_uri="input_uri_value" - ) - ) - ], - output_config=translation_service.BatchDocumentOutputConfig( - gcs_destination=translation_service.GcsDestination( - output_uri_prefix="output_uri_prefix_value" - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_datasets), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + ], ), + RuntimeError, ) - - -def test_batch_translate_document_rest_error(): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_adaptive_mt_datasets(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - translation_service.CreateGlossaryRequest, + adaptive_mt.AdaptiveMtTranslateRequest, dict, ], ) -def test_create_glossary_rest(request_type): +def test_adaptive_mt_translate(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["glossary"] = { - "name": "name_value", - "language_pair": { - "source_language_code": "source_language_code_value", - "target_language_code": "target_language_code_value", - }, - "language_codes_set": { - "language_codes": ["language_codes_value1", "language_codes_value2"] - }, - "input_config": {"gcs_source": {"input_uri": "input_uri_value"}}, - "entry_count": 1210, - "submit_time": {"seconds": 751, "nanos": 543}, - "end_time": {}, - "display_name": "display_name_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = translation_service.CreateGlossaryRequest.meta.fields["glossary"] + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.adaptive_mt_translate), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = adaptive_mt.AdaptiveMtTranslateResponse( + language_code="language_code_value", + ) + response = client.adaptive_mt_translate(request) - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = adaptive_mt.AdaptiveMtTranslateRequest() + assert args[0] == request - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # Establish that the response is the type that we expect. + assert isinstance(response, adaptive_mt.AdaptiveMtTranslateResponse) + assert response.language_code == "language_code_value" - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["glossary"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +def test_adaptive_mt_translate_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["glossary"][field])): - del request_init["glossary"][field][i][subfield] - else: - del request_init["glossary"][field][subfield] - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.adaptive_mt_translate), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.adaptive_mt_translate() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.AdaptiveMtTranslateRequest() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) +def test_adaptive_mt_translate_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_glossary(request) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = adaptive_mt.AdaptiveMtTranslateRequest( + parent="parent_value", + dataset="dataset_value", + ) - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.adaptive_mt_translate), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.adaptive_mt_translate(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.AdaptiveMtTranslateRequest( + parent="parent_value", + dataset="dataset_value", + ) -def test_create_glossary_rest_use_cached_wrapped_rpc(): +def test_adaptive_mt_translate_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -11663,313 +11655,400 @@ def test_create_glossary_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_glossary in client._transport._wrapped_methods + assert ( + client._transport.adaptive_mt_translate + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_glossary] = mock_rpc - + client._transport._wrapped_methods[ + client._transport.adaptive_mt_translate + ] = mock_rpc request = {} - client.create_glossary(request) + client.adaptive_mt_translate(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_glossary(request) + client.adaptive_mt_translate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_glossary_rest_required_fields( - request_type=translation_service.CreateGlossaryRequest, -): - transport_class = transports.TranslationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_adaptive_mt_translate_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_glossary._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.adaptive_mt_translate), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtTranslateResponse( + language_code="language_code_value", + ) + ) + response = await client.adaptive_mt_translate() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.AdaptiveMtTranslateRequest() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_glossary._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" +@pytest.mark.asyncio +async def test_adaptive_mt_translate_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Ensure method has been cached + assert ( + client._client._transport.adaptive_mt_translate + in client._client._transport._wrapped_methods + ) - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.adaptive_mt_translate + ] = mock_object - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + request = {} + await client.adaptive_mt_translate(request) - response = client.create_glossary(request) + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + await client.adaptive_mt_translate(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 -def test_create_glossary_rest_unset_required_fields(): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - unset_fields = transport.create_glossary._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "glossary", - ) - ) +@pytest.mark.asyncio +async def test_adaptive_mt_translate_async( + transport: str = "grpc_asyncio", request_type=adaptive_mt.AdaptiveMtTranslateRequest +): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_glossary_rest_interceptors(null_interceptor): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TranslationServiceRestInterceptor(), - ) - client = TranslationServiceClient(transport=transport) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_create_glossary" - ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_create_glossary" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = translation_service.CreateGlossaryRequest.pb( - translation_service.CreateGlossaryRequest() + type(client.transport.adaptive_mt_translate), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtTranslateResponse( + language_code="language_code_value", + ) ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + response = await client.adaptive_mt_translate(request) - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = adaptive_mt.AdaptiveMtTranslateRequest() + assert args[0] == request - request = translation_service.CreateGlossaryRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + # Establish that the response is the type that we expect. + assert isinstance(response, adaptive_mt.AdaptiveMtTranslateResponse) + assert response.language_code == "language_code_value" - client.create_glossary( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - pre.assert_called_once() - post.assert_called_once() +@pytest.mark.asyncio +async def test_adaptive_mt_translate_async_from_dict(): + await test_adaptive_mt_translate_async(request_type=dict) -def test_create_glossary_rest_bad_request( - transport: str = "rest", request_type=translation_service.CreateGlossaryRequest -): +def test_adaptive_mt_translate_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.AdaptiveMtTranslateRequest() - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_glossary(request) + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.adaptive_mt_translate), "__call__" + ) as call: + call.return_value = adaptive_mt.AdaptiveMtTranslateResponse() + client.adaptive_mt_translate(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_create_glossary_rest_flattened(): - client = TranslationServiceClient( + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_adaptive_mt_translate_field_headers_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.AdaptiveMtTranslateRequest() - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + request.parent = "parent_value" - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - glossary=translation_service.Glossary(name="name_value"), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.adaptive_mt_translate), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtTranslateResponse() ) - mock_args.update(sample_request) + await client.adaptive_mt_translate(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.create_glossary(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_adaptive_mt_translate_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.adaptive_mt_translate), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.AdaptiveMtTranslateResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.adaptive_mt_translate( + parent="parent_value", + content=["content_value"], + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v3/{parent=projects/*/locations/*}/glossaries" % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].content + mock_val = ["content_value"] + assert arg == mock_val -def test_create_glossary_rest_flattened_error(transport: str = "rest"): +def test_adaptive_mt_translate_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_glossary( - translation_service.CreateGlossaryRequest(), + client.adaptive_mt_translate( + adaptive_mt.AdaptiveMtTranslateRequest(), parent="parent_value", - glossary=translation_service.Glossary(name="name_value"), + content=["content_value"], ) -def test_create_glossary_rest_error(): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" +@pytest.mark.asyncio +async def test_adaptive_mt_translate_flattened_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.adaptive_mt_translate), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.AdaptiveMtTranslateResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtTranslateResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.adaptive_mt_translate( + parent="parent_value", + content=["content_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].content + mock_val = ["content_value"] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_adaptive_mt_translate_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.adaptive_mt_translate( + adaptive_mt.AdaptiveMtTranslateRequest(), + parent="parent_value", + content=["content_value"], + ) + @pytest.mark.parametrize( "request_type", [ - translation_service.ListGlossariesRequest, + adaptive_mt.GetAdaptiveMtFileRequest, dict, ], ) -def test_list_glossaries_rest(request_type): +def test_get_adaptive_mt_file(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = translation_service.ListGlossariesResponse( - next_page_token="next_page_token_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = adaptive_mt.AdaptiveMtFile( + name="name_value", + display_name="display_name_value", + entry_count=1210, ) + response = client.get_adaptive_mt_file(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = translation_service.ListGlossariesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_glossaries(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = adaptive_mt.GetAdaptiveMtFileRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListGlossariesPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, adaptive_mt.AdaptiveMtFile) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.entry_count == 1210 -def test_list_glossaries_rest_use_cached_wrapped_rpc(): +def test_get_adaptive_mt_file_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_file), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_adaptive_mt_file() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.GetAdaptiveMtFileRequest() + + +def test_get_adaptive_mt_file_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = adaptive_mt.GetAdaptiveMtFileRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_file), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_adaptive_mt_file(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.GetAdaptiveMtFileRequest( + name="name_value", + ) + + +def test_get_adaptive_mt_file_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -11977,379 +12056,388 @@ def test_list_glossaries_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_glossaries in client._transport._wrapped_methods + assert ( + client._transport.get_adaptive_mt_file in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_glossaries] = mock_rpc - + client._transport._wrapped_methods[ + client._transport.get_adaptive_mt_file + ] = mock_rpc request = {} - client.list_glossaries(request) + client.get_adaptive_mt_file(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_glossaries(request) + client.get_adaptive_mt_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_glossaries_rest_required_fields( - request_type=translation_service.ListGlossariesRequest, -): - transport_class = transports.TranslationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_get_adaptive_mt_file_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_glossaries._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_glossaries._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtFile( + name="name_value", + display_name="display_name_value", + entry_count=1210, + ) ) - ) - jsonified_request.update(unset_fields) + response = await client.get_adaptive_mt_file() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.GetAdaptiveMtFileRequest() - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) +@pytest.mark.asyncio +async def test_get_adaptive_mt_file_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Designate an appropriate value for the returned response. - return_value = translation_service.ListGlossariesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - response_value = Response() - response_value.status_code = 200 + # Ensure method has been cached + assert ( + client._client._transport.get_adaptive_mt_file + in client._client._transport._wrapped_methods + ) - # Convert return value to protobuf type - return_value = translation_service.ListGlossariesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_adaptive_mt_file + ] = mock_object - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + request = {} + await client.get_adaptive_mt_file(request) - response = client.list_glossaries(request) + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + await client.get_adaptive_mt_file(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 -def test_list_glossaries_rest_unset_required_fields(): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - unset_fields = transport.list_glossaries._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) +@pytest.mark.asyncio +async def test_get_adaptive_mt_file_async( + transport: str = "grpc_asyncio", request_type=adaptive_mt.GetAdaptiveMtFileRequest +): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_glossaries_rest_interceptors(null_interceptor): - transport = transports.TranslationServiceRestTransport( + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtFile( + name="name_value", + display_name="display_name_value", + entry_count=1210, + ) + ) + response = await client.get_adaptive_mt_file(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = adaptive_mt.GetAdaptiveMtFileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, adaptive_mt.AdaptiveMtFile) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.entry_count == 1210 + + +@pytest.mark.asyncio +async def test_get_adaptive_mt_file_async_from_dict(): + await test_get_adaptive_mt_file_async(request_type=dict) + + +def test_get_adaptive_mt_file_field_headers(): + client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TranslationServiceRestInterceptor(), ) - client = TranslationServiceClient(transport=transport) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.GetAdaptiveMtFileRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_list_glossaries" - ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_list_glossaries" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = translation_service.ListGlossariesRequest.pb( - translation_service.ListGlossariesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + type(client.transport.get_adaptive_mt_file), "__call__" + ) as call: + call.return_value = adaptive_mt.AdaptiveMtFile() + client.get_adaptive_mt_file(request) - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = translation_service.ListGlossariesResponse.to_json( - translation_service.ListGlossariesResponse() - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - request = translation_service.ListGlossariesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = translation_service.ListGlossariesResponse() + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] - client.list_glossaries( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + +@pytest.mark.asyncio +async def test_get_adaptive_mt_file_field_headers_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.GetAdaptiveMtFileRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_file), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtFile() ) + await client.get_adaptive_mt_file(request) - pre.assert_called_once() - post.assert_called_once() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_list_glossaries_rest_bad_request( - transport: str = "rest", request_type=translation_service.ListGlossariesRequest -): +def test_get_adaptive_mt_file_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = adaptive_mt.AdaptiveMtFile() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_adaptive_mt_file( + name="name_value", + ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_glossaries(request) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_list_glossaries_rest_flattened(): +def test_get_adaptive_mt_file_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = translation_service.ListGlossariesResponse() + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_adaptive_mt_file( + adaptive_mt.GetAdaptiveMtFileRequest(), + name="name_value", + ) - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) +@pytest.mark.asyncio +async def test_get_adaptive_mt_file_flattened_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = translation_service.ListGlossariesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.AdaptiveMtFile() - client.list_glossaries(**mock_args) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtFile() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_adaptive_mt_file( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v3/{parent=projects/*/locations/*}/glossaries" % client.transport._host, - args[1], - ) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_list_glossaries_rest_flattened_error(transport: str = "rest"): - client = TranslationServiceClient( +@pytest.mark.asyncio +async def test_get_adaptive_mt_file_flattened_error_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_glossaries( - translation_service.ListGlossariesRequest(), - parent="parent_value", + await client.get_adaptive_mt_file( + adaptive_mt.GetAdaptiveMtFileRequest(), + name="name_value", ) -def test_list_glossaries_rest_pager(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.DeleteAdaptiveMtFileRequest, + dict, + ], +) +def test_delete_adaptive_mt_file(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - translation_service.ListGlossariesResponse( - glossaries=[ - translation_service.Glossary(), - translation_service.Glossary(), - translation_service.Glossary(), - ], - next_page_token="abc", - ), - translation_service.ListGlossariesResponse( - glossaries=[], - next_page_token="def", - ), - translation_service.ListGlossariesResponse( - glossaries=[ - translation_service.Glossary(), - ], - next_page_token="ghi", - ), - translation_service.ListGlossariesResponse( - glossaries=[ - translation_service.Glossary(), - translation_service.Glossary(), - ], - ), - ) - # Two responses for two calls - response = response + response + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the values into proper Response objs - response = tuple( - translation_service.ListGlossariesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_adaptive_mt_file(request) - sample_request = {"parent": "projects/sample1/locations/sample2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = adaptive_mt.DeleteAdaptiveMtFileRequest() + assert args[0] == request - pager = client.list_glossaries(request=sample_request) + # Establish that the response is the type that we expect. 
+ assert response is None - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, translation_service.Glossary) for i in results) - pages = list(client.list_glossaries(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_delete_adaptive_mt_file_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_file), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_adaptive_mt_file() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.DeleteAdaptiveMtFileRequest() -@pytest.mark.parametrize( - "request_type", - [ - translation_service.GetGlossaryRequest, - dict, - ], -) -def test_get_glossary_rest(request_type): +def test_delete_adaptive_mt_file_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} - request = request_type(**request_init) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = adaptive_mt.DeleteAdaptiveMtFileRequest( + name="name_value", + ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = translation_service.Glossary( + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_file), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_adaptive_mt_file(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.DeleteAdaptiveMtFileRequest( name="name_value", - entry_count=1210, - display_name="display_name_value", ) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = translation_service.Glossary.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_glossary(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, translation_service.Glossary) - assert response.name == "name_value" - assert response.entry_count == 1210 - assert response.display_name == "display_name_value" - -def test_get_glossary_rest_use_cached_wrapped_rpc(): +def test_delete_adaptive_mt_file_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -12357,298 +12445,371 @@ def test_get_glossary_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_glossary in client._transport._wrapped_methods + assert ( + client._transport.delete_adaptive_mt_file + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_glossary] = mock_rpc - + client._transport._wrapped_methods[ + client._transport.delete_adaptive_mt_file + ] = mock_rpc request = {} - client.get_glossary(request) + client.delete_adaptive_mt_file(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_glossary(request) + client.delete_adaptive_mt_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_glossary_rest_required_fields( - request_type=translation_service.GetGlossaryRequest, -): - transport_class = transports.TranslationServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_delete_adaptive_mt_file_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_adaptive_mt_file() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.DeleteAdaptiveMtFileRequest() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_glossary._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with default values are now present +@pytest.mark.asyncio +async def test_delete_adaptive_mt_file_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - jsonified_request["name"] = "name_value" + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_glossary._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Ensure method has been cached + assert ( + client._client._transport.delete_adaptive_mt_file + in client._client._transport._wrapped_methods + ) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_adaptive_mt_file + ] = mock_object - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + request = {} + await client.delete_adaptive_mt_file(request) - # Designate an appropriate value for the returned response. - return_value = translation_service.Glossary() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 - response_value = Response() - response_value.status_code = 200 + await client.delete_adaptive_mt_file(request) - # Convert return value to protobuf type - return_value = translation_service.Glossary.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_glossary(request) +@pytest.mark.asyncio +async def test_delete_adaptive_mt_file_async( + transport: str = "grpc_asyncio", + request_type=adaptive_mt.DeleteAdaptiveMtFileRequest, +): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_adaptive_mt_file(request) -def test_get_glossary_rest_unset_required_fields(): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = adaptive_mt.DeleteAdaptiveMtFileRequest() + assert args[0] == request - unset_fields = transport.get_glossary._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Establish that the response is the type that we expect. 
+ assert response is None -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_glossary_rest_interceptors(null_interceptor): - transport = transports.TranslationServiceRestTransport( +@pytest.mark.asyncio +async def test_delete_adaptive_mt_file_async_from_dict(): + await test_delete_adaptive_mt_file_async(request_type=dict) + + +def test_delete_adaptive_mt_file_field_headers(): + client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TranslationServiceRestInterceptor(), ) - client = TranslationServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_get_glossary" - ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_get_glossary" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = translation_service.GetGlossaryRequest.pb( - translation_service.GetGlossaryRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = translation_service.Glossary.to_json( - translation_service.Glossary() - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.DeleteAdaptiveMtFileRequest() - request = translation_service.GetGlossaryRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = translation_service.Glossary() + request.name = "name_value" - client.get_glossary( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_file), "__call__" + ) as call: + call.return_value = None + client.delete_adaptive_mt_file(request) - pre.assert_called_once() - post.assert_called_once() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_get_glossary_rest_bad_request( - transport: str = "rest", request_type=translation_service.GetGlossaryRequest -): - client = TranslationServiceClient( +@pytest.mark.asyncio +async def test_delete_adaptive_mt_file_field_headers_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} - request = request_type(**request_init) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.DeleteAdaptiveMtFileRequest() - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_glossary(request) + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_file), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_adaptive_mt_file(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_get_glossary_rest_flattened(): + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_adaptive_mt_file_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = translation_service.Glossary() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/glossaries/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_adaptive_mt_file( name="name_value", ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = translation_service.Glossary.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_glossary(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v3/{name=projects/*/locations/*/glossaries/*}" % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_get_glossary_rest_flattened_error(transport: str = "rest"): +def test_delete_adaptive_mt_file_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_glossary( - translation_service.GetGlossaryRequest(), + client.delete_adaptive_mt_file( + adaptive_mt.DeleteAdaptiveMtFileRequest(), name="name_value", ) -def test_get_glossary_rest_error(): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" +@pytest.mark.asyncio +async def test_delete_adaptive_mt_file_flattened_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_adaptive_mt_file( + name="name_value", + ) -@pytest.mark.parametrize( - "request_type", - [ - translation_service.DeleteGlossaryRequest, - dict, - ], -) -def test_delete_glossary_rest(request_type): - client = TranslationServiceClient( + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_adaptive_mt_file_flattened_error_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} - request = request_type(**request_init) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_adaptive_mt_file( + adaptive_mt.DeleteAdaptiveMtFileRequest(), + name="name_value", + ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.ImportAdaptiveMtFileRequest, + dict, + ], +) +def test_import_adaptive_mt_file(request_type, transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_glossary(request) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ImportAdaptiveMtFileResponse() + response = client.import_adaptive_mt_file(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = adaptive_mt.ImportAdaptiveMtFileRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, adaptive_mt.ImportAdaptiveMtFileResponse) -def test_delete_glossary_rest_use_cached_wrapped_rpc(): +def test_import_adaptive_mt_file_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_adaptive_mt_file), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.import_adaptive_mt_file() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ImportAdaptiveMtFileRequest() + + +def test_import_adaptive_mt_file_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = adaptive_mt.ImportAdaptiveMtFileRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_adaptive_mt_file), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.import_adaptive_mt_file(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ImportAdaptiveMtFileRequest( + parent="parent_value", + ) + + +def test_import_adaptive_mt_file_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -12656,390 +12817,384 @@ def test_delete_glossary_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_glossary in client._transport._wrapped_methods + assert ( + client._transport.import_adaptive_mt_file + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_glossary] = mock_rpc - + client._transport._wrapped_methods[ + client._transport.import_adaptive_mt_file + ] = mock_rpc request = {} - client.delete_glossary(request) + client.import_adaptive_mt_file(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_glossary(request) + client.import_adaptive_mt_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_glossary_rest_required_fields( - request_type=translation_service.DeleteGlossaryRequest, -): - transport_class = transports.TranslationServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_import_adaptive_mt_file_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_glossary._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ImportAdaptiveMtFileResponse() + ) + response = await client.import_adaptive_mt_file() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ImportAdaptiveMtFileRequest() - jsonified_request["name"] = "name_value" - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_glossary._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) +@pytest.mark.asyncio +async def test_import_adaptive_mt_file_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + # Ensure method has been cached + assert ( + client._client._transport.import_adaptive_mt_file + in client._client._transport._wrapped_methods + ) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.import_adaptive_mt_file + ] = mock_object - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + request = {} + await client.import_adaptive_mt_file(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 - response = client.delete_glossary(request) + await client.import_adaptive_mt_file(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 -def test_delete_glossary_rest_unset_required_fields(): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_import_adaptive_mt_file_async( + transport: str = "grpc_asyncio", + request_type=adaptive_mt.ImportAdaptiveMtFileRequest, +): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - unset_fields = transport.delete_glossary._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_glossary_rest_interceptors(null_interceptor): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TranslationServiceRestInterceptor(), - ) - client = TranslationServiceClient(transport=transport) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_delete_glossary" - ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_delete_glossary" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = translation_service.DeleteGlossaryRequest.pb( - translation_service.DeleteGlossaryRequest() + type(client.transport.import_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ImportAdaptiveMtFileResponse() ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + response = await client.import_adaptive_mt_file(request) - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = adaptive_mt.ImportAdaptiveMtFileRequest() + assert args[0] == request - request = translation_service.DeleteGlossaryRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + # Establish that the response is the type that we expect. + assert isinstance(response, adaptive_mt.ImportAdaptiveMtFileResponse) - client.delete_glossary( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - pre.assert_called_once() - post.assert_called_once() +@pytest.mark.asyncio +async def test_import_adaptive_mt_file_async_from_dict(): + await test_import_adaptive_mt_file_async(request_type=dict) -def test_delete_glossary_rest_bad_request( - transport: str = "rest", request_type=translation_service.DeleteGlossaryRequest -): +def test_import_adaptive_mt_file_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} - request = request_type(**request_init) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.ImportAdaptiveMtFileRequest() - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_glossary(request) + request.parent = "parent_value" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_adaptive_mt_file), "__call__" + ) as call: + call.return_value = adaptive_mt.ImportAdaptiveMtFileResponse() + client.import_adaptive_mt_file(request) -def test_delete_glossary_rest_flattened(): - client = TranslationServiceClient( + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_import_adaptive_mt_file_field_headers_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.ImportAdaptiveMtFileRequest() - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/glossaries/sample3" - } + request.parent = "parent_value" - # get truthy value for each flattened field - mock_args = dict( - name="name_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_adaptive_mt_file), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ImportAdaptiveMtFileResponse() ) - mock_args.update(sample_request) + await client.import_adaptive_mt_file(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.delete_glossary(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_import_adaptive_mt_file_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ImportAdaptiveMtFileResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.import_adaptive_mt_file( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v3/{name=projects/*/locations/*/glossaries/*}" % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -def test_delete_glossary_rest_flattened_error(transport: str = "rest"): +def test_import_adaptive_mt_file_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_glossary( - translation_service.DeleteGlossaryRequest(), - name="name_value", + client.import_adaptive_mt_file( + adaptive_mt.ImportAdaptiveMtFileRequest(), + parent="parent_value", ) -def test_delete_glossary_rest_error(): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" +@pytest.mark.asyncio +async def test_import_adaptive_mt_file_flattened_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.import_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ImportAdaptiveMtFileResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ImportAdaptiveMtFileResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.import_adaptive_mt_file( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_import_adaptive_mt_file_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.import_adaptive_mt_file( + adaptive_mt.ImportAdaptiveMtFileRequest(), + parent="parent_value", + ) + @pytest.mark.parametrize( "request_type", [ - adaptive_mt.CreateAdaptiveMtDatasetRequest, + adaptive_mt.ListAdaptiveMtFilesRequest, dict, ], ) -def test_create_adaptive_mt_dataset_rest(request_type): +def test_list_adaptive_mt_files(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["adaptive_mt_dataset"] = { - "name": "name_value", - "display_name": "display_name_value", - "source_language_code": "source_language_code_value", - "target_language_code": "target_language_code_value", - "example_count": 1396, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Determine if the message type is proto-plus or protobuf - test_field = adaptive_mt.CreateAdaptiveMtDatasetRequest.meta.fields[ - "adaptive_mt_dataset" - ] + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_files), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ListAdaptiveMtFilesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_adaptive_mt_files(request) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = adaptive_mt.ListAdaptiveMtFilesRequest() + assert args[0] == request - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAdaptiveMtFilesPager) + assert response.next_page_token == "next_page_token_value" - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] +def test_list_adaptive_mt_files_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - subfields_not_in_runtime = [] + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_files), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_adaptive_mt_files() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ListAdaptiveMtFilesRequest() - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["adaptive_mt_dataset"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +def test_list_adaptive_mt_files_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["adaptive_mt_dataset"][field])): - del request_init["adaptive_mt_dataset"][field][i][subfield] - else: - del request_init["adaptive_mt_dataset"][field][subfield] - request = request_type(**request_init) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = adaptive_mt.ListAdaptiveMtFilesRequest( + parent="parent_value", + page_token="page_token_value", + ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.AdaptiveMtDataset( - name="name_value", - display_name="display_name_value", - source_language_code="source_language_code_value", - target_language_code="target_language_code_value", - example_count=1396, + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_files), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_adaptive_mt_files(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ListAdaptiveMtFilesRequest( + parent="parent_value", + page_token="page_token_value", ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = adaptive_mt.AdaptiveMtDataset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_adaptive_mt_dataset(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, adaptive_mt.AdaptiveMtDataset) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_language_code == "source_language_code_value" - assert response.target_language_code == "target_language_code_value" - assert response.example_count == 1396 -def test_create_adaptive_mt_dataset_rest_use_cached_wrapped_rpc(): +def test_list_adaptive_mt_files_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -13048,7 +13203,7 @@ def test_create_adaptive_mt_dataset_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_adaptive_mt_dataset + client._transport.list_adaptive_mt_files in client._transport._wrapped_methods ) @@ -13058,613 +13213,579 @@ def test_create_adaptive_mt_dataset_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_adaptive_mt_dataset + client._transport.list_adaptive_mt_files ] = mock_rpc - request = {} - client.create_adaptive_mt_dataset(request) + client.list_adaptive_mt_files(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_adaptive_mt_dataset(request) + client.list_adaptive_mt_files(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_adaptive_mt_dataset_rest_required_fields( - request_type=adaptive_mt.CreateAdaptiveMtDatasetRequest, +@pytest.mark.asyncio +async def test_list_adaptive_mt_files_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_files), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ListAdaptiveMtFilesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_adaptive_mt_files() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ListAdaptiveMtFilesRequest() + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_files_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.TranslationServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.list_adaptive_mt_files + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_adaptive_mt_dataset._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_adaptive_mt_files + ] = mock_object - # verify required fields with default values are now present + request = {} + await client.list_adaptive_mt_files(request) - jsonified_request["parent"] = "parent_value" + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_adaptive_mt_dataset._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + await client.list_adaptive_mt_files(request) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 - client = TranslationServiceClient( + +@pytest.mark.asyncio +async def test_list_adaptive_mt_files_async( + transport: str = "grpc_asyncio", request_type=adaptive_mt.ListAdaptiveMtFilesRequest +): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.AdaptiveMtDataset() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_files), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ListAdaptiveMtFilesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_adaptive_mt_files(request) - # Convert return value to protobuf type - return_value = adaptive_mt.AdaptiveMtDataset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = adaptive_mt.ListAdaptiveMtFilesRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAdaptiveMtFilesAsyncPager) + assert response.next_page_token == "next_page_token_value" - response = client.create_adaptive_mt_dataset(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_list_adaptive_mt_files_async_from_dict(): + await test_list_adaptive_mt_files_async(request_type=dict) -def test_create_adaptive_mt_dataset_rest_unset_required_fields(): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_list_adaptive_mt_files_field_headers(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.create_adaptive_mt_dataset._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "adaptiveMtDataset", - ) - ) - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.ListAdaptiveMtFilesRequest() + request.parent = "parent_value" -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_adaptive_mt_dataset_rest_interceptors(null_interceptor): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TranslationServiceRestInterceptor(), - ) - client = TranslationServiceClient(transport=transport) + # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_create_adaptive_mt_dataset" - ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_create_adaptive_mt_dataset" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = adaptive_mt.CreateAdaptiveMtDatasetRequest.pb( - adaptive_mt.CreateAdaptiveMtDatasetRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = adaptive_mt.AdaptiveMtDataset.to_json( - adaptive_mt.AdaptiveMtDataset() - ) - - request = adaptive_mt.CreateAdaptiveMtDatasetRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = adaptive_mt.AdaptiveMtDataset() + type(client.transport.list_adaptive_mt_files), "__call__" + ) as call: + call.return_value = adaptive_mt.ListAdaptiveMtFilesResponse() + client.list_adaptive_mt_files(request) - client.create_adaptive_mt_dataset( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - pre.assert_called_once() - post.assert_called_once() + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_create_adaptive_mt_dataset_rest_bad_request( - transport: str = "rest", request_type=adaptive_mt.CreateAdaptiveMtDatasetRequest -): - client = TranslationServiceClient( +@pytest.mark.asyncio +async def test_list_adaptive_mt_files_field_headers_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.ListAdaptiveMtFilesRequest() - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_adaptive_mt_dataset(request) + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_files), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ListAdaptiveMtFilesResponse() + ) + await client.list_adaptive_mt_files(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_create_adaptive_mt_dataset_rest_flattened(): + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_adaptive_mt_files_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.AdaptiveMtDataset() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_files), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ListAdaptiveMtFilesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_adaptive_mt_files( parent="parent_value", - adaptive_mt_dataset=adaptive_mt.AdaptiveMtDataset(name="name_value"), ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = adaptive_mt.AdaptiveMtDataset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_adaptive_mt_dataset(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v3/{parent=projects/*/locations/*}/adaptiveMtDatasets" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -def test_create_adaptive_mt_dataset_rest_flattened_error(transport: str = "rest"): +def test_list_adaptive_mt_files_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_adaptive_mt_dataset( - adaptive_mt.CreateAdaptiveMtDatasetRequest(), + client.list_adaptive_mt_files( + adaptive_mt.ListAdaptiveMtFilesRequest(), parent="parent_value", - adaptive_mt_dataset=adaptive_mt.AdaptiveMtDataset(name="name_value"), ) -def test_create_adaptive_mt_dataset_rest_error(): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - adaptive_mt.DeleteAdaptiveMtDatasetRequest, - dict, - ], -) -def test_delete_adaptive_mt_dataset_rest(request_type): - client = TranslationServiceClient( +@pytest.mark.asyncio +async def test_list_adaptive_mt_files_flattened_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_adaptive_mt_dataset(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_adaptive_mt_dataset_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_files), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ListAdaptiveMtFilesResponse() - # Ensure method has been cached - assert ( - client._transport.delete_adaptive_mt_dataset - in client._transport._wrapped_methods + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ListAdaptiveMtFilesResponse() ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_adaptive_mt_files( + parent="parent_value", ) - client._transport._wrapped_methods[ - client._transport.delete_adaptive_mt_dataset - ] = mock_rpc - - request = {} - client.delete_adaptive_mt_dataset(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_adaptive_mt_dataset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -def test_delete_adaptive_mt_dataset_rest_required_fields( - request_type=adaptive_mt.DeleteAdaptiveMtDatasetRequest, -): - transport_class = transports.TranslationServiceRestTransport - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_list_adaptive_mt_files_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_adaptive_mt_dataset._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_adaptive_mt_dataset._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_adaptive_mt_files( + adaptive_mt.ListAdaptiveMtFilesRequest(), + parent="parent_value", + ) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" +def test_list_adaptive_mt_files_pager(transport_name: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport_name, ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 - json_return_value = "" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_files), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + ], + ), + RuntimeError, + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_adaptive_mt_files(request={}, retry=retry, timeout=timeout) - response = client.delete_adaptive_mt_dataset(request) + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, adaptive_mt.AdaptiveMtFile) for i in results) -def test_delete_adaptive_mt_dataset_rest_unset_required_fields(): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_list_adaptive_mt_files_pages(transport_name: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, ) - unset_fields = transport.delete_adaptive_mt_dataset._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_files), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + ], + ), + RuntimeError, + ) + pages = list(client.list_adaptive_mt_files(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_adaptive_mt_dataset_rest_interceptors(null_interceptor): - transport = transports.TranslationServiceRestTransport( +@pytest.mark.asyncio +async def test_list_adaptive_mt_files_async_pager(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TranslationServiceRestInterceptor(), ) - client = TranslationServiceClient(transport=transport) + + # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_delete_adaptive_mt_dataset" - ) as pre: - pre.assert_not_called() - pb_message = adaptive_mt.DeleteAdaptiveMtDatasetRequest.pb( - adaptive_mt.DeleteAdaptiveMtDatasetRequest() + type(client.transport.list_adaptive_mt_files), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + ], + ), + RuntimeError, ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = adaptive_mt.DeleteAdaptiveMtDatasetRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_adaptive_mt_dataset( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + async_pager = await client.list_adaptive_mt_files( + request={}, ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) - pre.assert_called_once() + assert len(responses) == 6 + assert all(isinstance(i, adaptive_mt.AdaptiveMtFile) for i in responses) -def test_delete_adaptive_mt_dataset_rest_bad_request( - transport: str = "rest", request_type=adaptive_mt.DeleteAdaptiveMtDatasetRequest -): - client = TranslationServiceClient( +@pytest.mark.asyncio +async def test_list_adaptive_mt_files_async_pages(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_adaptive_mt_dataset(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_files), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_adaptive_mt_files(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_delete_adaptive_mt_dataset_rest_flattened(): +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.ListAdaptiveMtSentencesRequest, + dict, + ], +) +def test_list_adaptive_mt_sentences(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" - } + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # get truthy value for each flattened field - mock_args = dict( - name="name_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ListAdaptiveMtSentencesResponse( + next_page_token="next_page_token_value", ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + response = client.list_adaptive_mt_sentences(request) - client.delete_adaptive_mt_dataset(**mock_args) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = adaptive_mt.ListAdaptiveMtSentencesRequest() + assert args[0] == request - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*}" - % client.transport._host, - args[1], - ) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAdaptiveMtSentencesPager) + assert response.next_page_token == "next_page_token_value" -def test_delete_adaptive_mt_dataset_rest_flattened_error(transport: str = "rest"): +def test_list_adaptive_mt_sentences_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_adaptive_mt_dataset( - adaptive_mt.DeleteAdaptiveMtDatasetRequest(), - name="name_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client.list_adaptive_mt_sentences() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ListAdaptiveMtSentencesRequest() -def test_delete_adaptive_mt_dataset_rest_error(): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - adaptive_mt.GetAdaptiveMtDatasetRequest, - dict, - ], -) -def test_get_adaptive_mt_dataset_rest(request_type): +def test_list_adaptive_mt_sentences_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" - } - request = request_type(**request_init) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = adaptive_mt.ListAdaptiveMtSentencesRequest( + parent="parent_value", + page_token="page_token_value", + ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.AdaptiveMtDataset( - name="name_value", - display_name="display_name_value", - source_language_code="source_language_code_value", - target_language_code="target_language_code_value", - example_count=1396, + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_adaptive_mt_sentences(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ListAdaptiveMtSentencesRequest( + parent="parent_value", + page_token="page_token_value", ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = adaptive_mt.AdaptiveMtDataset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_adaptive_mt_dataset(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, adaptive_mt.AdaptiveMtDataset) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_language_code == "source_language_code_value" - assert response.target_language_code == "target_language_code_value" - assert response.example_count == 1396 -def test_get_adaptive_mt_dataset_rest_use_cached_wrapped_rpc(): +def test_list_adaptive_mt_sentences_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -13673,7 +13794,7 @@ def test_get_adaptive_mt_dataset_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_adaptive_mt_dataset + client._transport.list_adaptive_mt_sentences in client._transport._wrapped_methods ) @@ -13683,682 +13804,969 @@ def test_get_adaptive_mt_dataset_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_adaptive_mt_dataset + client._transport.list_adaptive_mt_sentences ] = mock_rpc - request = {} - client.get_adaptive_mt_dataset(request) + client.list_adaptive_mt_sentences(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_adaptive_mt_dataset(request) + client.list_adaptive_mt_sentences(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_adaptive_mt_dataset_rest_required_fields( - request_type=adaptive_mt.GetAdaptiveMtDatasetRequest, -): - transport_class = transports.TranslationServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_list_adaptive_mt_sentences_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_adaptive_mt_dataset._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ListAdaptiveMtSentencesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_adaptive_mt_sentences() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ListAdaptiveMtSentencesRequest() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_adaptive_mt_dataset._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" +@pytest.mark.asyncio +async def test_list_adaptive_mt_sentences_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.AdaptiveMtDataset() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Ensure method has been cached + assert ( + client._client._transport.list_adaptive_mt_sentences + in client._client._transport._wrapped_methods + ) - response_value = Response() - response_value.status_code = 200 + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_adaptive_mt_sentences + ] = mock_object - # Convert return value to protobuf type - return_value = adaptive_mt.AdaptiveMtDataset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + request = {} + await client.list_adaptive_mt_sentences(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 - response = client.get_adaptive_mt_dataset(request) + await client.list_adaptive_mt_sentences(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 -def test_get_adaptive_mt_dataset_rest_unset_required_fields(): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_list_adaptive_mt_sentences_async( + transport: str = "grpc_asyncio", + request_type=adaptive_mt.ListAdaptiveMtSentencesRequest, +): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - unset_fields = transport.get_adaptive_mt_dataset._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_adaptive_mt_dataset_rest_interceptors(null_interceptor): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TranslationServiceRestInterceptor(), - ) - client = TranslationServiceClient(transport=transport) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_get_adaptive_mt_dataset" - ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_get_adaptive_mt_dataset" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = adaptive_mt.GetAdaptiveMtDatasetRequest.pb( - adaptive_mt.GetAdaptiveMtDatasetRequest() + type(client.transport.list_adaptive_mt_sentences), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ListAdaptiveMtSentencesResponse( + next_page_token="next_page_token_value", + ) ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + response = await client.list_adaptive_mt_sentences(request) - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = adaptive_mt.AdaptiveMtDataset.to_json( - adaptive_mt.AdaptiveMtDataset() - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = adaptive_mt.ListAdaptiveMtSentencesRequest() + assert args[0] == request - request = adaptive_mt.GetAdaptiveMtDatasetRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = adaptive_mt.AdaptiveMtDataset() + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAdaptiveMtSentencesAsyncPager) + assert response.next_page_token == "next_page_token_value" - client.get_adaptive_mt_dataset( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - pre.assert_called_once() - post.assert_called_once() +@pytest.mark.asyncio +async def test_list_adaptive_mt_sentences_async_from_dict(): + await test_list_adaptive_mt_sentences_async(request_type=dict) -def test_get_adaptive_mt_dataset_rest_bad_request( - transport: str = "rest", request_type=adaptive_mt.GetAdaptiveMtDatasetRequest -): +def test_list_adaptive_mt_sentences_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_adaptive_mt_dataset(request) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.ListAdaptiveMtSentencesRequest() + request.parent = "parent_value" -def test_get_adaptive_mt_dataset_rest_flattened(): - client = TranslationServiceClient( + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), "__call__" + ) as call: + call.return_value = adaptive_mt.ListAdaptiveMtSentencesResponse() + client.list_adaptive_mt_sentences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_sentences_field_headers_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.AdaptiveMtDataset() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.ListAdaptiveMtSentencesRequest() - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" - } + request.parent = "parent_value" - # get truthy value for each flattened field - mock_args = dict( - name="name_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ListAdaptiveMtSentencesResponse() ) - mock_args.update(sample_request) + await client.list_adaptive_mt_sentences(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = adaptive_mt.AdaptiveMtDataset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.get_adaptive_mt_dataset(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_adaptive_mt_sentences_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ListAdaptiveMtSentencesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_adaptive_mt_sentences( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*}" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -def test_get_adaptive_mt_dataset_rest_flattened_error(transport: str = "rest"): +def test_list_adaptive_mt_sentences_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_adaptive_mt_dataset( - adaptive_mt.GetAdaptiveMtDatasetRequest(), - name="name_value", + client.list_adaptive_mt_sentences( + adaptive_mt.ListAdaptiveMtSentencesRequest(), + parent="parent_value", ) -def test_get_adaptive_mt_dataset_rest_error(): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - adaptive_mt.ListAdaptiveMtDatasetsRequest, - dict, - ], -) -def test_list_adaptive_mt_datasets_rest(request_type): - client = TranslationServiceClient( +@pytest.mark.asyncio +async def test_list_adaptive_mt_sentences_flattened_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ListAdaptiveMtSentencesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse( - next_page_token="next_page_token_value", + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ListAdaptiveMtSentencesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_adaptive_mt_sentences( + parent="parent_value", ) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_adaptive_mt_datasets(request) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAdaptiveMtDatasetsPager) - assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_adaptive_mt_sentences_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_list_adaptive_mt_datasets_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_adaptive_mt_sentences( + adaptive_mt.ListAdaptiveMtSentencesRequest(), + parent="parent_value", ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - # Ensure method has been cached - assert ( - client._transport.list_adaptive_mt_datasets - in client._transport._wrapped_methods - ) +def test_list_adaptive_mt_sentences_pager(transport_name: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + ], + ), + RuntimeError, ) - client._transport._wrapped_methods[ - client._transport.list_adaptive_mt_datasets - ] = mock_rpc - - request = {} - client.list_adaptive_mt_datasets(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - client.list_adaptive_mt_datasets(request) + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_adaptive_mt_sentences( + request={}, retry=retry, timeout=timeout + ) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, adaptive_mt.AdaptiveMtSentence) for i in results) -def test_list_adaptive_mt_datasets_rest_required_fields( - request_type=adaptive_mt.ListAdaptiveMtDatasetsRequest, -): - transport_class = transports.TranslationServiceRestTransport - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +def test_list_adaptive_mt_sentences_pages(transport_name: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_adaptive_mt_datasets._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + ], + ), + RuntimeError, + ) + pages = list(client.list_adaptive_mt_sentences(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" +@pytest.mark.asyncio +async def test_list_adaptive_mt_sentences_async_pager(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_adaptive_mt_datasets._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + ], + ), + RuntimeError, ) - ) - jsonified_request.update(unset_fields) + async_pager = await client.list_adaptive_mt_sentences( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert len(responses) == 6 + assert all(isinstance(i, adaptive_mt.AdaptiveMtSentence) for i in responses) - client = TranslationServiceClient( + +@pytest.mark.asyncio +async def test_list_adaptive_mt_sentences_async_pages(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_adaptive_mt_sentences(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) +@pytest.mark.parametrize( + "request_type", + [ + automl_translation.ImportDataRequest, + dict, + ], +) +def test_import_data(request_type, transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response = client.list_adaptive_mt_datasets(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_data), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.import_data(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = automl_translation.ImportDataRequest() + assert args[0] == request + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) -def test_list_adaptive_mt_datasets_rest_unset_required_fields(): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - unset_fields = transport.list_adaptive_mt_datasets._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) +def test_import_data_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_data), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.import_data() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.ImportDataRequest() -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_adaptive_mt_datasets_rest_interceptors(null_interceptor): - transport = transports.TranslationServiceRestTransport( + +def test_import_data_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TranslationServiceRestInterceptor(), + transport="grpc", ) - client = TranslationServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_list_adaptive_mt_datasets" - ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_list_adaptive_mt_datasets" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = adaptive_mt.ListAdaptiveMtDatasetsRequest.pb( - adaptive_mt.ListAdaptiveMtDatasetsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = adaptive_mt.ListAdaptiveMtDatasetsResponse.to_json( - adaptive_mt.ListAdaptiveMtDatasetsResponse() + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = automl_translation.ImportDataRequest( + dataset="dataset_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_data), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.import_data(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.ImportDataRequest( + dataset="dataset_value", ) - request = adaptive_mt.ListAdaptiveMtDatasetsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse() - client.list_adaptive_mt_datasets( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], +def test_import_data_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - pre.assert_called_once() - post.assert_called_once() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + # Ensure method has been cached + assert client._transport.import_data in client._transport._wrapped_methods -def test_list_adaptive_mt_datasets_rest_bad_request( - transport: str = "rest", request_type=adaptive_mt.ListAdaptiveMtDatasetsRequest -): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.import_data] = mock_rpc + request = {} + client.import_data(request) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_adaptive_mt_datasets(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + client.import_data(request) -def test_list_adaptive_mt_datasets_rest_flattened(): - client = TranslationServiceClient( + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_import_data_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc_asyncio", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse() + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.import_data), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.import_data() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.ImportDataRequest() - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", +@pytest.mark.asyncio +async def test_import_data_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - client.list_adaptive_mt_datasets(**mock_args) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v3/{parent=projects/*/locations/*}/adaptiveMtDatasets" - % client.transport._host, - args[1], + # Ensure method has been cached + assert ( + client._client._transport.import_data + in client._client._transport._wrapped_methods ) + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.import_data + ] = mock_object + + request = {} + await client.import_data(request) -def test_list_adaptive_mt_datasets_rest_flattened_error(transport: str = "rest"): - client = TranslationServiceClient( + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.import_data(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_import_data_async( + transport: str = "grpc_asyncio", request_type=automl_translation.ImportDataRequest +): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_adaptive_mt_datasets( - adaptive_mt.ListAdaptiveMtDatasetsRequest(), - parent="parent_value", + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_data), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") ) + response = await client.import_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = automl_translation.ImportDataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_list_adaptive_mt_datasets_rest_pager(transport: str = "rest"): +@pytest.mark.asyncio +async def test_import_data_async_from_dict(): + await test_import_data_async(request_type=dict) + + +def test_import_data_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - adaptive_mt.ListAdaptiveMtDatasetsResponse( - adaptive_mt_datasets=[ - adaptive_mt.AdaptiveMtDataset(), - adaptive_mt.AdaptiveMtDataset(), - adaptive_mt.AdaptiveMtDataset(), - ], - next_page_token="abc", - ), - adaptive_mt.ListAdaptiveMtDatasetsResponse( - adaptive_mt_datasets=[], - next_page_token="def", - ), - adaptive_mt.ListAdaptiveMtDatasetsResponse( - adaptive_mt_datasets=[ - adaptive_mt.AdaptiveMtDataset(), - ], - next_page_token="ghi", - ), - adaptive_mt.ListAdaptiveMtDatasetsResponse( - adaptive_mt_datasets=[ - adaptive_mt.AdaptiveMtDataset(), - adaptive_mt.AdaptiveMtDataset(), - ], - ), + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = automl_translation.ImportDataRequest() + + request.dataset = "dataset_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_data), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.import_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "dataset=dataset_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_import_data_field_headers_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = automl_translation.ImportDataRequest() + + request.dataset = "dataset_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.import_data), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") ) - # Two responses for two calls - response = response + response + await client.import_data(request) - # Wrap the values into proper Response objs - response = tuple( - adaptive_mt.ListAdaptiveMtDatasetsResponse.to_json(x) for x in response + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "dataset=dataset_value", + ) in kw["metadata"] + + +def test_import_data_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_data), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.import_data( + dataset="dataset_value", + input_config=automl_translation.DatasetInputConfig( + input_files=[ + automl_translation.DatasetInputConfig.InputFile(usage="usage_value") + ] + ), ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].dataset + mock_val = "dataset_value" + assert arg == mock_val + arg = args[0].input_config + mock_val = automl_translation.DatasetInputConfig( + input_files=[ + automl_translation.DatasetInputConfig.InputFile(usage="usage_value") + ] + ) + assert arg == mock_val - pager = client.list_adaptive_mt_datasets(request=sample_request) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, adaptive_mt.AdaptiveMtDataset) for i in results) +def test_import_data_flattened_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - pages = list(client.list_adaptive_mt_datasets(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.import_data( + automl_translation.ImportDataRequest(), + dataset="dataset_value", + input_config=automl_translation.DatasetInputConfig( + input_files=[ + automl_translation.DatasetInputConfig.InputFile(usage="usage_value") + ] + ), + ) + + +@pytest.mark.asyncio +async def test_import_data_flattened_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_data), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.import_data( + dataset="dataset_value", + input_config=automl_translation.DatasetInputConfig( + input_files=[ + automl_translation.DatasetInputConfig.InputFile(usage="usage_value") + ] + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].dataset + mock_val = "dataset_value" + assert arg == mock_val + arg = args[0].input_config + mock_val = automl_translation.DatasetInputConfig( + input_files=[ + automl_translation.DatasetInputConfig.InputFile(usage="usage_value") + ] + ) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_import_data_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.import_data( + automl_translation.ImportDataRequest(), + dataset="dataset_value", + input_config=automl_translation.DatasetInputConfig( + input_files=[ + automl_translation.DatasetInputConfig.InputFile(usage="usage_value") + ] + ), + ) @pytest.mark.parametrize( "request_type", [ - adaptive_mt.AdaptiveMtTranslateRequest, + automl_translation.ExportDataRequest, dict, ], ) -def test_adaptive_mt_translate_rest(request_type): +def test_export_data(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.AdaptiveMtTranslateResponse( - language_code="language_code_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_data), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.export_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = automl_translation.ExportDataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_export_data_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.export_data), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client.export_data() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.ExportDataRequest() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = adaptive_mt.AdaptiveMtTranslateResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.adaptive_mt_translate(request) +def test_export_data_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Establish that the response is the type that we expect. - assert isinstance(response, adaptive_mt.AdaptiveMtTranslateResponse) - assert response.language_code == "language_code_value" + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = automl_translation.ExportDataRequest( + dataset="dataset_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_data), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.export_data(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.ExportDataRequest( + dataset="dataset_value", + ) -def test_adaptive_mt_translate_rest_use_cached_wrapped_rpc(): +def test_export_data_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -14366,333 +14774,404 @@ def test_adaptive_mt_translate_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.adaptive_mt_translate - in client._transport._wrapped_methods - ) + assert client._transport.export_data in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.adaptive_mt_translate - ] = mock_rpc - + client._transport._wrapped_methods[client._transport.export_data] = mock_rpc request = {} - client.adaptive_mt_translate(request) + client.export_data(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.adaptive_mt_translate(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_adaptive_mt_translate_rest_required_fields( - request_type=adaptive_mt.AdaptiveMtTranslateRequest, -): - transport_class = transports.TranslationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["dataset"] = "" - request_init["content"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_export_data_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).adaptive_mt_translate._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_data), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.export_data() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.ExportDataRequest() - jsonified_request["parent"] = "parent_value" - jsonified_request["dataset"] = "dataset_value" - jsonified_request["content"] = "content_value" - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).adaptive_mt_translate._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "dataset" in jsonified_request - assert jsonified_request["dataset"] == "dataset_value" - assert "content" in jsonified_request - assert jsonified_request["content"] == "content_value" +@pytest.mark.asyncio +async def test_export_data_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Designate an appropriate value for the returned response. 
- return_value = adaptive_mt.AdaptiveMtTranslateResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Ensure method has been cached + assert ( + client._client._transport.export_data + in client._client._transport._wrapped_methods + ) - response_value = Response() - response_value.status_code = 200 + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.export_data + ] = mock_object - # Convert return value to protobuf type - return_value = adaptive_mt.AdaptiveMtTranslateResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + request = {} + await client.export_data(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 - response = client.adaptive_mt_translate(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + await client.export_data(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 -def test_adaptive_mt_translate_rest_unset_required_fields(): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - unset_fields = transport.adaptive_mt_translate._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "dataset", - "content", - ) - ) +@pytest.mark.asyncio +async def test_export_data_async( + transport: str = "grpc_asyncio", request_type=automl_translation.ExportDataRequest +): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_adaptive_mt_translate_rest_interceptors(null_interceptor): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TranslationServiceRestInterceptor(), - ) - client = TranslationServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_adaptive_mt_translate" - ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_adaptive_mt_translate" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = adaptive_mt.AdaptiveMtTranslateRequest.pb( - adaptive_mt.AdaptiveMtTranslateRequest() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_data), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + response = await client.export_data(request) - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = adaptive_mt.AdaptiveMtTranslateResponse.to_json( - adaptive_mt.AdaptiveMtTranslateResponse() - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = automl_translation.ExportDataRequest() + assert args[0] == request - request = adaptive_mt.AdaptiveMtTranslateRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = adaptive_mt.AdaptiveMtTranslateResponse() + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) - client.adaptive_mt_translate( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - pre.assert_called_once() - post.assert_called_once() +@pytest.mark.asyncio +async def test_export_data_async_from_dict(): + await test_export_data_async(request_type=dict) -def test_adaptive_mt_translate_rest_bad_request( - transport: str = "rest", request_type=adaptive_mt.AdaptiveMtTranslateRequest -): +def test_export_data_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = automl_translation.ExportDataRequest() - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.adaptive_mt_translate(request) + request.dataset = "dataset_value" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_data), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.export_data(request) -def test_adaptive_mt_translate_rest_flattened(): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.AdaptiveMtTranslateResponse() + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "dataset=dataset_value", + ) in kw["metadata"] - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - content=["content_value"], - ) - mock_args.update(sample_request) +@pytest.mark.asyncio +async def test_export_data_field_headers_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = adaptive_mt.AdaptiveMtTranslateResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = automl_translation.ExportDataRequest() - client.adaptive_mt_translate(**mock_args) + request.dataset = "dataset_value" - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v3/{parent=projects/*/locations/*}:adaptiveMtTranslate" - % client.transport._host, - args[1], + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_data), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") ) + await client.export_data(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_adaptive_mt_translate_rest_flattened_error(transport: str = "rest"): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.adaptive_mt_translate( - adaptive_mt.AdaptiveMtTranslateRequest(), - parent="parent_value", - content=["content_value"], - ) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "dataset=dataset_value", + ) in kw["metadata"] -def test_adaptive_mt_translate_rest_error(): +def test_export_data_flattened(): client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_data), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.export_data( + dataset="dataset_value", + output_config=automl_translation.DatasetOutputConfig( + gcs_destination=common.GcsOutputDestination( + output_uri_prefix="output_uri_prefix_value" + ) + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].dataset + mock_val = "dataset_value" + assert arg == mock_val + arg = args[0].output_config + mock_val = automl_translation.DatasetOutputConfig( + gcs_destination=common.GcsOutputDestination( + output_uri_prefix="output_uri_prefix_value" + ) + ) + assert arg == mock_val + + +def test_export_data_flattened_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.export_data( + automl_translation.ExportDataRequest(), + dataset="dataset_value", + output_config=automl_translation.DatasetOutputConfig( + gcs_destination=common.GcsOutputDestination( + output_uri_prefix="output_uri_prefix_value" + ) + ), + ) + + +@pytest.mark.asyncio +async def test_export_data_flattened_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_data), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.export_data( + dataset="dataset_value", + output_config=automl_translation.DatasetOutputConfig( + gcs_destination=common.GcsOutputDestination( + output_uri_prefix="output_uri_prefix_value" + ) + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].dataset + mock_val = "dataset_value" + assert arg == mock_val + arg = args[0].output_config + mock_val = automl_translation.DatasetOutputConfig( + gcs_destination=common.GcsOutputDestination( + output_uri_prefix="output_uri_prefix_value" + ) + ) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_export_data_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.export_data( + automl_translation.ExportDataRequest(), + dataset="dataset_value", + output_config=automl_translation.DatasetOutputConfig( + gcs_destination=common.GcsOutputDestination( + output_uri_prefix="output_uri_prefix_value" + ) + ), + ) + @pytest.mark.parametrize( "request_type", [ - adaptive_mt.GetAdaptiveMtFileRequest, + automl_translation.ListExamplesRequest, dict, ], ) -def test_get_adaptive_mt_file_rest(request_type): +def test_list_examples(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" - } - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.AdaptiveMtFile( - name="name_value", - display_name="display_name_value", - entry_count=1210, + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_examples), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = automl_translation.ListExamplesResponse( + next_page_token="next_page_token_value", ) + response = client.list_examples(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = adaptive_mt.AdaptiveMtFile.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_adaptive_mt_file(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = automl_translation.ListExamplesRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, adaptive_mt.AdaptiveMtFile) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.entry_count == 1210 + assert isinstance(response, pagers.ListExamplesPager) + assert response.next_page_token == "next_page_token_value" -def test_get_adaptive_mt_file_rest_use_cached_wrapped_rpc(): +def test_list_examples_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_examples), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_examples() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.ListExamplesRequest() + + +def test_list_examples_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = automl_translation.ListExamplesRequest( + parent="parent_value", + filter="filter_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_examples), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_examples(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.ListExamplesRequest( + parent="parent_value", + filter="filter_value", + page_token="page_token_value", + ) + + +def test_list_examples_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -14700,307 +15179,554 @@ def test_get_adaptive_mt_file_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_adaptive_mt_file in client._transport._wrapped_methods - ) + assert client._transport.list_examples in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_adaptive_mt_file - ] = mock_rpc - + client._transport._wrapped_methods[client._transport.list_examples] = mock_rpc request = {} - client.get_adaptive_mt_file(request) + client.list_examples(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_adaptive_mt_file(request) + client.list_examples(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_adaptive_mt_file_rest_required_fields( - request_type=adaptive_mt.GetAdaptiveMtFileRequest, +@pytest.mark.asyncio +async def test_list_examples_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_examples), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + automl_translation.ListExamplesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_examples() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.ListExamplesRequest() + + +@pytest.mark.asyncio +async def test_list_examples_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.TranslationServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.list_examples + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_adaptive_mt_file._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_examples + ] = mock_object - # verify required fields with default values are now present + request = {} + await client.list_examples(request) - jsonified_request["name"] = "name_value" + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_adaptive_mt_file._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + await client.list_examples(request) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 - client = TranslationServiceClient( + +@pytest.mark.asyncio +async def test_list_examples_async( + transport: str = "grpc_asyncio", request_type=automl_translation.ListExamplesRequest +): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.AdaptiveMtFile() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_examples), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + automl_translation.ListExamplesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_examples(request) - # Convert return value to protobuf type - return_value = adaptive_mt.AdaptiveMtFile.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = automl_translation.ListExamplesRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListExamplesAsyncPager) + assert response.next_page_token == "next_page_token_value" - response = client.get_adaptive_mt_file(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_list_examples_async_from_dict(): + await test_list_examples_async(request_type=dict) -def test_get_adaptive_mt_file_rest_unset_required_fields(): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_list_examples_field_headers(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.get_adaptive_mt_file._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = automl_translation.ListExamplesRequest() + request.parent = "parent_value" -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_adaptive_mt_file_rest_interceptors(null_interceptor): - transport = transports.TranslationServiceRestTransport( + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_examples), "__call__") as call: + call.return_value = automl_translation.ListExamplesResponse() + client.list_examples(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_examples_field_headers_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TranslationServiceRestInterceptor(), ) - client = TranslationServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_get_adaptive_mt_file" - ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_get_adaptive_mt_file" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = adaptive_mt.GetAdaptiveMtFileRequest.pb( - adaptive_mt.GetAdaptiveMtFileRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = adaptive_mt.AdaptiveMtFile.to_json( - adaptive_mt.AdaptiveMtFile() - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = automl_translation.ListExamplesRequest() - request = adaptive_mt.GetAdaptiveMtFileRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = adaptive_mt.AdaptiveMtFile() + request.parent = "parent_value" - client.get_adaptive_mt_file( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_examples), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + automl_translation.ListExamplesResponse() ) + await client.list_examples(request) - pre.assert_called_once() - post.assert_called_once() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_get_adaptive_mt_file_rest_bad_request( - transport: str = "rest", request_type=adaptive_mt.GetAdaptiveMtFileRequest -): +def test_list_examples_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" - } - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_examples), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = automl_translation.ListExamplesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_examples( + parent="parent_value", + ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_adaptive_mt_file(request) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -def test_get_adaptive_mt_file_rest_flattened(): +def test_list_examples_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.AdaptiveMtFile() + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_examples( + automl_translation.ListExamplesRequest(), + parent="parent_value", + ) - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" - } - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) +@pytest.mark.asyncio +async def test_list_examples_flattened_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = adaptive_mt.AdaptiveMtFile.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_examples), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = automl_translation.ListExamplesResponse() - client.get_adaptive_mt_file(**mock_args) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + automl_translation.ListExamplesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_examples( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*/adaptiveMtFiles/*}" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -def test_get_adaptive_mt_file_rest_flattened_error(transport: str = "rest"): - client = TranslationServiceClient( +@pytest.mark.asyncio +async def test_list_examples_flattened_error_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_adaptive_mt_file( - adaptive_mt.GetAdaptiveMtFileRequest(), - name="name_value", + await client.list_examples( + automl_translation.ListExamplesRequest(), + parent="parent_value", ) -def test_get_adaptive_mt_file_rest_error(): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - adaptive_mt.DeleteAdaptiveMtFileRequest, - dict, - ], -) -def test_delete_adaptive_mt_file_rest(request_type): +def test_list_examples_pager(transport_name: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport_name, ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_examples), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + automl_translation.ListExamplesResponse( + examples=[ + automl_translation.Example(), + automl_translation.Example(), + automl_translation.Example(), + ], + next_page_token="abc", + ), + automl_translation.ListExamplesResponse( + examples=[], + next_page_token="def", + ), + automl_translation.ListExamplesResponse( + examples=[ + automl_translation.Example(), + ], + next_page_token="ghi", + ), + automl_translation.ListExamplesResponse( + examples=[ + automl_translation.Example(), + automl_translation.Example(), + ], + ), + RuntimeError, + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_adaptive_mt_file(request) + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_examples(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, automl_translation.Example) for i in results) + + +def test_list_examples_pages(transport_name: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_examples), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + automl_translation.ListExamplesResponse( + examples=[ + automl_translation.Example(), + automl_translation.Example(), + automl_translation.Example(), + ], + next_page_token="abc", + ), + automl_translation.ListExamplesResponse( + examples=[], + next_page_token="def", + ), + automl_translation.ListExamplesResponse( + examples=[ + automl_translation.Example(), + ], + next_page_token="ghi", + ), + automl_translation.ListExamplesResponse( + examples=[ + automl_translation.Example(), + automl_translation.Example(), + ], + ), + RuntimeError, + ) + pages = list(client.list_examples(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_examples_async_pager(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_examples), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + automl_translation.ListExamplesResponse( + examples=[ + automl_translation.Example(), + automl_translation.Example(), + automl_translation.Example(), + ], + next_page_token="abc", + ), + automl_translation.ListExamplesResponse( + examples=[], + next_page_token="def", + ), + automl_translation.ListExamplesResponse( + examples=[ + automl_translation.Example(), + ], + next_page_token="ghi", + ), + automl_translation.ListExamplesResponse( + examples=[ + automl_translation.Example(), + automl_translation.Example(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_examples( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, automl_translation.Example) for i in responses) + + +@pytest.mark.asyncio +async def test_list_examples_async_pages(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_examples), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + automl_translation.ListExamplesResponse( + examples=[ + automl_translation.Example(), + automl_translation.Example(), + automl_translation.Example(), + ], + next_page_token="abc", + ), + automl_translation.ListExamplesResponse( + examples=[], + next_page_token="def", + ), + automl_translation.ListExamplesResponse( + examples=[ + automl_translation.Example(), + ], + next_page_token="ghi", + ), + automl_translation.ListExamplesResponse( + examples=[ + automl_translation.Example(), + automl_translation.Example(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_examples(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + automl_translation.CreateModelRequest, + dict, + ], +) +def test_create_model(request_type, transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = automl_translation.CreateModelRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, future.Future) -def test_delete_adaptive_mt_file_rest_use_cached_wrapped_rpc(): +def test_create_model_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_model), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_model() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.CreateModelRequest() + + +def test_create_model_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = automl_translation.CreateModelRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_model), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_model(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.CreateModelRequest( + parent="parent_value", + ) + + +def test_create_model_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -15008,297 +15734,380 @@ def test_delete_adaptive_mt_file_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_adaptive_mt_file - in client._transport._wrapped_methods - ) + assert client._transport.create_model in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_adaptive_mt_file - ] = mock_rpc - + client._transport._wrapped_methods[client._transport.create_model] = mock_rpc request = {} - client.delete_adaptive_mt_file(request) + client.create_model(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_adaptive_mt_file(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_adaptive_mt_file_rest_required_fields( - request_type=adaptive_mt.DeleteAdaptiveMtFileRequest, -): - transport_class = transports.TranslationServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_create_model_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_model() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.CreateModelRequest() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_adaptive_mt_file._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with default values are now present +@pytest.mark.asyncio +async def test_create_model_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - jsonified_request["name"] = "name_value" + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_adaptive_mt_file._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Ensure method has been cached + assert ( + client._client._transport.create_model + in client._client._transport._wrapped_methods + ) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_model + ] = mock_object - client = TranslationServiceClient( + request = {} + await client.create_model(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_model_async( + transport: str = "grpc_asyncio", request_type=automl_translation.CreateModelRequest +): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 - json_return_value = "" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_model(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = automl_translation.CreateModelRequest() + assert args[0] == request - response = client.delete_adaptive_mt_file(request) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_create_model_async_from_dict(): + await test_create_model_async(request_type=dict) -def test_delete_adaptive_mt_file_rest_unset_required_fields(): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials + +def test_create_model_field_headers(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.delete_adaptive_mt_file._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = automl_translation.CreateModelRequest() + + request.parent = "parent_value" + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_model), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_model(request) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_adaptive_mt_file_rest_interceptors(null_interceptor): - transport = transports.TranslationServiceRestTransport( + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_model_field_headers_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TranslationServiceRestInterceptor(), ) - client = TranslationServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_delete_adaptive_mt_file" - ) as pre: - pre.assert_not_called() - pb_message = adaptive_mt.DeleteAdaptiveMtFileRequest.pb( - adaptive_mt.DeleteAdaptiveMtFileRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = automl_translation.CreateModelRequest() - request = adaptive_mt.DeleteAdaptiveMtFileRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata + request.parent = "parent_value" - client.delete_adaptive_mt_file( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_model), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") ) + await client.create_model(request) - pre.assert_called_once() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_delete_adaptive_mt_file_rest_bad_request( - transport: str = "rest", request_type=adaptive_mt.DeleteAdaptiveMtFileRequest -): +def test_create_model_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" - } - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_model), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_model( + parent="parent_value", + model=automl_translation.Model(name="name_value"), + ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_adaptive_mt_file(request) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].model + mock_val = automl_translation.Model(name="name_value") + assert arg == mock_val -def test_delete_adaptive_mt_file_rest_flattened(): +def test_create_model_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_model( + automl_translation.CreateModelRequest(), + parent="parent_value", + model=automl_translation.Model(name="name_value"), + ) - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" - } - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) +@pytest.mark.asyncio +async def test_create_model_flattened_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_adaptive_mt_file(**mock_args) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_model( + parent="parent_value", + model=automl_translation.Model(name="name_value"), + ) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*/adaptiveMtFiles/*}" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].model + mock_val = automl_translation.Model(name="name_value") + assert arg == mock_val -def test_delete_adaptive_mt_file_rest_flattened_error(transport: str = "rest"): - client = TranslationServiceClient( +@pytest.mark.asyncio +async def test_create_model_flattened_error_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_adaptive_mt_file( - adaptive_mt.DeleteAdaptiveMtFileRequest(), - name="name_value", + await client.create_model( + automl_translation.CreateModelRequest(), + parent="parent_value", + model=automl_translation.Model(name="name_value"), ) -def test_delete_adaptive_mt_file_rest_error(): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - @pytest.mark.parametrize( "request_type", [ - adaptive_mt.ImportAdaptiveMtFileRequest, + automl_translation.ListModelsRequest, dict, ], ) -def test_import_adaptive_mt_file_rest(request_type): +def test_list_models(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.ImportAdaptiveMtFileResponse() + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = adaptive_mt.ImportAdaptiveMtFileResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = automl_translation.ListModelsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_models(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.import_adaptive_mt_file(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = automl_translation.ListModelsRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, adaptive_mt.ImportAdaptiveMtFileResponse) + assert isinstance(response, pagers.ListModelsPager) + assert response.next_page_token == "next_page_token_value" -def test_import_adaptive_mt_file_rest_use_cached_wrapped_rpc(): +def test_list_models_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_models() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.ListModelsRequest() + + +def test_list_models_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = automl_translation.ListModelsRequest( + parent="parent_value", + filter="filter_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_models(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.ListModelsRequest( + parent="parent_value", + filter="filter_value", + page_token="page_token_value", + ) + + +def test_list_models_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -15306,702 +16115,951 @@ def test_import_adaptive_mt_file_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.import_adaptive_mt_file - in client._transport._wrapped_methods - ) + assert client._transport.list_models in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.import_adaptive_mt_file - ] = mock_rpc - + client._transport._wrapped_methods[client._transport.list_models] = mock_rpc request = {} - client.import_adaptive_mt_file(request) + client.list_models(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.import_adaptive_mt_file(request) + client.list_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_import_adaptive_mt_file_rest_required_fields( - request_type=adaptive_mt.ImportAdaptiveMtFileRequest, +@pytest.mark.asyncio +async def test_list_models_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + automl_translation.ListModelsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_models() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.ListModelsRequest() + + +@pytest.mark.asyncio +async def test_list_models_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.TranslationServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.list_models + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).import_adaptive_mt_file._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_models + ] = mock_object - # verify required fields with default values are now present + request = {} + await client.list_models(request) - jsonified_request["parent"] = "parent_value" + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).import_adaptive_mt_file._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + await client.list_models(request) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 - client = TranslationServiceClient( + +@pytest.mark.asyncio +async def test_list_models_async( + transport: str = "grpc_asyncio", request_type=automl_translation.ListModelsRequest +): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.ImportAdaptiveMtFileResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + automl_translation.ListModelsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_models(request) - # Convert return value to protobuf type - return_value = adaptive_mt.ImportAdaptiveMtFileResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = automl_translation.ListModelsRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListModelsAsyncPager) + assert response.next_page_token == "next_page_token_value" - response = client.import_adaptive_mt_file(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_list_models_async_from_dict(): + await test_list_models_async(request_type=dict) -def test_import_adaptive_mt_file_rest_unset_required_fields(): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_list_models_field_headers(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.import_adaptive_mt_file._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = automl_translation.ListModelsRequest() + request.parent = "parent_value" -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_adaptive_mt_file_rest_interceptors(null_interceptor): - transport = transports.TranslationServiceRestTransport( + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + call.return_value = automl_translation.ListModelsResponse() + client.list_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_models_field_headers_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TranslationServiceRestInterceptor(), ) - client = TranslationServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_import_adaptive_mt_file" - ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_import_adaptive_mt_file" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = adaptive_mt.ImportAdaptiveMtFileRequest.pb( - adaptive_mt.ImportAdaptiveMtFileRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = adaptive_mt.ImportAdaptiveMtFileResponse.to_json( - adaptive_mt.ImportAdaptiveMtFileResponse() - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = automl_translation.ListModelsRequest() - request = adaptive_mt.ImportAdaptiveMtFileRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = adaptive_mt.ImportAdaptiveMtFileResponse() + request.parent = "parent_value" - client.import_adaptive_mt_file( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + automl_translation.ListModelsResponse() ) + await client.list_models(request) - pre.assert_called_once() - post.assert_called_once() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_import_adaptive_mt_file_rest_bad_request( - transport: str = "rest", request_type=adaptive_mt.ImportAdaptiveMtFileRequest -): +def test_list_models_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" - } - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = automl_translation.ListModelsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_models( + parent="parent_value", + ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.import_adaptive_mt_file(request) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -def test_import_adaptive_mt_file_rest_flattened(): +def test_list_models_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.ImportAdaptiveMtFileResponse() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_models( + automl_translation.ListModelsRequest(), parent="parent_value", ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = adaptive_mt.ImportAdaptiveMtFileResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - client.import_adaptive_mt_file(**mock_args) +@pytest.mark.asyncio +async def test_list_models_flattened_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = automl_translation.ListModelsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + automl_translation.ListModelsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_models( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v3/{parent=projects/*/locations/*/adaptiveMtDatasets/*}:importAdaptiveMtFile" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -def test_import_adaptive_mt_file_rest_flattened_error(transport: str = "rest"): - client = TranslationServiceClient( +@pytest.mark.asyncio +async def test_list_models_flattened_error_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.import_adaptive_mt_file( - adaptive_mt.ImportAdaptiveMtFileRequest(), + await client.list_models( + automl_translation.ListModelsRequest(), parent="parent_value", ) -def test_import_adaptive_mt_file_rest_error(): - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - adaptive_mt.ListAdaptiveMtFilesRequest, - dict, - ], -) -def test_list_adaptive_mt_files_rest(request_type): +def test_list_models_pager(transport_name: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport_name, ) - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" - } - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + automl_translation.ListModelsResponse( + models=[ + automl_translation.Model(), + automl_translation.Model(), + automl_translation.Model(), + ], + next_page_token="abc", + ), + automl_translation.ListModelsResponse( + models=[], + next_page_token="def", + ), + automl_translation.ListModelsResponse( + models=[ + automl_translation.Model(), + ], + next_page_token="ghi", + ), + automl_translation.ListModelsResponse( + models=[ + automl_translation.Model(), + automl_translation.Model(), + ], + ), + RuntimeError, + ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.ListAdaptiveMtFilesResponse( - next_page_token="next_page_token_value", + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) + pager = client.list_models(request={}, retry=retry, timeout=timeout) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = adaptive_mt.ListAdaptiveMtFilesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_adaptive_mt_files(request) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, automl_translation.Model) for i in results) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAdaptiveMtFilesPager) - assert response.next_page_token == "next_page_token_value" +def test_list_models_pages(transport_name: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) -def test_list_adaptive_mt_files_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + automl_translation.ListModelsResponse( + models=[ + automl_translation.Model(), + automl_translation.Model(), + automl_translation.Model(), + ], + next_page_token="abc", + ), + automl_translation.ListModelsResponse( + models=[], + next_page_token="def", + ), + automl_translation.ListModelsResponse( + models=[ + automl_translation.Model(), + ], + next_page_token="ghi", + ), + automl_translation.ListModelsResponse( + models=[ + automl_translation.Model(), + automl_translation.Model(), + ], + ), + RuntimeError, ) + pages = list(client.list_models(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.list_adaptive_mt_files - in client._transport._wrapped_methods - ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_adaptive_mt_files - ] = mock_rpc +@pytest.mark.asyncio +async def test_list_models_async_pager(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - request = {} - client.list_adaptive_mt_files(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_models), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + automl_translation.ListModelsResponse( + models=[ + automl_translation.Model(), + automl_translation.Model(), + automl_translation.Model(), + ], + next_page_token="abc", + ), + automl_translation.ListModelsResponse( + models=[], + next_page_token="def", + ), + automl_translation.ListModelsResponse( + models=[ + automl_translation.Model(), + ], + next_page_token="ghi", + ), + automl_translation.ListModelsResponse( + models=[ + automl_translation.Model(), + automl_translation.Model(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_models( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + assert len(responses) == 6 + assert all(isinstance(i, automl_translation.Model) for i in responses) - client.list_adaptive_mt_files(request) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 +@pytest.mark.asyncio +async def test_list_models_async_pages(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_models), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + automl_translation.ListModelsResponse( + models=[ + automl_translation.Model(), + automl_translation.Model(), + automl_translation.Model(), + ], + next_page_token="abc", + ), + automl_translation.ListModelsResponse( + models=[], + next_page_token="def", + ), + automl_translation.ListModelsResponse( + models=[ + automl_translation.Model(), + ], + next_page_token="ghi", + ), + automl_translation.ListModelsResponse( + models=[ + automl_translation.Model(), + automl_translation.Model(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_models(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_list_adaptive_mt_files_rest_required_fields( - request_type=adaptive_mt.ListAdaptiveMtFilesRequest, -): - transport_class = transports.TranslationServiceRestTransport - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.parametrize( + "request_type", + [ + automl_translation.GetModelRequest, + dict, + ], +) +def test_get_model(request_type, transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # verify fields with default values are dropped + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_adaptive_mt_files._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = automl_translation.Model( + name="name_value", + display_name="display_name_value", + dataset="dataset_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + train_example_count=2033, + validate_example_count=2333, + test_example_count=1939, + ) + response = client.get_model(request) - # verify required fields with default values are now present + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = automl_translation.GetModelRequest() + assert args[0] == request - jsonified_request["parent"] = "parent_value" + # Establish that the response is the type that we expect. 
+ assert isinstance(response, automl_translation.Model) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.dataset == "dataset_value" + assert response.source_language_code == "source_language_code_value" + assert response.target_language_code == "target_language_code_value" + assert response.train_example_count == 2033 + assert response.validate_example_count == 2333 + assert response.test_example_count == 1939 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_adaptive_mt_files._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) + +def test_get_model_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_model() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.GetModelRequest() + +def test_get_model_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.ListAdaptiveMtFilesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = automl_translation.GetModelRequest( + name="name_value", + ) - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_model(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.GetModelRequest( + name="name_value", + ) - # Convert return value to protobuf type - return_value = adaptive_mt.ListAdaptiveMtFilesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value +def test_get_model_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - response = client.list_adaptive_mt_files(request) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Ensure method has been cached + assert client._transport.get_model in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_model] = mock_rpc + request = {} + client.get_model(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -def test_list_adaptive_mt_files_rest_unset_required_fields(): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) + client.get_model(request) - unset_fields = transport.list_adaptive_mt_files._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_adaptive_mt_files_rest_interceptors(null_interceptor): - transport = transports.TranslationServiceRestTransport( +@pytest.mark.asyncio +async def test_get_model_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TranslationServiceRestInterceptor(), + transport="grpc_asyncio", ) - client = TranslationServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_list_adaptive_mt_files" - ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_list_adaptive_mt_files" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = adaptive_mt.ListAdaptiveMtFilesRequest.pb( - adaptive_mt.ListAdaptiveMtFilesRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + automl_translation.Model( + name="name_value", + display_name="display_name_value", + dataset="dataset_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + train_example_count=2033, + validate_example_count=2333, + test_example_count=1939, + ) ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + response = await client.get_model() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.GetModelRequest() - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = adaptive_mt.ListAdaptiveMtFilesResponse.to_json( - adaptive_mt.ListAdaptiveMtFilesResponse() + +@pytest.mark.asyncio +async def test_get_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - request = adaptive_mt.ListAdaptiveMtFilesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = adaptive_mt.ListAdaptiveMtFilesResponse() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - client.list_adaptive_mt_files( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + # Ensure method has been cached + assert ( + client._client._transport.get_model + in client._client._transport._wrapped_methods ) - pre.assert_called_once() - post.assert_called_once() + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_model + ] = mock_object + request = {} + await client.get_model(request) -def test_list_adaptive_mt_files_rest_bad_request( - transport: str = "rest", request_type=adaptive_mt.ListAdaptiveMtFilesRequest + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_model_async( + transport: str = "grpc_asyncio", request_type=automl_translation.GetModelRequest ): - client = TranslationServiceClient( + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" - } - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_adaptive_mt_files(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + automl_translation.Model( + name="name_value", + display_name="display_name_value", + dataset="dataset_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + train_example_count=2033, + validate_example_count=2333, + test_example_count=1939, + ) + ) + response = await client.get_model(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = automl_translation.GetModelRequest() + assert args[0] == request -def test_list_adaptive_mt_files_rest_flattened(): + # Establish that the response is the type that we expect. + assert isinstance(response, automl_translation.Model) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.dataset == "dataset_value" + assert response.source_language_code == "source_language_code_value" + assert response.target_language_code == "target_language_code_value" + assert response.train_example_count == 2033 + assert response.validate_example_count == 2333 + assert response.test_example_count == 1939 + + +@pytest.mark.asyncio +async def test_get_model_async_from_dict(): + await test_get_model_async(request_type=dict) + + +def test_get_model_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.ListAdaptiveMtFilesResponse() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = automl_translation.GetModelRequest() - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" - } + request.name = "name_value" - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + call.return_value = automl_translation.Model() + client.get_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_model_field_headers_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = automl_translation.GetModelRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + automl_translation.Model() ) - mock_args.update(sample_request) + await client.get_model(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = adaptive_mt.ListAdaptiveMtFilesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.list_adaptive_mt_files(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_model_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = automl_translation.Model() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_model( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v3/{parent=projects/*/locations/*/adaptiveMtDatasets/*}/adaptiveMtFiles" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_list_adaptive_mt_files_rest_flattened_error(transport: str = "rest"): +def test_get_model_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_adaptive_mt_files( - adaptive_mt.ListAdaptiveMtFilesRequest(), - parent="parent_value", + client.get_model( + automl_translation.GetModelRequest(), + name="name_value", ) -def test_list_adaptive_mt_files_rest_pager(transport: str = "rest"): - client = TranslationServiceClient( +@pytest.mark.asyncio +async def test_get_model_flattened_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - adaptive_mt.ListAdaptiveMtFilesResponse( - adaptive_mt_files=[ - adaptive_mt.AdaptiveMtFile(), - adaptive_mt.AdaptiveMtFile(), - adaptive_mt.AdaptiveMtFile(), - ], - next_page_token="abc", - ), - adaptive_mt.ListAdaptiveMtFilesResponse( - adaptive_mt_files=[], - next_page_token="def", - ), - adaptive_mt.ListAdaptiveMtFilesResponse( - adaptive_mt_files=[ - adaptive_mt.AdaptiveMtFile(), - ], - next_page_token="ghi", - ), - adaptive_mt.ListAdaptiveMtFilesResponse( - adaptive_mt_files=[ - adaptive_mt.AdaptiveMtFile(), - adaptive_mt.AdaptiveMtFile(), - ], - ), - ) - # Two responses for two calls - response = response + response + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = automl_translation.Model() - # Wrap the values into proper Response objs - response = tuple( - adaptive_mt.ListAdaptiveMtFilesResponse.to_json(x) for x in response + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + automl_translation.Model() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_model( + name="name_value", ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" - } - pager = client.list_adaptive_mt_files(request=sample_request) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, adaptive_mt.AdaptiveMtFile) for i in results) - pages = list(client.list_adaptive_mt_files(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +@pytest.mark.asyncio +async def test_get_model_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_model( + automl_translation.GetModelRequest(), + name="name_value", + ) @pytest.mark.parametrize( "request_type", [ - adaptive_mt.ListAdaptiveMtSentencesRequest, + automl_translation.DeleteModelRequest, dict, ], ) -def test_list_adaptive_mt_sentences_rest(request_type): +def test_delete_model(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" - } - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.ListAdaptiveMtSentencesResponse( - next_page_token="next_page_token_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = automl_translation.DeleteModelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_model_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_model), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client.delete_model() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.DeleteModelRequest() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = adaptive_mt.ListAdaptiveMtSentencesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_adaptive_mt_sentences(request) +def test_delete_model_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAdaptiveMtSentencesPager) - assert response.next_page_token == "next_page_token_value" + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = automl_translation.DeleteModelRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_model), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_model(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.DeleteModelRequest( + name="name_value", + ) -def test_list_adaptive_mt_sentences_rest_use_cached_wrapped_rpc(): +def test_delete_model_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -16009,522 +17067,13417 @@ def test_list_adaptive_mt_sentences_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_adaptive_mt_sentences - in client._transport._wrapped_methods - ) + assert client._transport.delete_model in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_adaptive_mt_sentences - ] = mock_rpc - + client._transport._wrapped_methods[client._transport.delete_model] = mock_rpc request = {} - client.list_adaptive_mt_sentences(request) + client.delete_model(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_adaptive_mt_sentences(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_adaptive_mt_sentences_rest_required_fields( - request_type=adaptive_mt.ListAdaptiveMtSentencesRequest, -): - transport_class = transports.TranslationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_delete_model_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_model), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_model() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == automl_translation.DeleteModelRequest() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_adaptive_mt_sentences._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with default values are now present +@pytest.mark.asyncio +async def test_delete_model_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - jsonified_request["parent"] = "parent_value" + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_adaptive_mt_sentences._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", + # Ensure method has been cached + assert ( + client._client._transport.delete_model + in client._client._transport._wrapped_methods ) - ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_model + ] = mock_object - client = TranslationServiceClient( + request = {} + await client.delete_model(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_model_async( + transport: str = "grpc_asyncio", request_type=automl_translation.DeleteModelRequest +): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.ListAdaptiveMtSentencesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_model(request) - # Convert return value to protobuf type - return_value = adaptive_mt.ListAdaptiveMtSentencesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = automl_translation.DeleteModelRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) - response = client.list_adaptive_mt_sentences(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_delete_model_async_from_dict(): + await test_delete_model_async(request_type=dict) -def test_list_adaptive_mt_sentences_rest_unset_required_fields(): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_delete_model_field_headers(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.list_adaptive_mt_sentences._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = automl_translation.DeleteModelRequest() + request.name = "name_value" -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_adaptive_mt_sentences_rest_interceptors(null_interceptor): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TranslationServiceRestInterceptor(), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_model), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_model_field_headers_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), ) - client = TranslationServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_list_adaptive_mt_sentences" - ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_list_adaptive_mt_sentences" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = adaptive_mt.ListAdaptiveMtSentencesRequest.pb( - adaptive_mt.ListAdaptiveMtSentencesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = adaptive_mt.ListAdaptiveMtSentencesResponse.to_json( - adaptive_mt.ListAdaptiveMtSentencesResponse() - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = automl_translation.DeleteModelRequest() - request = adaptive_mt.ListAdaptiveMtSentencesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = adaptive_mt.ListAdaptiveMtSentencesResponse() + request.name = "name_value" - client.list_adaptive_mt_sentences( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_model), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") ) + await client.delete_model(request) - pre.assert_called_once() - post.assert_called_once() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_list_adaptive_mt_sentences_rest_bad_request( - transport: str = "rest", request_type=adaptive_mt.ListAdaptiveMtSentencesRequest -): + +def test_delete_model_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" - } - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_model( + name="name_value", + ) - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_adaptive_mt_sentences(request) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_list_adaptive_mt_sentences_rest_flattened(): +def test_delete_model_flattened_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = adaptive_mt.ListAdaptiveMtSentencesResponse() + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_model( + automl_translation.DeleteModelRequest(), + name="name_value", + ) - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" - } - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) +@pytest.mark.asyncio +async def test_delete_model_flattened_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = adaptive_mt.ListAdaptiveMtSentencesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") - client.list_adaptive_mt_sentences(**mock_args) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_model( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v3/{parent=projects/*/locations/*/adaptiveMtDatasets/*/adaptiveMtFiles/*}/adaptiveMtSentences" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_list_adaptive_mt_sentences_rest_flattened_error(transport: str = "rest"): - client = TranslationServiceClient( +@pytest.mark.asyncio +async def test_delete_model_flattened_error_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_adaptive_mt_sentences( - adaptive_mt.ListAdaptiveMtSentencesRequest(), - parent="parent_value", + await client.delete_model( + automl_translation.DeleteModelRequest(), + name="name_value", ) -def test_list_adaptive_mt_sentences_rest_pager(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + translation_service.TranslateTextRequest, + dict, + ], +) +def test_translate_text_rest(request_type): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - adaptive_mt.ListAdaptiveMtSentencesResponse( - adaptive_mt_sentences=[ - adaptive_mt.AdaptiveMtSentence(), - adaptive_mt.AdaptiveMtSentence(), - adaptive_mt.AdaptiveMtSentence(), - ], - next_page_token="abc", - ), - adaptive_mt.ListAdaptiveMtSentencesResponse( - adaptive_mt_sentences=[], - next_page_token="def", - ), - adaptive_mt.ListAdaptiveMtSentencesResponse( - adaptive_mt_sentences=[ - adaptive_mt.AdaptiveMtSentence(), - ], - next_page_token="ghi", - ), - adaptive_mt.ListAdaptiveMtSentencesResponse( - adaptive_mt_sentences=[ - adaptive_mt.AdaptiveMtSentence(), - adaptive_mt.AdaptiveMtSentence(), - ], - ), - ) - # Two responses for two calls - response = response + response + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = translation_service.TranslateTextResponse() - # Wrap the values into proper Response objs - response = tuple( - adaptive_mt.ListAdaptiveMtSentencesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.TranslateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - sample_request = { - "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" - } + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.translate_text(request) - pager = client.list_adaptive_mt_sentences(request=sample_request) + # Establish that the response is the type that we expect. + assert isinstance(response, translation_service.TranslateTextResponse) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, adaptive_mt.AdaptiveMtSentence) for i in results) - pages = list(client.list_adaptive_mt_sentences(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.TranslationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): +def test_translate_text_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.TranslationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = TranslationServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # It is an error to provide an api_key and a transport instance. - transport = transports.TranslationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = TranslationServiceClient( - client_options=options, - transport=transport, - ) + # Ensure method has been cached + assert client._transport.translate_text in client._transport._wrapped_methods - # It is an error to provide an api_key and a credential. 
- options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = TranslationServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client._transport._wrapped_methods[client._transport.translate_text] = mock_rpc - # It is an error to provide scopes and a transport instance. - transport = transports.TranslationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = TranslationServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + request = {} + client.translate_text(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.TranslationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = TranslationServiceClient(transport=transport) - assert client.transport is transport + client.translate_text(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.TranslationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - transport = transports.TranslationServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), +def test_translate_text_rest_required_fields( + request_type=translation_service.TranslateTextRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["contents"] = "" + request_init["target_language_code"] = "" + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - channel = transport.grpc_channel - assert channel + # verify fields with default values are dropped -@pytest.mark.parametrize( - "transport_class", - [ - transports.TranslationServiceGrpcTransport, - transports.TranslationServiceGrpcAsyncIOTransport, - transports.TranslationServiceRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).translate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with default values are now present -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = TranslationServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name + jsonified_request["contents"] = "contents_value" + jsonified_request["targetLanguageCode"] = "target_language_code_value" + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).translate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "contents" in jsonified_request + assert jsonified_request["contents"] == "contents_value" + assert "targetLanguageCode" in jsonified_request + assert jsonified_request["targetLanguageCode"] == "target_language_code_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert isinstance( - client.transport, - transports.TranslationServiceGrpcTransport, - ) + request = request_type(**request_init) + # Designate an appropriate value for the returned response. + return_value = translation_service.TranslateTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result -def test_translation_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.TranslationServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.TranslateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) -def test_translation_service_base_transport(): - # Instantiate the base transport. 
- with mock.patch( - "google.cloud.translate_v3.services.translation_service.transports.TranslationServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.TranslationServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "translate_text", - "detect_language", - "get_supported_languages", - "translate_document", - "batch_translate_text", - "batch_translate_document", - "create_glossary", - "list_glossaries", - "get_glossary", - "delete_glossary", - "create_adaptive_mt_dataset", - "delete_adaptive_mt_dataset", - "get_adaptive_mt_dataset", - "list_adaptive_mt_datasets", - "adaptive_mt_translate", - "get_adaptive_mt_file", - "delete_adaptive_mt_file", - "import_adaptive_mt_file", - "list_adaptive_mt_files", - "list_adaptive_mt_sentences", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) + response = client.translate_text(request) - with pytest.raises(NotImplementedError): - transport.close() + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() +def test_translate_text_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + unset_fields = transport.translate_text._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "contents", + "targetLanguageCode", + "parent", + ) + ) + ) -def test_translation_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_translate_text_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_translate_text" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_translate_text" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.TranslateTextRequest.pb( + translation_service.TranslateTextRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = translation_service.TranslateTextResponse.to_json( + translation_service.TranslateTextResponse() + ) + + request = 
translation_service.TranslateTextRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = translation_service.TranslateTextResponse() + + client.translate_text( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_translate_text_rest_bad_request( + transport: str = "rest", request_type=translation_service.TranslateTextRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.translate_text(request) + + +def test_translate_text_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = translation_service.TranslateTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + target_language_code="target_language_code_value", + contents=["contents_value"], + model="model_value", + mime_type="mime_type_value", + source_language_code="source_language_code_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.TranslateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.translate_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*}:translateText" + % client.transport._host, + args[1], + ) + + +def test_translate_text_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.translate_text( + translation_service.TranslateTextRequest(), + parent="parent_value", + target_language_code="target_language_code_value", + contents=["contents_value"], + model="model_value", + mime_type="mime_type_value", + source_language_code="source_language_code_value", + ) + + +def test_translate_text_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.RomanizeTextRequest, + dict, + ], +) +def test_romanize_text_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = translation_service.RomanizeTextResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.RomanizeTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.romanize_text(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, translation_service.RomanizeTextResponse) + + +def test_romanize_text_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.romanize_text in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.romanize_text] = mock_rpc + + request = {} + client.romanize_text(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.romanize_text(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_romanize_text_rest_required_fields( + request_type=translation_service.RomanizeTextRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["contents"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).romanize_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["contents"] = "contents_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).romanize_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "contents" in jsonified_request + assert jsonified_request["contents"] == "contents_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = translation_service.RomanizeTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = translation_service.RomanizeTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.romanize_text(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_romanize_text_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.romanize_text._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "contents", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_romanize_text_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_romanize_text" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_romanize_text" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.RomanizeTextRequest.pb( + translation_service.RomanizeTextRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = translation_service.RomanizeTextResponse.to_json( + translation_service.RomanizeTextResponse() + ) + + request = translation_service.RomanizeTextRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = translation_service.RomanizeTextResponse() + + client.romanize_text( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_romanize_text_rest_bad_request( + transport: str = "rest", request_type=translation_service.RomanizeTextRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.romanize_text(request) + + +def test_romanize_text_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = translation_service.RomanizeTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + contents=["contents_value"], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.RomanizeTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.romanize_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*}:romanizeText" + % client.transport._host, + args[1], + ) + + +def test_romanize_text_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.romanize_text( + translation_service.RomanizeTextRequest(), + parent="parent_value", + contents=["contents_value"], + ) + + +def test_romanize_text_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.DetectLanguageRequest, + dict, + ], +) +def test_detect_language_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = translation_service.DetectLanguageResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.DetectLanguageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.detect_language(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, translation_service.DetectLanguageResponse) + + +def test_detect_language_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.detect_language in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.detect_language] = mock_rpc + + request = {} + client.detect_language(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.detect_language(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_detect_language_rest_required_fields( + request_type=translation_service.DetectLanguageRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).detect_language._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).detect_language._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = translation_service.DetectLanguageResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = translation_service.DetectLanguageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.detect_language(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_detect_language_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.detect_language._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_detect_language_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_detect_language" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_detect_language" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.DetectLanguageRequest.pb( + translation_service.DetectLanguageRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = translation_service.DetectLanguageResponse.to_json( + translation_service.DetectLanguageResponse() + ) + + request = translation_service.DetectLanguageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = translation_service.DetectLanguageResponse() + + client.detect_language( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_detect_language_rest_bad_request( + transport: str = "rest", request_type=translation_service.DetectLanguageRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.detect_language(request) + + +def test_detect_language_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = translation_service.DetectLanguageResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + model="model_value", + mime_type="mime_type_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.DetectLanguageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.detect_language(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*}:detectLanguage" + % client.transport._host, + args[1], + ) + + +def test_detect_language_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.detect_language( + translation_service.DetectLanguageRequest(), + parent="parent_value", + model="model_value", + mime_type="mime_type_value", + content="content_value", + ) + + +def test_detect_language_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.GetSupportedLanguagesRequest, + dict, + ], +) +def test_get_supported_languages_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = translation_service.SupportedLanguages() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.SupportedLanguages.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_supported_languages(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, translation_service.SupportedLanguages) + + +def test_get_supported_languages_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_supported_languages + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_supported_languages + ] = mock_rpc + + request = {} + client.get_supported_languages(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_supported_languages(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_supported_languages_rest_required_fields( + request_type=translation_service.GetSupportedLanguagesRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_supported_languages._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_supported_languages._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "display_language_code", + "model", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+    return_value = translation_service.SupportedLanguages()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = translation_service.SupportedLanguages.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.get_supported_languages(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_get_supported_languages_rest_unset_required_fields():
+    transport = transports.TranslationServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.get_supported_languages._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "displayLanguageCode",
+                "model",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_supported_languages_rest_interceptors(null_interceptor):
+    transport = transports.TranslationServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.TranslationServiceRestInterceptor(),
+    )
+    client = TranslationServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.TranslationServiceRestInterceptor, "post_get_supported_languages"
+    ) as post, mock.patch.object(
+        transports.TranslationServiceRestInterceptor, "pre_get_supported_languages"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = translation_service.GetSupportedLanguagesRequest.pb(
+            translation_service.GetSupportedLanguagesRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = translation_service.SupportedLanguages.to_json(
+            translation_service.SupportedLanguages()
+        )
+
+        request = translation_service.GetSupportedLanguagesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = translation_service.SupportedLanguages()
+
+        client.get_supported_languages(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_supported_languages_rest_bad_request(
+    transport: str = "rest",
+    request_type=translation_service.GetSupportedLanguagesRequest,
+):
+    client = TranslationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_supported_languages(request)
+
+
+def test_get_supported_languages_rest_flattened():
+    client = TranslationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = translation_service.SupportedLanguages()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"parent": "projects/sample1/locations/sample2"}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent="parent_value",
+            model="model_value",
+            display_language_code="display_language_code_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = translation_service.SupportedLanguages.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.get_supported_languages(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v3/{parent=projects/*/locations/*}/supportedLanguages"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_get_supported_languages_rest_flattened_error(transport: str = "rest"):
+    client = TranslationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_supported_languages(
+            translation_service.GetSupportedLanguagesRequest(),
+            parent="parent_value",
+            model="model_value",
+            display_language_code="display_language_code_value",
+        )
+
+
+def test_get_supported_languages_rest_error():
+    client = TranslationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        translation_service.TranslateDocumentRequest,
+        dict,
+    ],
+)
+def test_translate_document_rest(request_type):
+    client = TranslationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = translation_service.TranslateDocumentResponse(
+            model="model_value",
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = translation_service.TranslateDocumentResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.translate_document(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, translation_service.TranslateDocumentResponse)
+    assert response.model == "model_value"
+
+
+def test_translate_document_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = TranslationServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert (
+            client._transport.translate_document in client._transport._wrapped_methods
+        )
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = (
+            "foo" # operation_request.operation in compute client(s) expect a string.
+        )
+        client._transport._wrapped_methods[
+            client._transport.translate_document
+        ] = mock_rpc
+
+        request = {}
+        client.translate_document(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.translate_document(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_translate_document_rest_required_fields(
+    request_type=translation_service.TranslateDocumentRequest,
+):
+    transport_class = transports.TranslationServiceRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request_init["target_language_code"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(pb_request, use_integers_for_enums=False)
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).translate_document._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["parent"] = "parent_value"
+    jsonified_request["targetLanguageCode"] = "target_language_code_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).translate_document._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == "parent_value"
+    assert "targetLanguageCode" in jsonified_request
+    assert jsonified_request["targetLanguageCode"] == "target_language_code_value"
+
+    client = TranslationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = translation_service.TranslateDocumentResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = translation_service.TranslateDocumentResponse.pb(
+                return_value
+            )
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.translate_document(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_translate_document_rest_unset_required_fields():
+    transport = transports.TranslationServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.translate_document._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "parent",
+                "targetLanguageCode",
+                "documentInputConfig",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_translate_document_rest_interceptors(null_interceptor):
+    transport = transports.TranslationServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.TranslationServiceRestInterceptor(),
+    )
+    client = TranslationServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.TranslationServiceRestInterceptor, "post_translate_document"
+    ) as post, mock.patch.object(
+        transports.TranslationServiceRestInterceptor, "pre_translate_document"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = translation_service.TranslateDocumentRequest.pb(
+            translation_service.TranslateDocumentRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = (
+            translation_service.TranslateDocumentResponse.to_json(
+                translation_service.TranslateDocumentResponse()
+            )
+        )
+
+        request = translation_service.TranslateDocumentRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = translation_service.TranslateDocumentResponse()
+
+        client.translate_document(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_translate_document_rest_bad_request(
+    transport: str = "rest", request_type=translation_service.TranslateDocumentRequest
+):
+    client = TranslationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.translate_document(request)
+
+
+def test_translate_document_rest_error():
+    client = TranslationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        translation_service.BatchTranslateTextRequest,
+        dict,
+    ],
+)
+def test_batch_translate_text_rest(request_type):
+    client = TranslationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name="operations/spam")
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.batch_translate_text(request)
+
+    # Establish that the response is the type that we expect.
+    assert response.operation.name == "operations/spam"
+
+
+def test_batch_translate_text_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = TranslationServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert (
+            client._transport.batch_translate_text in client._transport._wrapped_methods
+        )
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = (
+            "foo" # operation_request.operation in compute client(s) expect a string.
+        )
+        client._transport._wrapped_methods[
+            client._transport.batch_translate_text
+        ] = mock_rpc
+
+        request = {}
+        client.batch_translate_text(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.batch_translate_text(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_batch_translate_text_rest_required_fields( + request_type=translation_service.BatchTranslateTextRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["source_language_code"] = "" + request_init["target_language_codes"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_translate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["sourceLanguageCode"] = "source_language_code_value" + jsonified_request["targetLanguageCodes"] = "target_language_codes_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_translate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "sourceLanguageCode" in jsonified_request + assert jsonified_request["sourceLanguageCode"] == "source_language_code_value" + assert "targetLanguageCodes" in jsonified_request + assert jsonified_request["targetLanguageCodes"] == "target_language_codes_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_translate_text(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_translate_text_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_translate_text._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "sourceLanguageCode", + "targetLanguageCodes", + "inputConfigs", + "outputConfig", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_translate_text_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_batch_translate_text" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_batch_translate_text" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.BatchTranslateTextRequest.pb( + translation_service.BatchTranslateTextRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = translation_service.BatchTranslateTextRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.batch_translate_text( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_translate_text_rest_bad_request( + transport: str = "rest", request_type=translation_service.BatchTranslateTextRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_translate_text(request) + + +def test_batch_translate_text_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.BatchTranslateDocumentRequest, + dict, + ], +) +def test_batch_translate_document_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_translate_document(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_batch_translate_document_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.batch_translate_document + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_translate_document + ] = mock_rpc + + request = {} + client.batch_translate_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.batch_translate_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_batch_translate_document_rest_required_fields( + request_type=translation_service.BatchTranslateDocumentRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["source_language_code"] = "" + request_init["target_language_codes"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_translate_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["sourceLanguageCode"] = "source_language_code_value" + jsonified_request["targetLanguageCodes"] = "target_language_codes_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_translate_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "sourceLanguageCode" in jsonified_request + assert jsonified_request["sourceLanguageCode"] == "source_language_code_value" + assert "targetLanguageCodes" in jsonified_request + assert jsonified_request["targetLanguageCodes"] == "target_language_codes_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_translate_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_translate_document_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_translate_document._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "sourceLanguageCode", + "targetLanguageCodes", + "inputConfigs", + "outputConfig", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_translate_document_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_batch_translate_document" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_batch_translate_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.BatchTranslateDocumentRequest.pb( + translation_service.BatchTranslateDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = translation_service.BatchTranslateDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.batch_translate_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_translate_document_rest_bad_request( + transport: str = "rest", + request_type=translation_service.BatchTranslateDocumentRequest, +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_translate_document(request) + + +def test_batch_translate_document_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + source_language_code="source_language_code_value", + target_language_codes=["target_language_codes_value"], + input_configs=[ + translation_service.BatchDocumentInputConfig( + gcs_source=translation_service.GcsSource( + input_uri="input_uri_value" + ) + ) + ], + output_config=translation_service.BatchDocumentOutputConfig( + gcs_destination=translation_service.GcsDestination( + output_uri_prefix="output_uri_prefix_value" + ) + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.batch_translate_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*}:batchTranslateDocument" + % client.transport._host, + args[1], + ) + + +def test_batch_translate_document_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.batch_translate_document( + translation_service.BatchTranslateDocumentRequest(), + parent="parent_value", + source_language_code="source_language_code_value", + target_language_codes=["target_language_codes_value"], + input_configs=[ + translation_service.BatchDocumentInputConfig( + gcs_source=translation_service.GcsSource( + input_uri="input_uri_value" + ) + ) + ], + output_config=translation_service.BatchDocumentOutputConfig( + gcs_destination=translation_service.GcsDestination( + output_uri_prefix="output_uri_prefix_value" + ) + ), + ) + + +def test_batch_translate_document_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.CreateGlossaryRequest, + dict, + ], +) +def test_create_glossary_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["glossary"] = { + "name": "name_value", + "language_pair": { + "source_language_code": "source_language_code_value", + "target_language_code": "target_language_code_value", + }, + "language_codes_set": { + "language_codes": ["language_codes_value1", "language_codes_value2"] + }, + "input_config": {"gcs_source": {"input_uri": "input_uri_value"}}, + "entry_count": 1210, + "submit_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + "display_name": "display_name_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = translation_service.CreateGlossaryRequest.meta.fields["glossary"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["glossary"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["glossary"][field])): + del request_init["glossary"][field][i][subfield] + else: + del request_init["glossary"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_glossary(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_glossary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_glossary in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_glossary] = mock_rpc + + request = {} + client.create_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_glossary_rest_required_fields( + request_type=translation_service.CreateGlossaryRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_glossary(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_glossary_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_glossary._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "glossary", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_glossary_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_create_glossary" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_create_glossary" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.CreateGlossaryRequest.pb( + translation_service.CreateGlossaryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = translation_service.CreateGlossaryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_glossary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_glossary_rest_bad_request( + transport: str = "rest", request_type=translation_service.CreateGlossaryRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_glossary(request) + + +def test_create_glossary_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + glossary=translation_service.Glossary(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_glossary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*}/glossaries" % client.transport._host, + args[1], + ) + + +def test_create_glossary_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_glossary( + translation_service.CreateGlossaryRequest(), + parent="parent_value", + glossary=translation_service.Glossary(name="name_value"), + ) + + +def test_create_glossary_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.UpdateGlossaryRequest, + dict, + ], +) +def test_update_glossary_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "glossary": {"name": "projects/sample1/locations/sample2/glossaries/sample3"} + } + request_init["glossary"] = { + "name": "projects/sample1/locations/sample2/glossaries/sample3", + "language_pair": { + "source_language_code": "source_language_code_value", + "target_language_code": "target_language_code_value", + }, + "language_codes_set": { + "language_codes": ["language_codes_value1", "language_codes_value2"] + }, + "input_config": {"gcs_source": {"input_uri": "input_uri_value"}}, + "entry_count": 1210, + "submit_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + "display_name": "display_name_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = translation_service.UpdateGlossaryRequest.meta.fields["glossary"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["glossary"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["glossary"][field])): + del request_init["glossary"][field][i][subfield] + else: + del request_init["glossary"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_glossary(request) + + # Establish that the response is the type that we expect. 
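+ # update_glossary is a long-running method: the client wraps the returned
+ # operations_pb2.Operation in an api_core operation future, so the test
+ # inspects the underlying proto via response.operation.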
+ assert response.operation.name == "operations/spam" + + +def test_update_glossary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_glossary in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_glossary] = mock_rpc + + request = {} + client.update_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_glossary_rest_required_fields( + request_type=translation_service.UpdateGlossaryRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_glossary._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_glossary(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_glossary_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_glossary._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("glossary",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_glossary_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_update_glossary" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_update_glossary" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.UpdateGlossaryRequest.pb( + translation_service.UpdateGlossaryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = translation_service.UpdateGlossaryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_glossary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_glossary_rest_bad_request( + transport: str = "rest", request_type=translation_service.UpdateGlossaryRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "glossary": {"name": "projects/sample1/locations/sample2/glossaries/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_glossary(request) + + +def test_update_glossary_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "glossary": { + "name": "projects/sample1/locations/sample2/glossaries/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + glossary=translation_service.Glossary(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_glossary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{glossary.name=projects/*/locations/*/glossaries/*}" + % client.transport._host, + args[1], + ) + + +def test_update_glossary_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_glossary( + translation_service.UpdateGlossaryRequest(), + glossary=translation_service.Glossary(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_glossary_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.ListGlossariesRequest, + dict, + ], +) +def test_list_glossaries_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
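+ # A non-empty next_page_token marks this as a partial page; the client wraps
+ # the response in a ListGlossariesPager, as the assertions below verify.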
+ return_value = translation_service.ListGlossariesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.ListGlossariesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_glossaries(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListGlossariesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_glossaries_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_glossaries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_glossaries] = mock_rpc + + request = {} + client.list_glossaries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_glossaries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_glossaries_rest_required_fields( + request_type=translation_service.ListGlossariesRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_glossaries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_glossaries._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = translation_service.ListGlossariesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = translation_service.ListGlossariesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_glossaries(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_glossaries_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_glossaries._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_glossaries_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_list_glossaries" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_list_glossaries" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.ListGlossariesRequest.pb( + translation_service.ListGlossariesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = translation_service.ListGlossariesResponse.to_json( + translation_service.ListGlossariesResponse() + ) + + request = translation_service.ListGlossariesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = translation_service.ListGlossariesResponse() + + client.list_glossaries( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_glossaries_rest_bad_request( + transport: str = "rest", request_type=translation_service.ListGlossariesRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_glossaries(request) + + +def test_list_glossaries_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = translation_service.ListGlossariesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.ListGlossariesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_glossaries(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*}/glossaries" % client.transport._host, + args[1], + ) + + +def test_list_glossaries_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_glossaries( + translation_service.ListGlossariesRequest(), + parent="parent_value", + ) + + +def test_list_glossaries_rest_pager(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + translation_service.ListGlossariesResponse( + glossaries=[ + translation_service.Glossary(), + translation_service.Glossary(), + translation_service.Glossary(), + ], + next_page_token="abc", + ), + translation_service.ListGlossariesResponse( + glossaries=[], + next_page_token="def", + ), + translation_service.ListGlossariesResponse( + glossaries=[ + translation_service.Glossary(), + ], + next_page_token="ghi", + ), + translation_service.ListGlossariesResponse( + glossaries=[ + translation_service.Glossary(), + translation_service.Glossary(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + translation_service.ListGlossariesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_glossaries(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, translation_service.Glossary) for i in results) + + pages = list(client.list_glossaries(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.GetGlossaryRequest, + dict, + ], +) +def test_get_glossary_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = translation_service.Glossary( + name="name_value", + entry_count=1210, + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.Glossary.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_glossary(request) + + # Establish that the response is the type that we expect. 
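+ # get_glossary is a unary call, so the client returns the Glossary message
+ # directly rather than an operation future.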
+ assert isinstance(response, translation_service.Glossary) + assert response.name == "name_value" + assert response.entry_count == 1210 + assert response.display_name == "display_name_value" + + +def test_get_glossary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_glossary in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_glossary] = mock_rpc + + request = {} + client.get_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_glossary_rest_required_fields( + request_type=translation_service.GetGlossaryRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = translation_service.Glossary() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = translation_service.Glossary.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_glossary(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_glossary_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_glossary._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_glossary_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_get_glossary" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_get_glossary" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.GetGlossaryRequest.pb( + translation_service.GetGlossaryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = translation_service.Glossary.to_json( + translation_service.Glossary() + ) + + request = translation_service.GetGlossaryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = translation_service.Glossary() + + client.get_glossary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_glossary_rest_bad_request( + transport: str = "rest", request_type=translation_service.GetGlossaryRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_glossary(request) + + +def test_get_glossary_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = translation_service.Glossary() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/glossaries/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.Glossary.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_glossary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{name=projects/*/locations/*/glossaries/*}" % client.transport._host, + args[1], + ) + + +def test_get_glossary_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_glossary( + translation_service.GetGlossaryRequest(), + name="name_value", + ) + + +def test_get_glossary_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.DeleteGlossaryRequest, + dict, + ], +) +def test_delete_glossary_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_glossary(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_glossary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_glossary in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_glossary] = mock_rpc + + request = {} + client.delete_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_glossary_rest_required_fields( + request_type=translation_service.DeleteGlossaryRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_glossary(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_glossary_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_glossary._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_glossary_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_delete_glossary" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_delete_glossary" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.DeleteGlossaryRequest.pb( + translation_service.DeleteGlossaryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = translation_service.DeleteGlossaryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_glossary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_glossary_rest_bad_request( + transport: str = "rest", request_type=translation_service.DeleteGlossaryRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_glossary(request) + + +def test_delete_glossary_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/glossaries/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_glossary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{name=projects/*/locations/*/glossaries/*}" % client.transport._host, + args[1], + ) + + +def test_delete_glossary_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_glossary( + translation_service.DeleteGlossaryRequest(), + name="name_value", + ) + + +def test_delete_glossary_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.GetGlossaryEntryRequest, + dict, + ], +) +def test_get_glossary_entry_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/glossaryEntries/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common.GlossaryEntry( + name="name_value", + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common.GlossaryEntry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_glossary_entry(request) + + # Establish that the response is the type that we expect. 
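+ # The mocked body above was built by converting the proto-plus GlossaryEntry
+ # to raw protobuf (.pb) before json_format serialization; the client parses it
+ # back, so the field values round-trip into these assertions.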
+ assert isinstance(response, common.GlossaryEntry) + assert response.name == "name_value" + assert response.description == "description_value" + + +def test_get_glossary_entry_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_glossary_entry in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_glossary_entry + ] = mock_rpc + + request = {} + client.get_glossary_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_glossary_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_glossary_entry_rest_required_fields( + request_type=translation_service.GetGlossaryEntryRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_glossary_entry._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_glossary_entry._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common.GlossaryEntry() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common.GlossaryEntry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_glossary_entry(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_glossary_entry_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_glossary_entry._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_glossary_entry_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_get_glossary_entry" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_get_glossary_entry" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.GetGlossaryEntryRequest.pb( + translation_service.GetGlossaryEntryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common.GlossaryEntry.to_json(common.GlossaryEntry()) + + request = translation_service.GetGlossaryEntryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common.GlossaryEntry() + + client.get_glossary_entry( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_glossary_entry_rest_bad_request( + transport: str = "rest", request_type=translation_service.GetGlossaryEntryRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/glossaryEntries/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_glossary_entry(request) + + +def test_get_glossary_entry_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common.GlossaryEntry() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/glossaryEntries/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common.GlossaryEntry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_glossary_entry(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{name=projects/*/locations/*/glossaries/*/glossaryEntries/*}" + % client.transport._host, + args[1], + ) + + +def test_get_glossary_entry_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_glossary_entry( + translation_service.GetGlossaryEntryRequest(), + name="name_value", + ) + + +def test_get_glossary_entry_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.ListGlossaryEntriesRequest, + dict, + ], +) +def test_list_glossary_entries_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/glossaries/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = translation_service.ListGlossaryEntriesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.ListGlossaryEntriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_glossary_entries(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListGlossaryEntriesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_glossary_entries_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_glossary_entries + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_glossary_entries + ] = mock_rpc + + request = {} + client.list_glossary_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_glossary_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_glossary_entries_rest_required_fields( + request_type=translation_service.ListGlossaryEntriesRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_glossary_entries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_glossary_entries._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = translation_service.ListGlossaryEntriesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = translation_service.ListGlossaryEntriesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_glossary_entries(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_glossary_entries_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_glossary_entries._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_glossary_entries_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_list_glossary_entries" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_list_glossary_entries" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.ListGlossaryEntriesRequest.pb( + translation_service.ListGlossaryEntriesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + translation_service.ListGlossaryEntriesResponse.to_json( + translation_service.ListGlossaryEntriesResponse() + ) + ) + + request = translation_service.ListGlossaryEntriesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = translation_service.ListGlossaryEntriesResponse() + + client.list_glossary_entries( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_glossary_entries_rest_bad_request( + transport: str = "rest", request_type=translation_service.ListGlossaryEntriesRequest +): + 
client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/glossaries/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_glossary_entries(request) + + +def test_list_glossary_entries_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = translation_service.ListGlossaryEntriesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/glossaries/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.ListGlossaryEntriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_glossary_entries(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*/glossaries/*}/glossaryEntries" + % client.transport._host, + args[1], + ) + + +def test_list_glossary_entries_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_glossary_entries( + translation_service.ListGlossaryEntriesRequest(), + parent="parent_value", + ) + + +def test_list_glossary_entries_rest_pager(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + translation_service.ListGlossaryEntriesResponse( + glossary_entries=[ + common.GlossaryEntry(), + common.GlossaryEntry(), + common.GlossaryEntry(), + ], + next_page_token="abc", + ), + translation_service.ListGlossaryEntriesResponse( + glossary_entries=[], + next_page_token="def", + ), + translation_service.ListGlossaryEntriesResponse( + glossary_entries=[ + common.GlossaryEntry(), + ], + next_page_token="ghi", + ), + translation_service.ListGlossaryEntriesResponse( + glossary_entries=[ + common.GlossaryEntry(), + common.GlossaryEntry(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + translation_service.ListGlossaryEntriesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/glossaries/sample3" + } + + pager = client.list_glossary_entries(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, common.GlossaryEntry) for i in results) + + pages = list(client.list_glossary_entries(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.CreateGlossaryEntryRequest, + dict, + ], +) +def test_create_glossary_entry_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/glossaries/sample3"} + request_init["glossary_entry"] = { + "name": "name_value", + "terms_pair": { + "source_term": { + "language_code": "language_code_value", + "text": "text_value", + }, + "target_term": {}, + }, + "terms_set": {"terms": {}}, + "description": "description_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = translation_service.CreateGlossaryEntryRequest.meta.fields[ + "glossary_entry" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["glossary_entry"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["glossary_entry"][field])): + del request_init["glossary_entry"][field][i][subfield] + else: + del request_init["glossary_entry"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common.GlossaryEntry( + name="name_value", + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common.GlossaryEntry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_glossary_entry(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common.GlossaryEntry) + assert response.name == "name_value" + assert response.description == "description_value" + + +def test_create_glossary_entry_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_glossary_entry + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_glossary_entry + ] = mock_rpc + + request = {} + client.create_glossary_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_glossary_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_glossary_entry_rest_required_fields( + request_type=translation_service.CreateGlossaryEntryRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_glossary_entry._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_glossary_entry._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common.GlossaryEntry() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common.GlossaryEntry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_glossary_entry(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_glossary_entry_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_glossary_entry._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "glossaryEntry", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_glossary_entry_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_create_glossary_entry" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_create_glossary_entry" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.CreateGlossaryEntryRequest.pb( + translation_service.CreateGlossaryEntryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common.GlossaryEntry.to_json(common.GlossaryEntry()) + + request = translation_service.CreateGlossaryEntryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common.GlossaryEntry() + + client.create_glossary_entry( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_glossary_entry_rest_bad_request( + transport: str = "rest", request_type=translation_service.CreateGlossaryEntryRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/glossaries/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_glossary_entry(request) + + +def test_create_glossary_entry_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common.GlossaryEntry() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/glossaries/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + glossary_entry=common.GlossaryEntry(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common.GlossaryEntry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_glossary_entry(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*/glossaries/*}/glossaryEntries" + % client.transport._host, + args[1], + ) + + +def test_create_glossary_entry_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_glossary_entry( + translation_service.CreateGlossaryEntryRequest(), + parent="parent_value", + glossary_entry=common.GlossaryEntry(name="name_value"), + ) + + +def test_create_glossary_entry_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.UpdateGlossaryEntryRequest, + dict, + ], +) +def test_update_glossary_entry_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "glossary_entry": { + "name": "projects/sample1/locations/sample2/glossaries/sample3/glossaryEntries/sample4" + } + } + request_init["glossary_entry"] = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/glossaryEntries/sample4", + "terms_pair": { + "source_term": { + "language_code": "language_code_value", + "text": "text_value", + }, + "target_term": {}, + }, + "terms_set": {"terms": {}}, + "description": "description_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = translation_service.UpdateGlossaryEntryRequest.meta.fields[ + "glossary_entry" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["glossary_entry"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["glossary_entry"][field])): + del request_init["glossary_entry"][field][i][subfield] + else: + del request_init["glossary_entry"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common.GlossaryEntry( + name="name_value", + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common.GlossaryEntry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_glossary_entry(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common.GlossaryEntry) + assert response.name == "name_value" + assert response.description == "description_value" + + +def test_update_glossary_entry_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_glossary_entry + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_glossary_entry + ] = mock_rpc + + request = {} + client.update_glossary_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_glossary_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_glossary_entry_rest_required_fields( + request_type=translation_service.UpdateGlossaryEntryRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_glossary_entry._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_glossary_entry._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common.GlossaryEntry() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common.GlossaryEntry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_glossary_entry(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_glossary_entry_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_glossary_entry._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("glossaryEntry",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_glossary_entry_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_update_glossary_entry" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_update_glossary_entry" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.UpdateGlossaryEntryRequest.pb( + translation_service.UpdateGlossaryEntryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common.GlossaryEntry.to_json(common.GlossaryEntry()) + + request = translation_service.UpdateGlossaryEntryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common.GlossaryEntry() + + client.update_glossary_entry( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_glossary_entry_rest_bad_request( + transport: str = "rest", request_type=translation_service.UpdateGlossaryEntryRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "glossary_entry": { + "name": "projects/sample1/locations/sample2/glossaries/sample3/glossaryEntries/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_glossary_entry(request) + + +def test_update_glossary_entry_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common.GlossaryEntry() + + # get arguments that satisfy an http rule for this method + sample_request = { + "glossary_entry": { + "name": "projects/sample1/locations/sample2/glossaries/sample3/glossaryEntries/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + glossary_entry=common.GlossaryEntry(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common.GlossaryEntry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_glossary_entry(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{glossary_entry.name=projects/*/locations/*/glossaries/*/glossaryEntries/*}" + % client.transport._host, + args[1], + ) + + +def test_update_glossary_entry_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_glossary_entry( + translation_service.UpdateGlossaryEntryRequest(), + glossary_entry=common.GlossaryEntry(name="name_value"), + ) + + +def test_update_glossary_entry_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.DeleteGlossaryEntryRequest, + dict, + ], +) +def test_delete_glossary_entry_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/glossaryEntries/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_glossary_entry(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_glossary_entry_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_glossary_entry + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_glossary_entry + ] = mock_rpc + + request = {} + client.delete_glossary_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_glossary_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_glossary_entry_rest_required_fields( + request_type=translation_service.DeleteGlossaryEntryRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_glossary_entry._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_glossary_entry._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_glossary_entry(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_glossary_entry_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_glossary_entry._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_glossary_entry_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_delete_glossary_entry" + ) as pre: + pre.assert_not_called() + pb_message = translation_service.DeleteGlossaryEntryRequest.pb( + translation_service.DeleteGlossaryEntryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = translation_service.DeleteGlossaryEntryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_glossary_entry( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_glossary_entry_rest_bad_request( + transport: str = "rest", request_type=translation_service.DeleteGlossaryEntryRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/glossaryEntries/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_glossary_entry(request) + + +def test_delete_glossary_entry_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/glossaryEntries/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_glossary_entry(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{name=projects/*/locations/*/glossaries/*/glossaryEntries/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_glossary_entry_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_glossary_entry( + translation_service.DeleteGlossaryEntryRequest(), + name="name_value", + ) + + +def test_delete_glossary_entry_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + automl_translation.CreateDatasetRequest, + dict, + ], +) +def test_create_dataset_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["dataset"] = { + "name": "name_value", + "display_name": "display_name_value", + "source_language_code": "source_language_code_value", + "target_language_code": "target_language_code_value", + "example_count": 1396, + "train_example_count": 2033, + "validate_example_count": 2333, + "test_example_count": 1939, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = automl_translation.CreateDatasetRequest.meta.fields["dataset"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["dataset"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["dataset"][field])): + del request_init["dataset"][field][i][subfield] + else: + del request_init["dataset"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_dataset(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_dataset_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_dataset in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_dataset] = mock_rpc + + request = {} + client.create_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_dataset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_dataset_rest_required_fields( + request_type=automl_translation.CreateDatasetRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_dataset(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_dataset_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_dataset._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "dataset", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_dataset_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_create_dataset" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_create_dataset" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = automl_translation.CreateDatasetRequest.pb( + automl_translation.CreateDatasetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = automl_translation.CreateDatasetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_dataset( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_dataset_rest_bad_request( + transport: str = "rest", request_type=automl_translation.CreateDatasetRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_dataset(request) + + +def test_create_dataset_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + dataset=automl_translation.Dataset(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_dataset(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*}/datasets" % client.transport._host, + args[1], + ) + + +def test_create_dataset_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_dataset( + automl_translation.CreateDatasetRequest(), + parent="parent_value", + dataset=automl_translation.Dataset(name="name_value"), + ) + + +def test_create_dataset_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + automl_translation.GetDatasetRequest, + dict, + ], +) +def test_get_dataset_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/datasets/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = automl_translation.Dataset( + name="name_value", + display_name="display_name_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + example_count=1396, + train_example_count=2033, + validate_example_count=2333, + test_example_count=1939, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = automl_translation.Dataset.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_dataset(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, automl_translation.Dataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.source_language_code == "source_language_code_value" + assert response.target_language_code == "target_language_code_value" + assert response.example_count == 1396 + assert response.train_example_count == 2033 + assert response.validate_example_count == 2333 + assert response.test_example_count == 1939 + + +def test_get_dataset_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_dataset in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_dataset] = mock_rpc + + request = {} + client.get_dataset(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_dataset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_dataset_rest_required_fields( + request_type=automl_translation.GetDatasetRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = automl_translation.Dataset() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = automl_translation.Dataset.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_dataset(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_dataset_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_dataset._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_dataset_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_get_dataset" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_get_dataset" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = automl_translation.GetDatasetRequest.pb( + automl_translation.GetDatasetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = automl_translation.Dataset.to_json( + automl_translation.Dataset() + ) + + request = automl_translation.GetDatasetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = automl_translation.Dataset() + + client.get_dataset( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_dataset_rest_bad_request( + transport: str = "rest", request_type=automl_translation.GetDatasetRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/datasets/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_dataset(request) + + +def test_get_dataset_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = automl_translation.Dataset() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/datasets/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = automl_translation.Dataset.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_dataset(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{name=projects/*/locations/*/datasets/*}" % client.transport._host, + args[1], + ) + + +def test_get_dataset_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_dataset( + automl_translation.GetDatasetRequest(), + name="name_value", + ) + + +def test_get_dataset_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + automl_translation.ListDatasetsRequest, + dict, + ], +) +def test_list_datasets_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = automl_translation.ListDatasetsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = automl_translation.ListDatasetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_datasets(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDatasetsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_datasets_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_datasets in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_datasets] = mock_rpc + + request = {} + client.list_datasets(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_datasets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_datasets_rest_required_fields( + request_type=automl_translation.ListDatasetsRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_datasets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_datasets._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = automl_translation.ListDatasetsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = automl_translation.ListDatasetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_datasets(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_datasets_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_datasets._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_datasets_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_list_datasets" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_list_datasets" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = automl_translation.ListDatasetsRequest.pb( + automl_translation.ListDatasetsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = automl_translation.ListDatasetsResponse.to_json( + automl_translation.ListDatasetsResponse() + ) + + request = automl_translation.ListDatasetsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = automl_translation.ListDatasetsResponse() + + client.list_datasets( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_datasets_rest_bad_request( + transport: str = "rest", request_type=automl_translation.ListDatasetsRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_datasets(request) + + +def test_list_datasets_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = automl_translation.ListDatasetsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = automl_translation.ListDatasetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_datasets(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*}/datasets" % client.transport._host, + args[1], + ) + + +def test_list_datasets_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_datasets( + automl_translation.ListDatasetsRequest(), + parent="parent_value", + ) + + +def test_list_datasets_rest_pager(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + automl_translation.ListDatasetsResponse( + datasets=[ + automl_translation.Dataset(), + automl_translation.Dataset(), + automl_translation.Dataset(), + ], + next_page_token="abc", + ), + automl_translation.ListDatasetsResponse( + datasets=[], + next_page_token="def", + ), + automl_translation.ListDatasetsResponse( + datasets=[ + automl_translation.Dataset(), + ], + next_page_token="ghi", + ), + automl_translation.ListDatasetsResponse( + datasets=[ + automl_translation.Dataset(), + automl_translation.Dataset(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + automl_translation.ListDatasetsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_datasets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, automl_translation.Dataset) for i in results) + + pages = list(client.list_datasets(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + automl_translation.DeleteDatasetRequest, + dict, + ], +) +def test_delete_dataset_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/datasets/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_dataset(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_dataset_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_dataset in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.delete_dataset] = mock_rpc + + request = {} + client.delete_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_dataset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_dataset_rest_required_fields( + request_type=automl_translation.DeleteDatasetRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_dataset(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_dataset_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_dataset._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_dataset_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_delete_dataset" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_delete_dataset" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = automl_translation.DeleteDatasetRequest.pb( + automl_translation.DeleteDatasetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = automl_translation.DeleteDatasetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_dataset( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_dataset_rest_bad_request( + transport: str = "rest", request_type=automl_translation.DeleteDatasetRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/datasets/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_dataset(request) + + +def test_delete_dataset_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/datasets/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_dataset(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{name=projects/*/locations/*/datasets/*}" % client.transport._host, + args[1], + ) + + +def test_delete_dataset_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dataset( + automl_translation.DeleteDatasetRequest(), + name="name_value", + ) + + +def test_delete_dataset_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.CreateAdaptiveMtDatasetRequest, + dict, + ], +) +def test_create_adaptive_mt_dataset_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["adaptive_mt_dataset"] = { + "name": "name_value", + "display_name": "display_name_value", + "source_language_code": "source_language_code_value", + "target_language_code": "target_language_code_value", + "example_count": 1396, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = adaptive_mt.CreateAdaptiveMtDatasetRequest.meta.fields[ + "adaptive_mt_dataset" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["adaptive_mt_dataset"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["adaptive_mt_dataset"][field])): + del request_init["adaptive_mt_dataset"][field][i][subfield] + else: + del request_init["adaptive_mt_dataset"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.AdaptiveMtDataset( + name="name_value", + display_name="display_name_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + example_count=1396, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.AdaptiveMtDataset.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_adaptive_mt_dataset(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, adaptive_mt.AdaptiveMtDataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.source_language_code == "source_language_code_value" + assert response.target_language_code == "target_language_code_value" + assert response.example_count == 1396 + + +def test_create_adaptive_mt_dataset_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_adaptive_mt_dataset + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_adaptive_mt_dataset + ] = mock_rpc + + request = {} + client.create_adaptive_mt_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_adaptive_mt_dataset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_adaptive_mt_dataset_rest_required_fields( + request_type=adaptive_mt.CreateAdaptiveMtDatasetRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_adaptive_mt_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_adaptive_mt_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.AdaptiveMtDataset() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = adaptive_mt.AdaptiveMtDataset.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_adaptive_mt_dataset(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_adaptive_mt_dataset_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_adaptive_mt_dataset._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "adaptiveMtDataset", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_adaptive_mt_dataset_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_create_adaptive_mt_dataset" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_create_adaptive_mt_dataset" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = adaptive_mt.CreateAdaptiveMtDatasetRequest.pb( + adaptive_mt.CreateAdaptiveMtDatasetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = adaptive_mt.AdaptiveMtDataset.to_json( + adaptive_mt.AdaptiveMtDataset() + ) + + request = adaptive_mt.CreateAdaptiveMtDatasetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = adaptive_mt.AdaptiveMtDataset() + + client.create_adaptive_mt_dataset( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_adaptive_mt_dataset_rest_bad_request( + transport: str = "rest", request_type=adaptive_mt.CreateAdaptiveMtDatasetRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_adaptive_mt_dataset(request) + + +def test_create_adaptive_mt_dataset_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.AdaptiveMtDataset() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + adaptive_mt_dataset=adaptive_mt.AdaptiveMtDataset(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.AdaptiveMtDataset.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_adaptive_mt_dataset(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*}/adaptiveMtDatasets" + % client.transport._host, + args[1], + ) + + +def test_create_adaptive_mt_dataset_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_adaptive_mt_dataset( + adaptive_mt.CreateAdaptiveMtDatasetRequest(), + parent="parent_value", + adaptive_mt_dataset=adaptive_mt.AdaptiveMtDataset(name="name_value"), + ) + + +def test_create_adaptive_mt_dataset_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.DeleteAdaptiveMtDatasetRequest, + dict, + ], +) +def test_delete_adaptive_mt_dataset_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_adaptive_mt_dataset(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_adaptive_mt_dataset_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_adaptive_mt_dataset + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_adaptive_mt_dataset + ] = mock_rpc + + request = {} + client.delete_adaptive_mt_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_adaptive_mt_dataset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_adaptive_mt_dataset_rest_required_fields( + request_type=adaptive_mt.DeleteAdaptiveMtDatasetRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_adaptive_mt_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_adaptive_mt_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_adaptive_mt_dataset(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_adaptive_mt_dataset_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_adaptive_mt_dataset._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_adaptive_mt_dataset_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_delete_adaptive_mt_dataset" + ) as pre: + pre.assert_not_called() + pb_message = adaptive_mt.DeleteAdaptiveMtDatasetRequest.pb( + adaptive_mt.DeleteAdaptiveMtDatasetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = adaptive_mt.DeleteAdaptiveMtDatasetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_adaptive_mt_dataset( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_adaptive_mt_dataset_rest_bad_request( + transport: str = "rest", request_type=adaptive_mt.DeleteAdaptiveMtDatasetRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_adaptive_mt_dataset(request) + + +def test_delete_adaptive_mt_dataset_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_adaptive_mt_dataset(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_adaptive_mt_dataset_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_adaptive_mt_dataset( + adaptive_mt.DeleteAdaptiveMtDatasetRequest(), + name="name_value", + ) + + +def test_delete_adaptive_mt_dataset_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.GetAdaptiveMtDatasetRequest, + dict, + ], +) +def test_get_adaptive_mt_dataset_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.AdaptiveMtDataset( + name="name_value", + display_name="display_name_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + example_count=1396, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.AdaptiveMtDataset.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_adaptive_mt_dataset(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, adaptive_mt.AdaptiveMtDataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.source_language_code == "source_language_code_value" + assert response.target_language_code == "target_language_code_value" + assert response.example_count == 1396 + + +def test_get_adaptive_mt_dataset_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_adaptive_mt_dataset + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_adaptive_mt_dataset + ] = mock_rpc + + request = {} + client.get_adaptive_mt_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_adaptive_mt_dataset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_adaptive_mt_dataset_rest_required_fields( + request_type=adaptive_mt.GetAdaptiveMtDatasetRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_adaptive_mt_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_adaptive_mt_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.AdaptiveMtDataset() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = adaptive_mt.AdaptiveMtDataset.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_adaptive_mt_dataset(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_adaptive_mt_dataset_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_adaptive_mt_dataset._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_adaptive_mt_dataset_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_get_adaptive_mt_dataset" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_get_adaptive_mt_dataset" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = adaptive_mt.GetAdaptiveMtDatasetRequest.pb( + adaptive_mt.GetAdaptiveMtDatasetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = adaptive_mt.AdaptiveMtDataset.to_json( + adaptive_mt.AdaptiveMtDataset() + ) + + request = adaptive_mt.GetAdaptiveMtDatasetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = adaptive_mt.AdaptiveMtDataset() + + client.get_adaptive_mt_dataset( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_adaptive_mt_dataset_rest_bad_request( + transport: str = "rest", request_type=adaptive_mt.GetAdaptiveMtDatasetRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_adaptive_mt_dataset(request) + + +def test_get_adaptive_mt_dataset_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.AdaptiveMtDataset() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.AdaptiveMtDataset.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_adaptive_mt_dataset(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*}" + % client.transport._host, + args[1], + ) + + +def test_get_adaptive_mt_dataset_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_adaptive_mt_dataset( + adaptive_mt.GetAdaptiveMtDatasetRequest(), + name="name_value", + ) + + +def test_get_adaptive_mt_dataset_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.ListAdaptiveMtDatasetsRequest, + dict, + ], +) +def test_list_adaptive_mt_datasets_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_adaptive_mt_datasets(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAdaptiveMtDatasetsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_adaptive_mt_datasets_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_adaptive_mt_datasets + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_adaptive_mt_datasets + ] = mock_rpc + + request = {} + client.list_adaptive_mt_datasets(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_adaptive_mt_datasets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_adaptive_mt_datasets_rest_required_fields( + request_type=adaptive_mt.ListAdaptiveMtDatasetsRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_adaptive_mt_datasets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_adaptive_mt_datasets._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_adaptive_mt_datasets(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_adaptive_mt_datasets_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_adaptive_mt_datasets._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_adaptive_mt_datasets_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_list_adaptive_mt_datasets" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_list_adaptive_mt_datasets" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = adaptive_mt.ListAdaptiveMtDatasetsRequest.pb( + adaptive_mt.ListAdaptiveMtDatasetsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = adaptive_mt.ListAdaptiveMtDatasetsResponse.to_json( + adaptive_mt.ListAdaptiveMtDatasetsResponse() + ) + + request = adaptive_mt.ListAdaptiveMtDatasetsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse() + + client.list_adaptive_mt_datasets( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_adaptive_mt_datasets_rest_bad_request( + transport: str = "rest", request_type=adaptive_mt.ListAdaptiveMtDatasetsRequest +): + client = 
TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_adaptive_mt_datasets(request) + + +def test_list_adaptive_mt_datasets_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_adaptive_mt_datasets(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*}/adaptiveMtDatasets" + % client.transport._host, + args[1], + ) + + +def test_list_adaptive_mt_datasets_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_adaptive_mt_datasets( + adaptive_mt.ListAdaptiveMtDatasetsRequest(), + parent="parent_value", + ) + + +def test_list_adaptive_mt_datasets_rest_pager(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + adaptive_mt.ListAdaptiveMtDatasetsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_adaptive_mt_datasets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, adaptive_mt.AdaptiveMtDataset) for i in results) + + pages = list(client.list_adaptive_mt_datasets(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.AdaptiveMtTranslateRequest, + dict, + ], +) +def test_adaptive_mt_translate_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.AdaptiveMtTranslateResponse( + language_code="language_code_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.AdaptiveMtTranslateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.adaptive_mt_translate(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, adaptive_mt.AdaptiveMtTranslateResponse) + assert response.language_code == "language_code_value" + + +def test_adaptive_mt_translate_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.adaptive_mt_translate + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.adaptive_mt_translate + ] = mock_rpc + + request = {} + client.adaptive_mt_translate(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.adaptive_mt_translate(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_adaptive_mt_translate_rest_required_fields( + request_type=adaptive_mt.AdaptiveMtTranslateRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["dataset"] = "" + request_init["content"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).adaptive_mt_translate._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["dataset"] = "dataset_value" + jsonified_request["content"] = "content_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).adaptive_mt_translate._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "dataset" in jsonified_request + assert jsonified_request["dataset"] == "dataset_value" + assert "content" in jsonified_request + assert jsonified_request["content"] == "content_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.AdaptiveMtTranslateResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = adaptive_mt.AdaptiveMtTranslateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.adaptive_mt_translate(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_adaptive_mt_translate_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.adaptive_mt_translate._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "dataset", + "content", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_adaptive_mt_translate_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_adaptive_mt_translate" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_adaptive_mt_translate" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = adaptive_mt.AdaptiveMtTranslateRequest.pb( + adaptive_mt.AdaptiveMtTranslateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = adaptive_mt.AdaptiveMtTranslateResponse.to_json( + adaptive_mt.AdaptiveMtTranslateResponse() + ) + + request = adaptive_mt.AdaptiveMtTranslateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = adaptive_mt.AdaptiveMtTranslateResponse() + + client.adaptive_mt_translate( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_adaptive_mt_translate_rest_bad_request( + transport: str = "rest", request_type=adaptive_mt.AdaptiveMtTranslateRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.adaptive_mt_translate(request) + + +def test_adaptive_mt_translate_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.AdaptiveMtTranslateResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + content=["content_value"], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.AdaptiveMtTranslateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.adaptive_mt_translate(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*}:adaptiveMtTranslate" + % client.transport._host, + args[1], + ) + + +def test_adaptive_mt_translate_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.adaptive_mt_translate( + adaptive_mt.AdaptiveMtTranslateRequest(), + parent="parent_value", + content=["content_value"], + ) + + +def test_adaptive_mt_translate_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.GetAdaptiveMtFileRequest, + dict, + ], +) +def test_get_adaptive_mt_file_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = adaptive_mt.AdaptiveMtFile( + name="name_value", + display_name="display_name_value", + entry_count=1210, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.AdaptiveMtFile.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_adaptive_mt_file(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, adaptive_mt.AdaptiveMtFile) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.entry_count == 1210 + + +def test_get_adaptive_mt_file_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_adaptive_mt_file in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_adaptive_mt_file + ] = mock_rpc + + request = {} + client.get_adaptive_mt_file(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_adaptive_mt_file(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_adaptive_mt_file_rest_required_fields( + request_type=adaptive_mt.GetAdaptiveMtFileRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_adaptive_mt_file._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_adaptive_mt_file._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.AdaptiveMtFile() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = adaptive_mt.AdaptiveMtFile.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_adaptive_mt_file(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_adaptive_mt_file_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_adaptive_mt_file._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_adaptive_mt_file_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_get_adaptive_mt_file" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_get_adaptive_mt_file" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = adaptive_mt.GetAdaptiveMtFileRequest.pb( + adaptive_mt.GetAdaptiveMtFileRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = adaptive_mt.AdaptiveMtFile.to_json( + adaptive_mt.AdaptiveMtFile() + ) + + request = adaptive_mt.GetAdaptiveMtFileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = adaptive_mt.AdaptiveMtFile() + + client.get_adaptive_mt_file( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_adaptive_mt_file_rest_bad_request( + transport: str = "rest", request_type=adaptive_mt.GetAdaptiveMtFileRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" + } + request = request_type(**request_init) + 
+ # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_adaptive_mt_file(request) + + +def test_get_adaptive_mt_file_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.AdaptiveMtFile() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.AdaptiveMtFile.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_adaptive_mt_file(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*/adaptiveMtFiles/*}" + % client.transport._host, + args[1], + ) + + +def test_get_adaptive_mt_file_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_adaptive_mt_file( + adaptive_mt.GetAdaptiveMtFileRequest(), + name="name_value", + ) + + +def test_get_adaptive_mt_file_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.DeleteAdaptiveMtFileRequest, + dict, + ], +) +def test_delete_adaptive_mt_file_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_adaptive_mt_file(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_adaptive_mt_file_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_adaptive_mt_file + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_adaptive_mt_file + ] = mock_rpc + + request = {} + client.delete_adaptive_mt_file(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_adaptive_mt_file(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_adaptive_mt_file_rest_required_fields( + request_type=adaptive_mt.DeleteAdaptiveMtFileRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_adaptive_mt_file._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_adaptive_mt_file._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_adaptive_mt_file(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_adaptive_mt_file_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_adaptive_mt_file._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_adaptive_mt_file_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_delete_adaptive_mt_file" + ) as pre: + pre.assert_not_called() + pb_message = adaptive_mt.DeleteAdaptiveMtFileRequest.pb( + adaptive_mt.DeleteAdaptiveMtFileRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = adaptive_mt.DeleteAdaptiveMtFileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_adaptive_mt_file( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_adaptive_mt_file_rest_bad_request( + transport: str = "rest", request_type=adaptive_mt.DeleteAdaptiveMtFileRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_adaptive_mt_file(request) + + +def test_delete_adaptive_mt_file_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_adaptive_mt_file(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*/adaptiveMtFiles/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_adaptive_mt_file_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_adaptive_mt_file( + adaptive_mt.DeleteAdaptiveMtFileRequest(), + name="name_value", + ) + + +def test_delete_adaptive_mt_file_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.ImportAdaptiveMtFileRequest, + dict, + ], +) +def test_import_adaptive_mt_file_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.ImportAdaptiveMtFileResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.ImportAdaptiveMtFileResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.import_adaptive_mt_file(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, adaptive_mt.ImportAdaptiveMtFileResponse) + + +def test_import_adaptive_mt_file_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.import_adaptive_mt_file + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.import_adaptive_mt_file + ] = mock_rpc + + request = {} + client.import_adaptive_mt_file(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.import_adaptive_mt_file(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_import_adaptive_mt_file_rest_required_fields( + request_type=adaptive_mt.ImportAdaptiveMtFileRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_adaptive_mt_file._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_adaptive_mt_file._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.ImportAdaptiveMtFileResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = adaptive_mt.ImportAdaptiveMtFileResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.import_adaptive_mt_file(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_import_adaptive_mt_file_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.import_adaptive_mt_file._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_import_adaptive_mt_file_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_import_adaptive_mt_file" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_import_adaptive_mt_file" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = adaptive_mt.ImportAdaptiveMtFileRequest.pb( + adaptive_mt.ImportAdaptiveMtFileRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = adaptive_mt.ImportAdaptiveMtFileResponse.to_json( + adaptive_mt.ImportAdaptiveMtFileResponse() + ) + + request = adaptive_mt.ImportAdaptiveMtFileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = adaptive_mt.ImportAdaptiveMtFileResponse() + + client.import_adaptive_mt_file( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_import_adaptive_mt_file_rest_bad_request( + transport: str = "rest", request_type=adaptive_mt.ImportAdaptiveMtFileRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.import_adaptive_mt_file(request) + + +def test_import_adaptive_mt_file_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.ImportAdaptiveMtFileResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.ImportAdaptiveMtFileResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.import_adaptive_mt_file(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*/adaptiveMtDatasets/*}:importAdaptiveMtFile" + % client.transport._host, + args[1], + ) + + +def test_import_adaptive_mt_file_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.import_adaptive_mt_file( + adaptive_mt.ImportAdaptiveMtFileRequest(), + parent="parent_value", + ) + + +def test_import_adaptive_mt_file_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.ListAdaptiveMtFilesRequest, + dict, + ], +) +def test_list_adaptive_mt_files_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = adaptive_mt.ListAdaptiveMtFilesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.ListAdaptiveMtFilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_adaptive_mt_files(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAdaptiveMtFilesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_adaptive_mt_files_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_adaptive_mt_files + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_adaptive_mt_files + ] = mock_rpc + + request = {} + client.list_adaptive_mt_files(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_adaptive_mt_files(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_adaptive_mt_files_rest_required_fields( + request_type=adaptive_mt.ListAdaptiveMtFilesRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_adaptive_mt_files._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_adaptive_mt_files._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = adaptive_mt.ListAdaptiveMtFilesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = adaptive_mt.ListAdaptiveMtFilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_adaptive_mt_files(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_adaptive_mt_files_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_adaptive_mt_files._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_adaptive_mt_files_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_list_adaptive_mt_files" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_list_adaptive_mt_files" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = adaptive_mt.ListAdaptiveMtFilesRequest.pb( + adaptive_mt.ListAdaptiveMtFilesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = adaptive_mt.ListAdaptiveMtFilesResponse.to_json( + adaptive_mt.ListAdaptiveMtFilesResponse() + ) + + request = adaptive_mt.ListAdaptiveMtFilesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = adaptive_mt.ListAdaptiveMtFilesResponse() + + client.list_adaptive_mt_files( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_adaptive_mt_files_rest_bad_request( + transport: str = "rest", request_type=adaptive_mt.ListAdaptiveMtFilesRequest +): + client = TranslationServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_adaptive_mt_files(request) + + +def test_list_adaptive_mt_files_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.ListAdaptiveMtFilesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.ListAdaptiveMtFilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_adaptive_mt_files(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*/adaptiveMtDatasets/*}/adaptiveMtFiles" + % client.transport._host, + args[1], + ) + + +def test_list_adaptive_mt_files_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_adaptive_mt_files( + adaptive_mt.ListAdaptiveMtFilesRequest(), + parent="parent_value", + ) + + +def test_list_adaptive_mt_files_rest_pager(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + adaptive_mt.ListAdaptiveMtFilesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } + + pager = client.list_adaptive_mt_files(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, adaptive_mt.AdaptiveMtFile) for i in results) + + pages = list(client.list_adaptive_mt_files(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.ListAdaptiveMtSentencesRequest, + dict, + ], +) +def test_list_adaptive_mt_sentences_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.ListAdaptiveMtSentencesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.ListAdaptiveMtSentencesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_adaptive_mt_sentences(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAdaptiveMtSentencesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_adaptive_mt_sentences_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_adaptive_mt_sentences + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_adaptive_mt_sentences + ] = mock_rpc + + request = {} + client.list_adaptive_mt_sentences(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_adaptive_mt_sentences(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_adaptive_mt_sentences_rest_required_fields( + request_type=adaptive_mt.ListAdaptiveMtSentencesRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_adaptive_mt_sentences._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_adaptive_mt_sentences._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.ListAdaptiveMtSentencesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = adaptive_mt.ListAdaptiveMtSentencesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_adaptive_mt_sentences(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_adaptive_mt_sentences_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_adaptive_mt_sentences._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_adaptive_mt_sentences_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_list_adaptive_mt_sentences" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_list_adaptive_mt_sentences" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = adaptive_mt.ListAdaptiveMtSentencesRequest.pb( + adaptive_mt.ListAdaptiveMtSentencesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = adaptive_mt.ListAdaptiveMtSentencesResponse.to_json( + adaptive_mt.ListAdaptiveMtSentencesResponse() + ) + + request = adaptive_mt.ListAdaptiveMtSentencesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = adaptive_mt.ListAdaptiveMtSentencesResponse() + + client.list_adaptive_mt_sentences( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_adaptive_mt_sentences_rest_bad_request( + transport: str = "rest", request_type=adaptive_mt.ListAdaptiveMtSentencesRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_adaptive_mt_sentences(request) + + +def test_list_adaptive_mt_sentences_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.ListAdaptiveMtSentencesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.ListAdaptiveMtSentencesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_adaptive_mt_sentences(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*/adaptiveMtDatasets/*/adaptiveMtFiles/*}/adaptiveMtSentences" + % client.transport._host, + args[1], + ) + + +def test_list_adaptive_mt_sentences_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_adaptive_mt_sentences( + adaptive_mt.ListAdaptiveMtSentencesRequest(), + parent="parent_value", + ) + + +def test_list_adaptive_mt_sentences_rest_pager(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + adaptive_mt.ListAdaptiveMtSentencesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" + } + + pager = client.list_adaptive_mt_sentences(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, adaptive_mt.AdaptiveMtSentence) for i in results) + + pages = list(client.list_adaptive_mt_sentences(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + automl_translation.ImportDataRequest, + dict, + ], +) +def test_import_data_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"dataset": "projects/sample1/locations/sample2/datasets/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.import_data(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_import_data_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.import_data in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.import_data] = mock_rpc + + request = {} + client.import_data(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_data(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_import_data_rest_required_fields( + request_type=automl_translation.ImportDataRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["dataset"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_data._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["dataset"] = "dataset_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_data._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "dataset" in jsonified_request + assert jsonified_request["dataset"] == "dataset_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.import_data(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_import_data_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.import_data._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "dataset", + "inputConfig", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_import_data_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_import_data" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_import_data" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = automl_translation.ImportDataRequest.pb( + automl_translation.ImportDataRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = automl_translation.ImportDataRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.import_data( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_import_data_rest_bad_request( + transport: str = "rest", request_type=automl_translation.ImportDataRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"dataset": "projects/sample1/locations/sample2/datasets/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.import_data(request) + + +def test_import_data_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "dataset": "projects/sample1/locations/sample2/datasets/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + dataset="dataset_value", + input_config=automl_translation.DatasetInputConfig( + input_files=[ + automl_translation.DatasetInputConfig.InputFile(usage="usage_value") + ] + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.import_data(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{dataset=projects/*/locations/*/datasets/*}:importData" + % client.transport._host, + args[1], + ) + + +def test_import_data_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.import_data( + automl_translation.ImportDataRequest(), + dataset="dataset_value", + input_config=automl_translation.DatasetInputConfig( + input_files=[ + automl_translation.DatasetInputConfig.InputFile(usage="usage_value") + ] + ), + ) + + +def test_import_data_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + automl_translation.ExportDataRequest, + dict, + ], +) +def test_export_data_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"dataset": "projects/sample1/locations/sample2/datasets/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.export_data(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_export_data_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.export_data in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.export_data] = mock_rpc + + request = {} + client.export_data(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_data(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_export_data_rest_required_fields( + request_type=automl_translation.ExportDataRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["dataset"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_data._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["dataset"] = "dataset_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_data._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "dataset" in jsonified_request + assert jsonified_request["dataset"] == "dataset_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.export_data(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_export_data_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.export_data._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "dataset", + "outputConfig", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_data_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_export_data" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_export_data" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = automl_translation.ExportDataRequest.pb( + automl_translation.ExportDataRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = automl_translation.ExportDataRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.export_data( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_export_data_rest_bad_request( + transport: str = "rest", request_type=automl_translation.ExportDataRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"dataset": "projects/sample1/locations/sample2/datasets/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.export_data(request) + + +def test_export_data_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "dataset": "projects/sample1/locations/sample2/datasets/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + dataset="dataset_value", + output_config=automl_translation.DatasetOutputConfig( + gcs_destination=common.GcsOutputDestination( + output_uri_prefix="output_uri_prefix_value" + ) + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.export_data(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{dataset=projects/*/locations/*/datasets/*}:exportData" + % client.transport._host, + args[1], + ) + + +def test_export_data_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.export_data( + automl_translation.ExportDataRequest(), + dataset="dataset_value", + output_config=automl_translation.DatasetOutputConfig( + gcs_destination=common.GcsOutputDestination( + output_uri_prefix="output_uri_prefix_value" + ) + ), + ) + + +def test_export_data_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + automl_translation.ListExamplesRequest, + dict, + ], +) +def test_list_examples_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/datasets/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = automl_translation.ListExamplesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = automl_translation.ListExamplesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_examples(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListExamplesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_examples_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_examples in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_examples] = mock_rpc + + request = {} + client.list_examples(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_examples(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_examples_rest_required_fields( + request_type=automl_translation.ListExamplesRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_examples._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_examples._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = automl_translation.ListExamplesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = automl_translation.ListExamplesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_examples(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_examples_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_examples._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_examples_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_list_examples" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_list_examples" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = automl_translation.ListExamplesRequest.pb( + automl_translation.ListExamplesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = automl_translation.ListExamplesResponse.to_json( + automl_translation.ListExamplesResponse() + ) + + request = automl_translation.ListExamplesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = automl_translation.ListExamplesResponse() + + client.list_examples( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_examples_rest_bad_request( + transport: str = "rest", request_type=automl_translation.ListExamplesRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/datasets/sample3"} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_examples(request) + + +def test_list_examples_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = automl_translation.ListExamplesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/datasets/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = automl_translation.ListExamplesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_examples(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*/datasets/*}/examples" + % client.transport._host, + args[1], + ) + + +def test_list_examples_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_examples( + automl_translation.ListExamplesRequest(), + parent="parent_value", + ) + + +def test_list_examples_rest_pager(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + automl_translation.ListExamplesResponse( + examples=[ + automl_translation.Example(), + automl_translation.Example(), + automl_translation.Example(), + ], + next_page_token="abc", + ), + automl_translation.ListExamplesResponse( + examples=[], + next_page_token="def", + ), + automl_translation.ListExamplesResponse( + examples=[ + automl_translation.Example(), + ], + next_page_token="ghi", + ), + automl_translation.ListExamplesResponse( + examples=[ + automl_translation.Example(), + automl_translation.Example(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + automl_translation.ListExamplesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/datasets/sample3" + } + + pager = client.list_examples(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, automl_translation.Example) for i in results) + + pages = list(client.list_examples(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + automl_translation.CreateModelRequest, + dict, + ], +) +def test_create_model_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["model"] = { + "name": "name_value", + "display_name": "display_name_value", + "dataset": "dataset_value", + "source_language_code": "source_language_code_value", + "target_language_code": "target_language_code_value", + "train_example_count": 2033, + "validate_example_count": 2333, + "test_example_count": 1939, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = automl_translation.CreateModelRequest.meta.fields["model"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["model"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["model"][field])): + del request_init["model"][field][i][subfield] + else: + del request_init["model"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_model(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_model_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_model in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.create_model] = mock_rpc + + request = {} + client.create_model(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_model_rest_required_fields( + request_type=automl_translation.CreateModelRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_model(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_model_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_model._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "model", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_model_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_create_model" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_create_model" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = automl_translation.CreateModelRequest.pb( + automl_translation.CreateModelRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = automl_translation.CreateModelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_model( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_model_rest_bad_request( + transport: str = "rest", request_type=automl_translation.CreateModelRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_model(request) + + +def test_create_model_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + model=automl_translation.Model(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_model(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*}/models" % client.transport._host, + args[1], + ) + + +def test_create_model_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_model( + automl_translation.CreateModelRequest(), + parent="parent_value", + model=automl_translation.Model(name="name_value"), + ) + + +def test_create_model_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + automl_translation.ListModelsRequest, + dict, + ], +) +def test_list_models_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = automl_translation.ListModelsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = automl_translation.ListModelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_models(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListModelsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_models_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_models in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_models] = mock_rpc + + request = {} + client.list_models(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_models(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_models_rest_required_fields( + request_type=automl_translation.ListModelsRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_models._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_models._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = automl_translation.ListModelsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = automl_translation.ListModelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_models(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_models_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_models._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_models_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_list_models" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_list_models" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = automl_translation.ListModelsRequest.pb( + automl_translation.ListModelsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = automl_translation.ListModelsResponse.to_json( + automl_translation.ListModelsResponse() + ) + + request = automl_translation.ListModelsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = automl_translation.ListModelsResponse() + + client.list_models( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_models_rest_bad_request( + transport: str = "rest", request_type=automl_translation.ListModelsRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_models(request) + + +def test_list_models_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = automl_translation.ListModelsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = automl_translation.ListModelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_models(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*}/models" % client.transport._host, + args[1], + ) + + +def test_list_models_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_models( + automl_translation.ListModelsRequest(), + parent="parent_value", + ) + + +def test_list_models_rest_pager(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + automl_translation.ListModelsResponse( + models=[ + automl_translation.Model(), + automl_translation.Model(), + automl_translation.Model(), + ], + next_page_token="abc", + ), + automl_translation.ListModelsResponse( + models=[], + next_page_token="def", + ), + automl_translation.ListModelsResponse( + models=[ + automl_translation.Model(), + ], + next_page_token="ghi", + ), + automl_translation.ListModelsResponse( + models=[ + automl_translation.Model(), + automl_translation.Model(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + automl_translation.ListModelsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_models(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, automl_translation.Model) for i in results) + + pages = list(client.list_models(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + automl_translation.GetModelRequest, + dict, + ], +) +def test_get_model_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/models/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = automl_translation.Model( + name="name_value", + display_name="display_name_value", + dataset="dataset_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + train_example_count=2033, + validate_example_count=2333, + test_example_count=1939, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = automl_translation.Model.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_model(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, automl_translation.Model) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.dataset == "dataset_value" + assert response.source_language_code == "source_language_code_value" + assert response.target_language_code == "target_language_code_value" + assert response.train_example_count == 2033 + assert response.validate_example_count == 2333 + assert response.test_example_count == 1939 + + +def test_get_model_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_model in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_model] = mock_rpc + + request = {} + client.get_model(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_model_rest_required_fields( + request_type=automl_translation.GetModelRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = automl_translation.Model() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = automl_translation.Model.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_model(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_model_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_model_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_get_model" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_get_model" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = automl_translation.GetModelRequest.pb( + automl_translation.GetModelRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = automl_translation.Model.to_json( + automl_translation.Model() + ) + + request = automl_translation.GetModelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = automl_translation.Model() + + client.get_model( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_model_rest_bad_request( + transport: str = "rest", request_type=automl_translation.GetModelRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/models/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_model(request) + + +def test_get_model_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = automl_translation.Model() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/models/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = automl_translation.Model.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_model(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{name=projects/*/locations/*/models/*}" % client.transport._host, + args[1], + ) + + +def test_get_model_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_model( + automl_translation.GetModelRequest(), + name="name_value", + ) + + +def test_get_model_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + automl_translation.DeleteModelRequest, + dict, + ], +) +def test_delete_model_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/models/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_model(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_model_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_model in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_model] = mock_rpc + + request = {} + client.delete_model(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_model_rest_required_fields( + request_type=automl_translation.DeleteModelRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_model(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_model_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_model_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_delete_model" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_delete_model" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = automl_translation.DeleteModelRequest.pb( + automl_translation.DeleteModelRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = automl_translation.DeleteModelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_model( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_model_rest_bad_request( + transport: str = "rest", request_type=automl_translation.DeleteModelRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/models/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_model(request) + + +def test_delete_model_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/models/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_model(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{name=projects/*/locations/*/models/*}" % client.transport._host, + args[1], + ) + + +def test_delete_model_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_model( + automl_translation.DeleteModelRequest(), + name="name_value", + ) + + +def test_delete_model_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.TranslationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.TranslationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TranslationServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.TranslationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TranslationServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TranslationServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.TranslationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TranslationServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.TranslationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TranslationServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.TranslationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.TranslationServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TranslationServiceGrpcTransport, + transports.TranslationServiceGrpcAsyncIOTransport, + transports.TranslationServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = TranslationServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.TranslationServiceGrpcTransport, + ) + + +def test_translation_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TranslationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_translation_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.translate_v3.services.translation_service.transports.TranslationServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.TranslationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "translate_text", + "romanize_text", + "detect_language", + "get_supported_languages", + "translate_document", + "batch_translate_text", + "batch_translate_document", + "create_glossary", + "update_glossary", + "list_glossaries", + "get_glossary", + "delete_glossary", + "get_glossary_entry", + "list_glossary_entries", + "create_glossary_entry", + "update_glossary_entry", + "delete_glossary_entry", + "create_dataset", + "get_dataset", + "list_datasets", + "delete_dataset", + "create_adaptive_mt_dataset", + "delete_adaptive_mt_dataset", + "get_adaptive_mt_dataset", + "list_adaptive_mt_datasets", + "adaptive_mt_translate", + "get_adaptive_mt_file", + "delete_adaptive_mt_file", + "import_adaptive_mt_file", + "list_adaptive_mt_files", + "list_adaptive_mt_sentences", + "import_data", + "export_data", + "list_examples", + "create_model", + "list_models", + "get_model", + "delete_model", + "get_location", + "list_locations", + "get_operation", + "wait_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_translation_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.translate_v3.services.translation_service.transports.TranslationServiceTransport._prep_wrapped_messages" @@ -16535,746 +30488,2317 @@ def test_translation_service_base_transport_with_credentials_file(): credentials_file="credentials.json", quota_project_id="octopus", ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-translation", - ), - quota_project_id="octopus", + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-translation", + ), + quota_project_id="octopus", + ) + + +def test_translation_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.translate_v3.services.translation_service.transports.TranslationServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TranslationServiceTransport() + adc.assert_called_once() + + +def test_translation_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TranslationServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-translation", + ), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TranslationServiceGrpcTransport, + transports.TranslationServiceGrpcAsyncIOTransport, + ], +) +def test_translation_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-translation", + ), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TranslationServiceGrpcTransport, + transports.TranslationServiceGrpcAsyncIOTransport, + transports.TranslationServiceRestTransport, + ], +) +def test_translation_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.TranslationServiceGrpcTransport, grpc_helpers), + (transports.TranslationServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_translation_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "translate.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-translation", + ), + scopes=["1", "2"], + default_host="translate.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TranslationServiceGrpcTransport, + transports.TranslationServiceGrpcAsyncIOTransport, + ], +) +def test_translation_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds,
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_translation_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.TranslationServiceRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_translation_service_rest_lro_client():
+    client = TranslationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_translation_service_host_no_port(transport_name): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="translate.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "translate.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://translate.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_translation_service_host_with_port(transport_name): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="translate.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "translate.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://translate.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_translation_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = TranslationServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = TranslationServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.translate_text._session + session2 = client2.transport.translate_text._session + assert session1 != session2 + session1 = client1.transport.romanize_text._session + session2 = client2.transport.romanize_text._session + assert session1 != session2 + session1 = client1.transport.detect_language._session + session2 = client2.transport.detect_language._session + assert session1 != session2 + session1 = client1.transport.get_supported_languages._session + session2 = client2.transport.get_supported_languages._session + assert session1 != session2 + session1 = client1.transport.translate_document._session + session2 = client2.transport.translate_document._session + assert session1 != session2 + session1 = client1.transport.batch_translate_text._session + session2 = client2.transport.batch_translate_text._session + assert session1 != session2 + session1 = client1.transport.batch_translate_document._session + session2 = client2.transport.batch_translate_document._session + assert session1 != session2 + session1 = client1.transport.create_glossary._session + session2 = client2.transport.create_glossary._session + assert session1 != session2 + session1 = client1.transport.update_glossary._session + session2 = client2.transport.update_glossary._session + assert session1 != session2 + session1 = client1.transport.list_glossaries._session + session2 = client2.transport.list_glossaries._session + assert session1 != session2 + session1 = client1.transport.get_glossary._session + session2 = client2.transport.get_glossary._session + assert session1 != session2 + session1 = client1.transport.delete_glossary._session + session2 = client2.transport.delete_glossary._session + assert session1 != session2 + session1 = client1.transport.get_glossary_entry._session + session2 = client2.transport.get_glossary_entry._session + assert session1 != session2 + session1 = client1.transport.list_glossary_entries._session + 
session2 = client2.transport.list_glossary_entries._session + assert session1 != session2 + session1 = client1.transport.create_glossary_entry._session + session2 = client2.transport.create_glossary_entry._session + assert session1 != session2 + session1 = client1.transport.update_glossary_entry._session + session2 = client2.transport.update_glossary_entry._session + assert session1 != session2 + session1 = client1.transport.delete_glossary_entry._session + session2 = client2.transport.delete_glossary_entry._session + assert session1 != session2 + session1 = client1.transport.create_dataset._session + session2 = client2.transport.create_dataset._session + assert session1 != session2 + session1 = client1.transport.get_dataset._session + session2 = client2.transport.get_dataset._session + assert session1 != session2 + session1 = client1.transport.list_datasets._session + session2 = client2.transport.list_datasets._session + assert session1 != session2 + session1 = client1.transport.delete_dataset._session + session2 = client2.transport.delete_dataset._session + assert session1 != session2 + session1 = client1.transport.create_adaptive_mt_dataset._session + session2 = client2.transport.create_adaptive_mt_dataset._session + assert session1 != session2 + session1 = client1.transport.delete_adaptive_mt_dataset._session + session2 = client2.transport.delete_adaptive_mt_dataset._session + assert session1 != session2 + session1 = client1.transport.get_adaptive_mt_dataset._session + session2 = client2.transport.get_adaptive_mt_dataset._session + assert session1 != session2 + session1 = client1.transport.list_adaptive_mt_datasets._session + session2 = client2.transport.list_adaptive_mt_datasets._session + assert session1 != session2 + session1 = client1.transport.adaptive_mt_translate._session + session2 = client2.transport.adaptive_mt_translate._session + assert session1 != session2 + session1 = client1.transport.get_adaptive_mt_file._session + session2 = client2.transport.get_adaptive_mt_file._session + assert session1 != session2 + session1 = client1.transport.delete_adaptive_mt_file._session + session2 = client2.transport.delete_adaptive_mt_file._session + assert session1 != session2 + session1 = client1.transport.import_adaptive_mt_file._session + session2 = client2.transport.import_adaptive_mt_file._session + assert session1 != session2 + session1 = client1.transport.list_adaptive_mt_files._session + session2 = client2.transport.list_adaptive_mt_files._session + assert session1 != session2 + session1 = client1.transport.list_adaptive_mt_sentences._session + session2 = client2.transport.list_adaptive_mt_sentences._session + assert session1 != session2 + session1 = client1.transport.import_data._session + session2 = client2.transport.import_data._session + assert session1 != session2 + session1 = client1.transport.export_data._session + session2 = client2.transport.export_data._session + assert session1 != session2 + session1 = client1.transport.list_examples._session + session2 = client2.transport.list_examples._session + assert session1 != session2 + session1 = client1.transport.create_model._session + session2 = client2.transport.create_model._session + assert session1 != session2 + session1 = client1.transport.list_models._session + session2 = client2.transport.list_models._session + assert session1 != session2 + session1 = client1.transport.get_model._session + session2 = client2.transport.get_model._session + assert session1 != session2 + session1 = client1.transport.delete_model._session + 
session2 = client2.transport.delete_model._session + assert session1 != session2 + + +def test_translation_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.TranslationServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_translation_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.TranslationServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.TranslationServiceGrpcTransport, + transports.TranslationServiceGrpcAsyncIOTransport, + ], +) +def test_translation_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.TranslationServiceGrpcTransport,
+        transports.TranslationServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_translation_service_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_translation_service_grpc_lro_client():
+    client = TranslationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_translation_service_grpc_lro_async_client():
+    client = TranslationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_adaptive_mt_dataset_path():
+    project = "squid"
+    location = "clam"
+    dataset = "whelk"
+    expected = (
+        "projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}".format(
+            project=project,
+            location=location,
+            dataset=dataset,
+        )
+    )
+    actual = TranslationServiceClient.adaptive_mt_dataset_path(
+        project, location, dataset
+    )
+    assert expected == actual
+
+
+def test_parse_adaptive_mt_dataset_path():
+    expected = {
+        "project": "octopus",
+        "location": "oyster",
+        "dataset": "nudibranch",
+    }
+    path = TranslationServiceClient.adaptive_mt_dataset_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = TranslationServiceClient.parse_adaptive_mt_dataset_path(path) + assert expected == actual + + +def test_adaptive_mt_file_path(): + project = "cuttlefish" + location = "mussel" + dataset = "winkle" + file = "nautilus" + expected = "projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}".format( + project=project, + location=location, + dataset=dataset, + file=file, + ) + actual = TranslationServiceClient.adaptive_mt_file_path( + project, location, dataset, file + ) + assert expected == actual + + +def test_parse_adaptive_mt_file_path(): + expected = { + "project": "scallop", + "location": "abalone", + "dataset": "squid", + "file": "clam", + } + path = TranslationServiceClient.adaptive_mt_file_path(**expected) + + # Check that the path construction is reversible. + actual = TranslationServiceClient.parse_adaptive_mt_file_path(path) + assert expected == actual + + +def test_adaptive_mt_sentence_path(): + project = "whelk" + location = "octopus" + dataset = "oyster" + file = "nudibranch" + sentence = "cuttlefish" + expected = "projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}/adaptiveMtSentences/{sentence}".format( + project=project, + location=location, + dataset=dataset, + file=file, + sentence=sentence, + ) + actual = TranslationServiceClient.adaptive_mt_sentence_path( + project, location, dataset, file, sentence + ) + assert expected == actual + + +def test_parse_adaptive_mt_sentence_path(): + expected = { + "project": "mussel", + "location": "winkle", + "dataset": "nautilus", + "file": "scallop", + "sentence": "abalone", + } + path = TranslationServiceClient.adaptive_mt_sentence_path(**expected) + + # Check that the path construction is reversible. + actual = TranslationServiceClient.parse_adaptive_mt_sentence_path(path) + assert expected == actual + + +def test_dataset_path(): + project = "squid" + location = "clam" + dataset = "whelk" + expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( + project=project, + location=location, + dataset=dataset, + ) + actual = TranslationServiceClient.dataset_path(project, location, dataset) + assert expected == actual + + +def test_parse_dataset_path(): + expected = { + "project": "octopus", + "location": "oyster", + "dataset": "nudibranch", + } + path = TranslationServiceClient.dataset_path(**expected) + + # Check that the path construction is reversible. + actual = TranslationServiceClient.parse_dataset_path(path) + assert expected == actual + + +def test_example_path(): + project = "cuttlefish" + location = "mussel" + dataset = "winkle" + example = "nautilus" + expected = "projects/{project}/locations/{location}/datasets/{dataset}/examples/{example}".format( + project=project, + location=location, + dataset=dataset, + example=example, + ) + actual = TranslationServiceClient.example_path(project, location, dataset, example) + assert expected == actual + + +def test_parse_example_path(): + expected = { + "project": "scallop", + "location": "abalone", + "dataset": "squid", + "example": "clam", + } + path = TranslationServiceClient.example_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TranslationServiceClient.parse_example_path(path) + assert expected == actual + + +def test_glossary_path(): + project = "whelk" + location = "octopus" + glossary = "oyster" + expected = "projects/{project}/locations/{location}/glossaries/{glossary}".format( + project=project, + location=location, + glossary=glossary, + ) + actual = TranslationServiceClient.glossary_path(project, location, glossary) + assert expected == actual + + +def test_parse_glossary_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "glossary": "mussel", + } + path = TranslationServiceClient.glossary_path(**expected) + + # Check that the path construction is reversible. + actual = TranslationServiceClient.parse_glossary_path(path) + assert expected == actual + + +def test_glossary_entry_path(): + project = "winkle" + location = "nautilus" + glossary = "scallop" + glossary_entry = "abalone" + expected = "projects/{project}/locations/{location}/glossaries/{glossary}/glossaryEntries/{glossary_entry}".format( + project=project, + location=location, + glossary=glossary, + glossary_entry=glossary_entry, + ) + actual = TranslationServiceClient.glossary_entry_path( + project, location, glossary, glossary_entry + ) + assert expected == actual + + +def test_parse_glossary_entry_path(): + expected = { + "project": "squid", + "location": "clam", + "glossary": "whelk", + "glossary_entry": "octopus", + } + path = TranslationServiceClient.glossary_entry_path(**expected) + + # Check that the path construction is reversible. + actual = TranslationServiceClient.parse_glossary_entry_path(path) + assert expected == actual + + +def test_model_path(): + project = "oyster" + location = "nudibranch" + model = "cuttlefish" + expected = "projects/{project}/locations/{location}/models/{model}".format( + project=project, + location=location, + model=model, + ) + actual = TranslationServiceClient.model_path(project, location, model) + assert expected == actual + + +def test_parse_model_path(): + expected = { + "project": "mussel", + "location": "winkle", + "model": "nautilus", + } + path = TranslationServiceClient.model_path(**expected) + + # Check that the path construction is reversible. + actual = TranslationServiceClient.parse_model_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = TranslationServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = TranslationServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = TranslationServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = TranslationServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = TranslationServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TranslationServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = TranslationServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = TranslationServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = TranslationServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( + project=project, + ) + actual = TranslationServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = TranslationServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = TranslationServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = TranslationServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = TranslationServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = TranslationServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.TranslationServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.TranslationServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = TranslationServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_wait_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.WaitOperationRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.wait_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.WaitOperationRequest, + dict, + ], +) +def test_wait_operation_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.wait_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_delete_operation(transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_translation_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.translate_v3.services.translation_service.transports.TranslationServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TranslationServiceTransport() - adc.assert_called_once() + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] -def test_translation_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - TranslationServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-translation", - ), - quota_project_id=None, +def test_cancel_operation_from_dict(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } ) + call.assert_called() -@pytest.mark.parametrize( - "transport_class", - [ - transports.TranslationServiceGrpcTransport, - transports.TranslationServiceGrpcAsyncIOTransport, - ], -) -def test_translation_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-translation", - ), - quota_project_id="octopus", +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } ) + call.assert_called() -@pytest.mark.parametrize( - "transport_class", - [ - transports.TranslationServiceGrpcTransport, - transports.TranslationServiceGrpcAsyncIOTransport, - transports.TranslationServiceRestTransport, - ], -) -def test_translation_service_transport_auth_gdch_credentials(transport_class): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) +def test_wait_operation(transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.WaitOperationRequest() -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.TranslationServiceGrpcTransport, grpc_helpers), - (transports.TranslationServiceGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_translation_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.wait_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - create_channel.assert_called_with( - "translate.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-translation", - ), - scopes=["1", "2"], - default_host="translate.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -@pytest.mark.parametrize( - "transport_class", - [ - transports.TranslationServiceGrpcTransport, - transports.TranslationServiceGrpcAsyncIOTransport, - ], -) -def test_translation_service_grpc_transport_client_cert_source_for_mtls( - transport_class, -): - cred = ga_credentials.AnonymousCredentials() +@pytest.mark.asyncio +async def test_wait_operation(transport: str = "grpc_asyncio"): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.WaitOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], + response = await client.wait_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_wait_operation_field_headers(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.WaitOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.wait_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_wait_operation_field_headers_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.WaitOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() ) + await client.wait_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] -def test_translation_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.TranslationServiceRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback +def test_wait_operation_from_dict(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.wait_operation( + request={ + "name": "locations", + } ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + call.assert_called() -def test_translation_service_rest_lro_client(): - client = TranslationServiceClient( +@pytest.mark.asyncio +async def test_wait_operation_from_dict_async(): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - transport = client.transport + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.wait_operation( + request={ + "name": "locations", + } + ) + call.assert_called() - # Ensure that we have a api-core operations client. 
- assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, + +def test_get_operation(transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_translation_service_host_no_port(transport_name): - client = TranslationServiceClient( + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="translate.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "translate.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://translate.googleapis.com" + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_translation_service_host_with_port(transport_name): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="translate.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "translate.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://translate.googleapis.com:8000" ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+    request = operations_pb2.GetOperationRequest()
+    request.name = "locations"
-@pytest.mark.parametrize(
-    "transport_name",
-    [
-        "rest",
-    ],
-)
-def test_translation_service_client_transport_session_collision(transport_name):
-    creds1 = ga_credentials.AnonymousCredentials()
-    creds2 = ga_credentials.AnonymousCredentials()
-    client1 = TranslationServiceClient(
-        credentials=creds1,
-        transport=transport_name,
-    )
-    client2 = TranslationServiceClient(
-        credentials=creds2,
-        transport=transport_name,
-    )
-    session1 = client1.transport.translate_text._session
-    session2 = client2.transport.translate_text._session
-    assert session1 != session2
-    session1 = client1.transport.detect_language._session
-    session2 = client2.transport.detect_language._session
-    assert session1 != session2
-    session1 = client1.transport.get_supported_languages._session
-    session2 = client2.transport.get_supported_languages._session
-    assert session1 != session2
-    session1 = client1.transport.translate_document._session
-    session2 = client2.transport.translate_document._session
-    assert session1 != session2
-    session1 = client1.transport.batch_translate_text._session
-    session2 = client2.transport.batch_translate_text._session
-    assert session1 != session2
-    session1 = client1.transport.batch_translate_document._session
-    session2 = client2.transport.batch_translate_document._session
-    assert session1 != session2
-    session1 = client1.transport.create_glossary._session
-    session2 = client2.transport.create_glossary._session
-    assert session1 != session2
-    session1 = client1.transport.list_glossaries._session
-    session2 = client2.transport.list_glossaries._session
-    assert session1 != session2
-    session1 = client1.transport.get_glossary._session
-    session2 = client2.transport.get_glossary._session
-    assert session1 != session2
-    session1 = client1.transport.delete_glossary._session
-    session2 = client2.transport.delete_glossary._session
-    assert session1 != session2
-    session1 = client1.transport.create_adaptive_mt_dataset._session
-    session2 = client2.transport.create_adaptive_mt_dataset._session
-    assert session1 != session2
-    session1 = client1.transport.delete_adaptive_mt_dataset._session
-    session2 = client2.transport.delete_adaptive_mt_dataset._session
-    assert session1 != session2
-    session1 = client1.transport.get_adaptive_mt_dataset._session
-    session2 = client2.transport.get_adaptive_mt_dataset._session
-    assert session1 != session2
-    session1 = client1.transport.list_adaptive_mt_datasets._session
-    session2 = client2.transport.list_adaptive_mt_datasets._session
-    assert session1 != session2
-    session1 = client1.transport.adaptive_mt_translate._session
-    session2 = client2.transport.adaptive_mt_translate._session
-    assert session1 != session2
-    session1 = client1.transport.get_adaptive_mt_file._session
-    session2 = client2.transport.get_adaptive_mt_file._session
-    assert session1 != session2
-    session1 = client1.transport.delete_adaptive_mt_file._session
-    session2 = client2.transport.delete_adaptive_mt_file._session
-    assert session1 != session2
-    session1 = client1.transport.import_adaptive_mt_file._session
-    session2 = client2.transport.import_adaptive_mt_file._session
-    assert session1 != session2
-    session1 = client1.transport.list_adaptive_mt_files._session
-    session2 = client2.transport.list_adaptive_mt_files._session
-    assert session1 != session2
-    session1 = client1.transport.list_adaptive_mt_sentences._session
-    session2 = client2.transport.list_adaptive_mt_sentences._session
-    assert session1 != session2
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        call.return_value = operations_pb2.Operation()
+
+        client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations",
+    ) in kw["metadata"]
-def test_translation_service_grpc_transport_channel():
-    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
-    # Check that channel is used if provided.
-    transport = transports.TranslationServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
+@pytest.mark.asyncio
+async def test_get_operation_field_headers_async():
+    client = TranslationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
     )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials == None
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.GetOperationRequest()
+    request.name = "locations"
-def test_translation_service_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        await client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
-    # Check that channel is used if provided.
-    transport = transports.TranslationServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations",
+    ) in kw["metadata"]
+
+
+def test_get_operation_from_dict():
+    client = TranslationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
     )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials == None
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation()
+        response = client.get_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize( - "transport_class", - [ - transports.TranslationServiceGrpcTransport, - transports.TranslationServiceGrpcAsyncIOTransport, - ], -) -def test_translation_service_transport_channel_mtls_with_client_cert_source( - transport_class, -): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() +def test_list_operations(transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [ - transports.TranslationServiceGrpcTransport, - transports.TranslationServiceGrpcAsyncIOTransport, - ], -) -def test_translation_service_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() -def test_translation_service_grpc_lro_client(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", ) - transport = client.transport - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_translation_service_grpc_lro_async_client(): + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", ) - transport = client.transport - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] -def test_adaptive_mt_dataset_path(): - project = "squid" - location = "clam" - dataset = "whelk" - expected = ( - "projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}".format( - project=project, - location=location, - dataset=dataset, - ) - ) - actual = TranslationServiceClient.adaptive_mt_dataset_path( - project, location, dataset + +def test_list_operations_from_dict(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() -def test_parse_adaptive_mt_dataset_path(): - expected = { - "project": "octopus", - "location": "oyster", - "dataset": "nudibranch", - } - path = TranslationServiceClient.adaptive_mt_dataset_path(**expected) - # Check that the path construction is reversible. - actual = TranslationServiceClient.parse_adaptive_mt_dataset_path(path) - assert expected == actual +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() -def test_adaptive_mt_file_path(): - project = "cuttlefish" - location = "mussel" - dataset = "winkle" - file = "nautilus" - expected = "projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}".format( - project=project, - location=location, - dataset=dataset, - file=file, - ) - actual = TranslationServiceClient.adaptive_mt_file_path( - project, location, dataset, file +def test_list_locations(transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - assert expected == actual + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() -def test_parse_adaptive_mt_file_path(): - expected = { - "project": "scallop", - "location": "abalone", - "dataset": "squid", - "file": "clam", - } - path = TranslationServiceClient.adaptive_mt_file_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. - actual = TranslationServiceClient.parse_adaptive_mt_file_path(path) - assert expected == actual + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) -def test_adaptive_mt_sentence_path(): - project = "whelk" - location = "octopus" - dataset = "oyster" - file = "nudibranch" - sentence = "cuttlefish" - expected = "projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}/adaptiveMtSentences/{sentence}".format( - project=project, - location=location, - dataset=dataset, - file=file, - sentence=sentence, - ) - actual = TranslationServiceClient.adaptive_mt_sentence_path( - project, location, dataset, file, sentence +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - assert expected == actual + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() -def test_parse_adaptive_mt_sentence_path(): - expected = { - "project": "mussel", - "location": "winkle", - "dataset": "nautilus", - "file": "scallop", - "sentence": "abalone", - } - path = TranslationServiceClient.adaptive_mt_sentence_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. - actual = TranslationServiceClient.parse_adaptive_mt_sentence_path(path) - assert expected == actual + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) -def test_glossary_path(): - project = "squid" - location = "clam" - glossary = "whelk" - expected = "projects/{project}/locations/{location}/glossaries/{glossary}".format( - project=project, - location=location, - glossary=glossary, +def test_list_locations_field_headers(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - actual = TranslationServiceClient.glossary_path(project, location, glossary) - assert expected == actual + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" -def test_parse_glossary_path(): - expected = { - "project": "octopus", - "location": "oyster", - "glossary": "nudibranch", - } - path = TranslationServiceClient.glossary_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() - # Check that the path construction is reversible. - actual = TranslationServiceClient.parse_glossary_path(path) - assert expected == actual + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), ) - actual = TranslationServiceClient.common_billing_account_path(billing_account) - assert expected == actual + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = TranslationServiceClient.common_billing_account_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. 
- actual = TranslationServiceClient.parse_common_billing_account_path(path) - assert expected == actual + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format( - folder=folder, +def test_list_locations_from_dict(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - actual = TranslationServiceClient.common_folder_path(folder) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = TranslationServiceClient.common_folder_path(**expected) - # Check that the path construction is reversible. - actual = TranslationServiceClient.parse_common_folder_path(path) - assert expected == actual +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format( - organization=organization, +def test_get_location(transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - actual = TranslationServiceClient.common_organization_path(organization) - assert expected == actual + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = TranslationServiceClient.common_organization_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. - actual = TranslationServiceClient.parse_common_organization_path(path) - assert expected == actual + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.Location) -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format( - project=project, +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - actual = TranslationServiceClient.common_project_path(project) - assert expected == actual + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = TranslationServiceClient.common_project_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. - actual = TranslationServiceClient.parse_common_project_path(path) - assert expected == actual + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = TranslationServiceClient.common_location_path(project, location) - assert expected == actual +def test_get_location_field_headers(): + client = TranslationServiceClient(credentials=ga_credentials.AnonymousCredentials()) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = TranslationServiceClient.common_location_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() - # Check that the path construction is reversible. - actual = TranslationServiceClient.parse_common_location_path(path) - assert expected == actual + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials() + ) - with mock.patch.object( - transports.TranslationServiceTransport, "_prep_wrapped_messages" - ) as prep: - client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() ) - prep.assert_called_once_with(client_info) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - with mock.patch.object( - transports.TranslationServiceTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = TranslationServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +def test_get_location_from_dict(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + + response = client.get_location( + request={ + "name": "locations/abc", + } ) - prep.assert_called_once_with(client_info) + call.assert_called() @pytest.mark.asyncio -async def test_transport_close_async(): +async def test_get_location_from_dict_async(): client = TranslationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() def test_transport_close():