From 066bac15e9d1bd4f12d4d0c2bb83e63c8f16e8c7 Mon Sep 17 00:00:00 2001 From: Eric Schmidt Date: Tue, 2 Mar 2021 10:19:22 -0800 Subject: [PATCH] chore: remove unneeded Translation and NL references (#129) * chore: remove unneeded Translation and NL references * fix: tests --- automl/beta/delete_dataset_test.py | 4 +- automl/beta/delete_model_test.py | 2 +- automl/beta/import_dataset_test.py | 4 +- automl/snippets/automl_translation_dataset.py | 106 ----------- automl/snippets/automl_translation_model.py | 176 ------------------ automl/snippets/dataset_test.py | 45 ----- automl/snippets/model_test.py | 36 ---- 7 files changed, 5 insertions(+), 368 deletions(-) delete mode 100755 automl/snippets/automl_translation_dataset.py delete mode 100755 automl/snippets/automl_translation_model.py delete mode 100644 automl/snippets/dataset_test.py delete mode 100644 automl/snippets/model_test.py diff --git a/automl/beta/delete_dataset_test.py b/automl/beta/delete_dataset_test.py index 181db832d20d..cf3cdff56693 100644 --- a/automl/beta/delete_dataset_test.py +++ b/automl/beta/delete_dataset_test.py @@ -29,9 +29,9 @@ def dataset_id(): client = automl.AutoMlClient() project_location = f"projects/{PROJECT_ID}/locations/us-central1" display_name = "test_{}".format(uuid.uuid4()).replace("-", "")[:32] - metadata = automl.TextExtractionDatasetMetadata() + metadata = automl.VideoClassificationDatasetMetadata() dataset = automl.Dataset( - display_name=display_name, text_extraction_dataset_metadata=metadata + display_name=display_name, video_classification_dataset_metadata=metadata ) response = client.create_dataset(parent=project_location, dataset=dataset) dataset_id = response.name.split("/")[-1] diff --git a/automl/beta/delete_model_test.py b/automl/beta/delete_model_test.py index 1d3548f3d5ed..2d1fc2da0321 100644 --- a/automl/beta/delete_model_test.py +++ b/automl/beta/delete_model_test.py @@ -24,7 +24,7 @@ def test_delete_model(capsys): # nonexistent model and confirm that the model was not found, but other # elements of the request were valid. try: - delete_model.delete_model(PROJECT_ID, "TRL0000000000000000000") + delete_model.delete_model(PROJECT_ID, "VCN0000000000000000000") out, _ = capsys.readouterr() assert "The model does not exist" in out except Exception as e: diff --git a/automl/beta/import_dataset_test.py b/automl/beta/import_dataset_test.py index 35d23edc7e8f..05262094fb12 100644 --- a/automl/beta/import_dataset_test.py +++ b/automl/beta/import_dataset_test.py @@ -18,7 +18,7 @@ PROJECT_ID = os.environ["AUTOML_PROJECT_ID"] BUCKET_ID = "{}-lcm".format(PROJECT_ID) -DATASET_ID = "TEN0000000000000000000" +DATASET_ID = "VCN0000000000000000000" def test_import_dataset(capsys): @@ -27,7 +27,7 @@ def test_import_dataset(capsys): # confirm that the dataset was not found, but other elements of the request # were valid. try: - data = "gs://{}/sentiment-analysis/dataset.csv".format(BUCKET_ID) + data = "gs://{}/video-classification/dataset.csv".format(BUCKET_ID) import_dataset.import_dataset(PROJECT_ID, DATASET_ID, data) out, _ = capsys.readouterr() assert ( diff --git a/automl/snippets/automl_translation_dataset.py b/automl/snippets/automl_translation_dataset.py deleted file mode 100755 index 674c2c7c485a..000000000000 --- a/automl/snippets/automl_translation_dataset.py +++ /dev/null @@ -1,106 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This application demonstrates how to perform basic operations on dataset -with the Google AutoML Translation API. - -For more information, see the documentation at -https://cloud.google.com/translate/automl/docs -""" - -import argparse -import os - - -def import_data(project_id, compute_region, dataset_id, path): - """Import sentence pairs to the dataset.""" - # [START automl_translate_import_data] - # TODO(developer): Uncomment and set the following variables - # project_id = 'PROJECT_ID_HERE' - # compute_region = 'COMPUTE_REGION_HERE' - # dataset_id = 'DATASET_ID_HERE' - # path = 'gs://path/to/file.csv' - - from google.cloud import automl_v1beta1 as automl - - client = automl.AutoMlClient() - - # Get the full path of the dataset. - dataset_full_id = client.dataset_path(project_id, compute_region, dataset_id) - - # Get the multiple Google Cloud Storage URIs - input_uris = path.split(",") - input_config = {"gcs_source": {"input_uris": input_uris}} - - # Import data from the input URI - response = client.import_data(name=dataset_full_id, input_config=input_config) - - print("Processing import...") - # synchronous check of operation status - print("Data imported. {}".format(response.result())) - - # [END automl_translate_import_data] - - -def delete_dataset(project_id, compute_region, dataset_id): - """Delete a dataset.""" - # [START automl_translate_delete_dataset] - # TODO(developer): Uncomment and set the following variables - # project_id = 'PROJECT_ID_HERE' - # compute_region = 'COMPUTE_REGION_HERE' - # dataset_id = 'DATASET_ID_HERE' - - from google.cloud import automl_v1beta1 as automl - - client = automl.AutoMlClient() - - # Get the full path of the dataset. - dataset_full_id = client.dataset_path(project_id, compute_region, dataset_id) - - # Delete a dataset. - response = client.delete_dataset(name=dataset_full_id) - - # synchronous check of operation status - print("Dataset deleted. 
{}".format(response.result())) - - # [END automl_translate_delete_dataset] - - -if __name__ == "__main__": - parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter, - ) - subparsers = parser.add_subparsers(dest="command") - - import_data_parser = subparsers.add_parser("import_data", help=import_data.__doc__) - import_data_parser.add_argument("dataset_id") - import_data_parser.add_argument("path") - - delete_dataset_parser = subparsers.add_parser( - "delete_dataset", help=delete_dataset.__doc__ - ) - delete_dataset_parser.add_argument("dataset_id") - - project_id = os.environ["PROJECT_ID"] - compute_region = os.environ["REGION_NAME"] - - args = parser.parse_args() - - if args.command == "import_data": - import_data(project_id, compute_region, args.dataset_id, args.path) - if args.command == "delete_dataset": - delete_dataset(project_id, compute_region, args.dataset_id) diff --git a/automl/snippets/automl_translation_model.py b/automl/snippets/automl_translation_model.py deleted file mode 100755 index 1b1e45c91a52..000000000000 --- a/automl/snippets/automl_translation_model.py +++ /dev/null @@ -1,176 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This application demonstrates how to perform basic operations on model -with the Google AutoML Translation API. - -For more information, see the documentation at -https://cloud.google.com/translate/automl/docs -""" - -import argparse -import os - - -def list_models(project_id, compute_region, filter_): - """List all models.""" - # [START automl_translate_list_models] - # TODO(developer): Uncomment and set the following variables - # project_id = 'PROJECT_ID_HERE' - # compute_region = 'COMPUTE_REGION_HERE' - # filter_ = 'DATASET_ID_HERE' - - from google.cloud import automl_v1beta1 as automl - - client = automl.AutoMlClient() - - # A resource that represents Google Cloud Platform location. - project_location = f"projects/{project_id}/locations/{compute_region}" - - # List all the models available in the region by applying filter. - request = automl.ListModelsRequest(parent=project_location, filter=filter_) - response = client.list_models(request=request) - - print("List of models:") - for model in response: - # Display the model information. 
- if model.deployment_state == automl.Model.DeploymentState.DEPLOYED: - deployment_state = "deployed" - else: - deployment_state = "undeployed" - - print("Model name: {}".format(model.name)) - print("Model id: {}".format(model.name.split("/")[-1])) - print("Model display name: {}".format(model.display_name)) - print("Model create time: {}".format(model.create_time)) - print("Model deployment state: {}".format(deployment_state)) - - # [END automl_translate_list_models] - - -def get_model(project_id, compute_region, model_id): - """Get model details.""" - # [START automl_translate_get_model] - # TODO(developer): Uncomment and set the following variables - # project_id = 'PROJECT_ID_HERE' - # compute_region = 'COMPUTE_REGION_HERE' - # model_id = 'MODEL_ID_HERE' - - from google.cloud import automl_v1beta1 as automl - - client = automl.AutoMlClient() - - # Get the full path of the model. - model_full_id = client.model_path(project_id, compute_region, model_id) - - # Get complete detail of the model. - model = client.get_model(name=model_full_id) - - # Retrieve deployment state. - if model.deployment_state == automl.Model.DeploymentState.DEPLOYED: - deployment_state = "deployed" - else: - deployment_state = "undeployed" - - # Display the model information. - print("Model name: {}".format(model.name)) - print("Model id: {}".format(model.name.split("/")[-1])) - print("Model display name: {}".format(model.display_name)) - print("Model create time: {}".format(model.create_time)) - print("Model deployment state: {}".format(deployment_state)) - - # [END automl_translate_get_model] - - -def delete_model(project_id, compute_region, model_id): - """Delete a model.""" - # [START automl_translate_delete_model] - # TODO(developer): Uncomment and set the following variables - # project_id = 'PROJECT_ID_HERE' - # compute_region = 'COMPUTE_REGION_HERE' - # model_id = 'MODEL_ID_HERE' - - from google.cloud import automl_v1beta1 as automl - - client = automl.AutoMlClient() - - # Get the full path of the model. - model_full_id = client.model_path(project_id, compute_region, model_id) - - # Delete a model. - response = client.delete_model(name=model_full_id) - - # synchronous check of operation status. - print("Model deleted. {}".format(response.result())) - - # [END automl_translate_delete_model] - - -def get_operation_status(operation_full_id): - """Get operation status.""" - # [START automl_translate_get_operation_status] - # TODO(developer): Uncomment and set the following variables - # operation_full_id = - # 'projects//locations//operations/' - - from google.cloud import automl_v1beta1 as automl - - client = automl.AutoMlClient() - - # Get the latest state of a long-running operation. 
- response = client._transport.operations_client.get_operation(operation_full_id) - - print("Operation status: {}".format(response)) - - # [END automl_translate_get_operation_status] - - -if __name__ == "__main__": - parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter, - ) - subparsers = parser.add_subparsers(dest="command") - - get_model_parser = subparsers.add_parser("get_model", help=get_model.__doc__) - get_model_parser.add_argument("model_id") - - get_operation_status_parser = subparsers.add_parser( - "get_operation_status", help=get_operation_status.__doc__ - ) - get_operation_status_parser.add_argument("operation_full_id") - - list_models_parser = subparsers.add_parser("list_models", help=list_models.__doc__) - list_models_parser.add_argument("filter", nargs="?", default="") - - delete_model_parser = subparsers.add_parser( - "delete_model", help=delete_model.__doc__ - ) - delete_model_parser.add_argument("model_id") - - project_id = os.environ["PROJECT_ID"] - compute_region = os.environ["REGION_NAME"] - - args = parser.parse_args() - - if args.command == "list_models": - list_models(project_id, compute_region, args.filter) - if args.command == "get_model": - get_model(project_id, compute_region, args.model_id) - if args.command == "delete_model": - delete_model(project_id, compute_region, args.model_id) - if args.command == "get_operation_status": - get_operation_status(args.operation_full_id) diff --git a/automl/snippets/dataset_test.py b/automl/snippets/dataset_test.py deleted file mode 100644 index a2512d57e40a..000000000000 --- a/automl/snippets/dataset_test.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os - -import automl_translation_dataset - -PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] -BUCKET_ID = "{}-lcm".format(PROJECT_ID) -COMPUTE_REGION = "us-central1" -DATASET_ID = "TRL00000000000000" - - -def test_import_dataset(capsys): - # As importing a dataset can take a long time and only four operations can - # be run on a dataset at once. Try to import into a nonexistent dataset and - # confirm that the dataset was not found, but other elements of the request - # were valid. - try: - data = "gs://{}/sentiment-analysis/dataset.csv".format(BUCKET_ID) - automl_translation_dataset.import_data( - PROJECT_ID, COMPUTE_REGION, DATASET_ID, data - ) - out, _ = capsys.readouterr() - assert ( - "The Dataset doesn't exist or is inaccessible for use with AutoMl." in out - ) - except Exception as e: - assert ( - "The Dataset doesn't exist or is inaccessible for use with AutoMl." 
- in e.message - ) diff --git a/automl/snippets/model_test.py b/automl/snippets/model_test.py deleted file mode 100644 index c64e45ecd591..000000000000 --- a/automl/snippets/model_test.py +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os - -import automl_translation_model - -project_id = os.environ["GOOGLE_CLOUD_PROJECT"] -compute_region = "us-central1" - - -def test_model_list_get_evaluate(capsys): - # list models - automl_translation_model.list_models(project_id, compute_region, "") - out, _ = capsys.readouterr() - list_models_output = out.splitlines() - assert "Model id: " in list_models_output[2] - - # get model - model_id = list_models_output[2].split()[2] - automl_translation_model.get_model(project_id, compute_region, model_id) - out, _ = capsys.readouterr() - assert "Model name: " in out
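
Note (not part of the patch itself): with the Translation and Natural Language snippets removed, the beta tests above now build their fixtures around video classification resources (VideoClassificationDatasetMetadata, "VCN..." IDs, the video-classification CSV path). For reference, below is a minimal, hedged sketch of that fixture pattern, mirroring the create step in automl/beta/delete_dataset_test.py; the AUTOML_PROJECT_ID environment variable, the us-central1 region, and the standalone-script framing are assumptions for illustration, not code introduced by this change.

#!/usr/bin/env python
# Sketch only: mirrors the fixture in automl/beta/delete_dataset_test.py above.
# Assumes AUTOML_PROJECT_ID is set and the us-central1 region, as in the beta tests.
import os
import uuid

from google.cloud import automl_v1beta1 as automl

project_id = os.environ["AUTOML_PROJECT_ID"]  # assumed env var, as in the tests
project_location = f"projects/{project_id}/locations/us-central1"
display_name = "test_{}".format(uuid.uuid4()).replace("-", "")[:32]

client = automl.AutoMlClient()

# Video classification metadata replaces the removed text-extraction metadata.
metadata = automl.VideoClassificationDatasetMetadata()
dataset = automl.Dataset(
    display_name=display_name, video_classification_dataset_metadata=metadata
)

# As in the fixture above, the dataset ID is the last path segment of its resource name.
response = client.create_dataset(parent=project_location, dataset=dataset)
dataset_id = response.name.split("/")[-1]
print("Created dataset id: {}".format(dataset_id))

Cleanup would presumably follow the same pattern the tests already use, passing the full dataset resource path to client.delete_dataset(name=...).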