diff --git a/QUICK_START_GUIDE.md b/QUICK_START_GUIDE.md index 56c2aab5c03..9bf7ddbdac6 100644 --- a/QUICK_START_GUIDE.md +++ b/QUICK_START_GUIDE.md @@ -14,17 +14,13 @@ git checkout -b develop origin/develop git submodule update --init --recursive ``` -2. Export `OTE_SDK_PATH` environment variable to use it inside our scripts: - ``` - export OTE_SDK_PATH=`pwd`/ote_sdk - ``` -3. Install prerequisites by running the following: +2. Install prerequisites by running the following: ``` sudo apt-get install python3-pip python3-venv ``` -4. Search for available scripts that create python virtual environments for different task types: +3. Search for available scripts that create python virtual environments for different task types: ```bash find external/ -name init_venv.sh ``` @@ -38,7 +34,7 @@ Each line in the output gives an `init_venv.sh` script that creates a virtual environment for the corresponding task type. -5. Let's choose a task type. +4. Let's choose a task type. Let it be `external/mmdetection` for Object Detection task. ```bash TASK_ALGO_DIR=./external/mmdetection/ @@ -46,7 +42,7 @@ Note that we will not use the variable `TASK_ALGO_DIR` inside our scripts, we set it just to simplify this guide. -6. Let's create, activate virtual environment for the chosen task, and install `ote_cli`. +5. Let's create, activate virtual environment for the chosen task, and install `ote_cli`. Note that the virtual environment folder may be created in any place in your system, but we will create it in the folder `./cur_task_venv` for convenience. ```bash @@ -62,7 +58,7 @@ from the chosen task folder is used to avoid breaking constraints for the OTE task. -7. As soon as `ote_cli` is installed in the virtual environment, you can use +6. As soon as `ote_cli` is installed in the virtual environment, you can use `ote` command line interface described below to run train/eval/export/other action for templates related to the chosen task type. diff --git a/external/anomaly/init_venv.sh b/external/anomaly/init_venv.sh index 015b677c8c2..dc5f06b4080 100755 --- a/external/anomaly/init_venv.sh +++ b/external/anomaly/init_venv.sh @@ -26,11 +26,6 @@ if [[ $PYTHON_VERSION != "3.8" && $PYTHON_VERSION != "3.9" ]]; then exit 1 fi -if [[ -z $OTE_SDK_PATH ]]; then - echo "The environment variable OTE_SDK_PATH is not set -- it is required for creating virtual environment" - exit 1 -fi - cd ${work_dir} if [[ -e ${venv_dir} ]]; then @@ -109,10 +104,11 @@ else echo torchvision==${TORCHVISION_VERSION}+cu${CUDA_VERSION_CODE} >> ${CONSTRAINTS_FILE} fi -pip install -r requirements.txt -pip install -e . +pip install -r requirements.txt || exit 1 +pip install -e . 
|| exit 1 -pip install -e $OTE_SDK_PATH || exit 1 +# Install OTE SDK +pip install -e ../../ote_sdk/ || exit 1 deactivate diff --git a/tests/ote_cli/external/anomaly/test_ote_cli_tools_anomaly_classification.py b/external/anomaly/tests/ote_cli/test_anomaly_classification.py similarity index 87% rename from tests/ote_cli/external/anomaly/test_ote_cli_tools_anomaly_classification.py rename to external/anomaly/tests/ote_cli/test_anomaly_classification.py index 7e49d819218..76fa1cfbc94 100644 --- a/tests/ote_cli/external/anomaly/test_ote_cli_tools_anomaly_classification.py +++ b/external/anomaly/tests/ote_cli/test_anomaly_classification.py @@ -20,7 +20,7 @@ from ote_sdk.test_suite.e2e_test_system import e2e_pytest_component from ote_cli.registry import Registry -from common import ( +from ote_cli.utils.tests import ( create_venv, get_some_vars, ote_demo_deployment_testing, @@ -42,28 +42,28 @@ args = { - '--train-ann-file': 'data/anomaly/classification/train.json', - '--train-data-roots': 'data/anomaly/shapes', - '--val-ann-file': 'data/anomaly/classification/val.json', - '--val-data-roots': 'data/anomaly/shapes', - '--test-ann-files': 'data/anomaly/classification/test.json', - '--test-data-roots': 'data/anomaly/shapes', - '--input': 'data/anomaly/shapes/test/hexagon', - 'train_params': [], + "--train-ann-file": "data/anomaly/classification/train.json", + "--train-data-roots": "data/anomaly/shapes", + "--val-ann-file": "data/anomaly/classification/val.json", + "--val-data-roots": "data/anomaly/shapes", + "--test-ann-files": "data/anomaly/classification/test.json", + "--test-data-roots": "data/anomaly/shapes", + "--input": "data/anomaly/shapes/test/hexagon", + "train_params": [], } -root = '/tmp/ote_cli/' +root = "/tmp/ote_cli/" ote_dir = os.getcwd() -templates = Registry('external').filter(task_type='ANOMALY_CLASSIFICATION').templates +templates = Registry("external").filter(task_type="ANOMALY_CLASSIFICATION").templates templates_ids = [template.model_template_id for template in templates] class TestToolsAnomalyClassification: @e2e_pytest_component def test_create_venv(self): - work_dir, template_work_dir, algo_backend_dir = get_some_vars(templates[0], root) - create_venv(algo_backend_dir, work_dir, template_work_dir) + work_dir, _, algo_backend_dir = get_some_vars(templates[0], root) + create_venv(algo_backend_dir, work_dir) @e2e_pytest_component @pytest.mark.parametrize("template", templates, ids=templates_ids) diff --git a/tests/ote_cli/external/anomaly/test_ote_cli_tools_anomaly_detection.py b/external/anomaly/tests/ote_cli/test_anomaly_detection.py similarity index 87% rename from tests/ote_cli/external/anomaly/test_ote_cli_tools_anomaly_detection.py rename to external/anomaly/tests/ote_cli/test_anomaly_detection.py index d36ec2dd45d..e6412b169db 100644 --- a/tests/ote_cli/external/anomaly/test_ote_cli_tools_anomaly_detection.py +++ b/external/anomaly/tests/ote_cli/test_anomaly_detection.py @@ -17,7 +17,7 @@ import os import pytest -from common import ( +from ote_cli.utils.tests import ( create_venv, get_some_vars, nncf_eval_openvino_testing, @@ -41,28 +41,28 @@ from ote_cli.registry import Registry args = { - '--train-ann-file': 'data/anomaly/detection/train.json', - '--train-data-roots': 'data/anomaly/shapes', - '--val-ann-file': 'data/anomaly/detection/val.json', - '--val-data-roots': 'data/anomaly/shapes', - '--test-ann-files': 'data/anomaly/detection/test.json', - '--test-data-roots': 'data/anomaly/shapes', - '--input': 'data/anomaly/shapes/test/hexagon', - 'train_params': [], + 
"--train-ann-file": "data/anomaly/detection/train.json", + "--train-data-roots": "data/anomaly/shapes", + "--val-ann-file": "data/anomaly/detection/val.json", + "--val-data-roots": "data/anomaly/shapes", + "--test-ann-files": "data/anomaly/detection/test.json", + "--test-data-roots": "data/anomaly/shapes", + "--input": "data/anomaly/shapes/test/hexagon", + "train_params": [], } -root = '/tmp/ote_cli/' +root = "/tmp/ote_cli/" ote_dir = os.getcwd() -templates = Registry('external').filter(task_type='ANOMALY_DETECTION').templates +templates = Registry("external").filter(task_type="ANOMALY_DETECTION").templates templates_ids = [template.model_template_id for template in templates] class TestToolsAnomalyDetection: @e2e_pytest_component def test_create_venv(self): - work_dir, template_work_dir, algo_backend_dir = get_some_vars(templates[0], root) - create_venv(algo_backend_dir, work_dir, template_work_dir) + work_dir, _, algo_backend_dir = get_some_vars(templates[0], root) + create_venv(algo_backend_dir, work_dir) @e2e_pytest_component @pytest.mark.parametrize("template", templates, ids=templates_ids) diff --git a/tests/ote_cli/external/anomaly/test_ote_cli_tools_anomaly_segmentation.py b/external/anomaly/tests/ote_cli/test_anomaly_segmentation.py similarity index 87% rename from tests/ote_cli/external/anomaly/test_ote_cli_tools_anomaly_segmentation.py rename to external/anomaly/tests/ote_cli/test_anomaly_segmentation.py index 284f3ec6c29..0789816fc0b 100644 --- a/tests/ote_cli/external/anomaly/test_ote_cli_tools_anomaly_segmentation.py +++ b/external/anomaly/tests/ote_cli/test_anomaly_segmentation.py @@ -20,7 +20,7 @@ from ote_sdk.test_suite.e2e_test_system import e2e_pytest_component from ote_cli.registry import Registry -from common import ( +from ote_cli.utils.tests import ( create_venv, get_some_vars, ote_demo_deployment_testing, @@ -42,28 +42,28 @@ args = { - '--train-ann-file': 'data/anomaly/segmentation/train.json', - '--train-data-roots': 'data/anomaly/shapes', - '--val-ann-file': 'data/anomaly/segmentation/val.json', - '--val-data-roots': 'data/anomaly/shapes', - '--test-ann-files': 'data/anomaly/segmentation/test.json', - '--test-data-roots': 'data/anomaly/shapes', - '--input': 'data/anomaly/shapes/test/hexagon', - 'train_params': [], + "--train-ann-file": "data/anomaly/segmentation/train.json", + "--train-data-roots": "data/anomaly/shapes", + "--val-ann-file": "data/anomaly/segmentation/val.json", + "--val-data-roots": "data/anomaly/shapes", + "--test-ann-files": "data/anomaly/segmentation/test.json", + "--test-data-roots": "data/anomaly/shapes", + "--input": "data/anomaly/shapes/test/hexagon", + "train_params": [], } -root = '/tmp/ote_cli/' +root = "/tmp/ote_cli/" ote_dir = os.getcwd() -templates = Registry('external').filter(task_type='ANOMALY_SEGMENTATION').templates +templates = Registry("external").filter(task_type="ANOMALY_SEGMENTATION").templates templates_ids = [template.model_template_id for template in templates] class TestToolsAnomalySegmentation: @e2e_pytest_component def test_create_venv(self): - work_dir, template_work_dir, algo_backend_dir = get_some_vars(templates[0], root) - create_venv(algo_backend_dir, work_dir, template_work_dir) + work_dir, _, algo_backend_dir = get_some_vars(templates[0], root) + create_venv(algo_backend_dir, work_dir) @e2e_pytest_component @pytest.mark.parametrize("template", templates, ids=templates_ids) diff --git a/external/deep-object-reid/tests/conftest.py b/external/deep-object-reid/tests/conftest.py index 24f5e6883da..5e7aad70cfa 100644 
--- a/external/deep-object-reid/tests/conftest.py +++ b/external/deep-object-reid/tests/conftest.py @@ -5,15 +5,14 @@ from e2e.conftest_utils import pytest_addoption as _e2e_pytest_addoption # noqa from e2e import config # noqa from e2e.utils import get_plugins_from_packages - from ote_sdk.test_suite.pytest_insertions import * - from ote_sdk.test_suite.training_tests_common import REALLIFE_USECASE_CONSTANT pytest_plugins = get_plugins_from_packages([e2e]) except ImportError: _e2e_pytest_addoption = None pass - import config - +import pytest +from ote_sdk.test_suite.pytest_insertions import * +from ote_sdk.test_suite.training_tests_common import REALLIFE_USECASE_CONSTANT pytest_plugins = get_pytest_plugins_from_ote() diff --git a/tests/ote_cli/external/deep-object-reid/test_ote_cli_tools_classification.py b/external/deep-object-reid/tests/ote_cli/test_classification.py similarity index 96% rename from tests/ote_cli/external/deep-object-reid/test_ote_cli_tools_classification.py rename to external/deep-object-reid/tests/ote_cli/test_classification.py index fbeb07716bb..bce27f1bbc1 100644 --- a/tests/ote_cli/external/deep-object-reid/test_ote_cli_tools_classification.py +++ b/external/deep-object-reid/tests/ote_cli/test_classification.py @@ -20,7 +20,7 @@ from ote_sdk.test_suite.e2e_test_system import e2e_pytest_component from ote_cli.registry import Registry -from common import ( +from ote_cli.utils.tests import ( create_venv, get_some_vars, ote_demo_deployment_testing, @@ -69,8 +69,8 @@ class TestToolsClassification: @e2e_pytest_component def test_create_venv(self): - work_dir, template_work_dir, algo_backend_dir = get_some_vars(templates[0], root) - create_venv(algo_backend_dir, work_dir, template_work_dir) + work_dir, _, algo_backend_dir = get_some_vars(templates[0], root) + create_venv(algo_backend_dir, work_dir) @e2e_pytest_component @pytest.mark.parametrize("template", templates, ids=templates_ids) diff --git a/tests/ote_cli/external/mmdetection/test_ote_cli_tools_detection.py b/external/mmdetection/tests/ote_cli/test_detection.py similarity index 96% rename from tests/ote_cli/external/mmdetection/test_ote_cli_tools_detection.py rename to external/mmdetection/tests/ote_cli/test_detection.py index 01d1be249db..964b1a1bed0 100644 --- a/tests/ote_cli/external/mmdetection/test_ote_cli_tools_detection.py +++ b/external/mmdetection/tests/ote_cli/test_detection.py @@ -21,7 +21,7 @@ from ote_sdk.test_suite.e2e_test_system import e2e_pytest_component from ote_cli.registry import Registry -from common import ( +from ote_cli.utils.tests import ( collect_env_vars, create_venv, get_some_vars, @@ -71,8 +71,8 @@ class TestToolsDetection: @e2e_pytest_component def test_create_venv(self): - work_dir, template_work_dir, algo_backend_dir = get_some_vars(templates[0], root) - create_venv(algo_backend_dir, work_dir, template_work_dir) + work_dir, _, algo_backend_dir = get_some_vars(templates[0], root) + create_venv(algo_backend_dir, work_dir) @e2e_pytest_component @pytest.mark.parametrize("template", templates, ids=templates_ids) @@ -92,7 +92,7 @@ def test_ote_eval(self, template): @e2e_pytest_component @pytest.mark.parametrize("template", templates, ids=templates_ids) def test_ote_eval_openvino(self, template): - ote_eval_openvino_testing(template, root, ote_dir, args, threshold=0.1) + ote_eval_openvino_testing(template, root, ote_dir, args, threshold=0.2) @e2e_pytest_component @pytest.mark.parametrize("template", templates, ids=templates_ids) diff --git 
a/tests/ote_cli/external/mmdetection/test_ote_cli_tools_instance_segmentation.py b/external/mmdetection/tests/ote_cli/test_instance_segmentation.py similarity index 95% rename from tests/ote_cli/external/mmdetection/test_ote_cli_tools_instance_segmentation.py rename to external/mmdetection/tests/ote_cli/test_instance_segmentation.py index b20deb3f0e9..34254d1ad0a 100644 --- a/tests/ote_cli/external/mmdetection/test_ote_cli_tools_instance_segmentation.py +++ b/external/mmdetection/tests/ote_cli/test_instance_segmentation.py @@ -22,7 +22,7 @@ from ote_cli.registry import Registry -from common import ( +from ote_cli.utils.tests import ( create_venv, get_some_vars, ote_demo_deployment_testing, @@ -64,8 +64,8 @@ class TestToolsInstanceSegmentation: @e2e_pytest_component def test_create_venv(self): - work_dir, template_work_dir, algo_backend_dir = get_some_vars(templates[0], root) - create_venv(algo_backend_dir, work_dir, template_work_dir) + work_dir, _, algo_backend_dir = get_some_vars(templates[0], root) + create_venv(algo_backend_dir, work_dir) @e2e_pytest_component @pytest.mark.parametrize("template", templates, ids=templates_ids) diff --git a/tests/ote_cli/external/mmdetection/test_ote_cli_tools_rotated_detection.py b/external/mmdetection/tests/ote_cli/test_rotated_detection.py similarity index 95% rename from tests/ote_cli/external/mmdetection/test_ote_cli_tools_rotated_detection.py rename to external/mmdetection/tests/ote_cli/test_rotated_detection.py index c3efb5dc7f1..661d5989c78 100644 --- a/tests/ote_cli/external/mmdetection/test_ote_cli_tools_rotated_detection.py +++ b/external/mmdetection/tests/ote_cli/test_rotated_detection.py @@ -22,7 +22,7 @@ from ote_cli.registry import Registry -from common import ( +from ote_cli.utils.tests import ( create_venv, get_some_vars, ote_demo_deployment_testing, @@ -64,8 +64,8 @@ class TestToolsRotatedDetection: @e2e_pytest_component def test_create_venv(self): - work_dir, template_work_dir, algo_backend_dir = get_some_vars(templates[0], root) - create_venv(algo_backend_dir, work_dir, template_work_dir) + work_dir, _, algo_backend_dir = get_some_vars(templates[0], root) + create_venv(algo_backend_dir, work_dir) @e2e_pytest_component @pytest.mark.parametrize("template", templates, ids=templates_ids) diff --git a/tests/ote_cli/external/mmsegmentation/test_ote_cli_tools_segmentation.py b/external/mmsegmentation/tests/ote_cli/test_segmentation.py similarity index 97% rename from tests/ote_cli/external/mmsegmentation/test_ote_cli_tools_segmentation.py rename to external/mmsegmentation/tests/ote_cli/test_segmentation.py index 33e3ea48807..bcf3e49c1a9 100644 --- a/tests/ote_cli/external/mmsegmentation/test_ote_cli_tools_segmentation.py +++ b/external/mmsegmentation/tests/ote_cli/test_segmentation.py @@ -22,7 +22,7 @@ from ote_cli.registry import Registry -from common import ( +from ote_cli.utils.tests import ( create_venv, get_some_vars, ote_demo_deployment_testing, @@ -75,8 +75,8 @@ class TestToolsSegmentation: @e2e_pytest_component def test_create_venv(self): - work_dir, template_work_dir, algo_backend_dir = get_some_vars(templates[0], root) - create_venv(algo_backend_dir, work_dir, template_work_dir) + work_dir, _, algo_backend_dir = get_some_vars(templates[0], root) + create_venv(algo_backend_dir, work_dir) @e2e_pytest_component @pytest.mark.parametrize("template", templates, ids=templates_ids) diff --git a/ote_cli/ote_cli/utils/tests.py b/ote_cli/ote_cli/utils/tests.py new file mode 100644 index 00000000000..d53e4a4c861 --- /dev/null +++ 
b/ote_cli/ote_cli/utils/tests.py @@ -0,0 +1,625 @@ +# Copyright (C) 2021 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions +# and limitations under the License. + +import json +import os +import shutil +from subprocess import run # nosec + +import pytest + + +def get_template_rel_dir(template): + return os.path.dirname(os.path.relpath(template.model_template_path)) + + +def get_some_vars(template, root): + template_dir = get_template_rel_dir(template) + algo_backend_dir = "/".join(template_dir.split("/")[:2]) + work_dir = os.path.join(root, os.path.basename(algo_backend_dir)) + template_work_dir = os.path.join(work_dir, template_dir) + os.makedirs(template_work_dir, exist_ok=True) + return work_dir, template_work_dir, algo_backend_dir + + +def create_venv(algo_backend_dir, work_dir): + venv_dir = f"{work_dir}/venv" + if not os.path.exists(venv_dir): + assert run([f"./{algo_backend_dir}/init_venv.sh", venv_dir]).returncode == 0 + assert ( + run( + [f"{work_dir}/venv/bin/python", "-m", "pip", "install", "-e", "ote_cli"] + ).returncode + == 0 + ) + + +def extract_export_vars(path): + vars = {} + with open(path) as f: + for line in f: + line = line.strip() + if line.startswith("export ") and "=" in line: + line = line.replace("export ", "").split("=") + assert len(line) == 2 + vars[line[0].strip()] = line[1].strip() + return vars + + +def collect_env_vars(work_dir): + vars = extract_export_vars(f"{work_dir}/venv/bin/activate") + vars.update({"PATH": f"{work_dir}/venv/bin/:" + os.environ["PATH"]}) + if "HTTP_PROXY" in os.environ: + vars.update({"HTTP_PROXY": os.environ["HTTP_PROXY"]}) + if "HTTPS_PROXY" in os.environ: + vars.update({"HTTPS_PROXY": os.environ["HTTPS_PROXY"]}) + if "NO_PROXY" in os.environ: + vars.update({"NO_PROXY": os.environ["NO_PROXY"]}) + return vars + + +def patch_demo_py(src_path, dst_path): + with open(src_path) as read_file: + content = [line for line in read_file] + replaced = False + for i, line in enumerate(content): + if "visualizer = Visualizer(media_type)" in line: + content[i] = line.rstrip() + "; visualizer.show = show\n" + replaced = True + assert replaced + content = ["def show(self):\n", " pass\n\n"] + content + with open(dst_path, "w") as write_file: + write_file.write("".join(content)) + + +def remove_ote_sdk_from_requirements(path): + with open(path, encoding="UTF-8") as read_file: + content = "".join([line for line in read_file if "ote_sdk" not in line]) + + with open(path, "w", encoding="UTF-8") as write_file: + write_file.write(content) + + +def ote_train_testing(template, root, ote_dir, args): + work_dir, template_work_dir, _ = get_some_vars(template, root) + command_line = [ + "ote", + "train", + template.model_template_id, + "--train-ann-file", + f'{os.path.join(ote_dir, args["--train-ann-file"])}', + "--train-data-roots", + f'{os.path.join(ote_dir, args["--train-data-roots"])}', + "--val-ann-file", + f'{os.path.join(ote_dir, args["--val-ann-file"])}', + "--val-data-roots", + f'{os.path.join(ote_dir, args["--val-data-roots"])}', + "--save-model-to", + 
f"{template_work_dir}/trained_{template.model_template_id}", + ] + command_line.extend(args["train_params"]) + assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 + assert os.path.exists( + f"{template_work_dir}/trained_{template.model_template_id}/weights.pth" + ) + assert os.path.exists( + f"{template_work_dir}/trained_{template.model_template_id}/label_schema.json" + ) + + +def ote_hpo_testing(template, root, ote_dir, args): + work_dir, template_work_dir, _ = get_some_vars(template, root) + if os.path.exists(f"{template_work_dir}/hpo"): + shutil.rmtree(f"{template_work_dir}/hpo") + command_line = [ + "ote", + "train", + template.model_template_id, + "--train-ann-file", + f'{os.path.join(ote_dir, args["--train-ann-file"])}', + "--train-data-roots", + f'{os.path.join(ote_dir, args["--train-data-roots"])}', + "--val-ann-file", + f'{os.path.join(ote_dir, args["--val-ann-file"])}', + "--val-data-roots", + f'{os.path.join(ote_dir, args["--val-data-roots"])}', + "--save-model-to", + f"{template_work_dir}/hpo_trained_{template.model_template_id}", + "--enable-hpo", + "--hpo-time-ratio", + "1", + ] + command_line.extend(args["train_params"]) + assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 + assert os.path.exists(f"{template_work_dir}/hpo/hpopt_status.json") + with open(f"{template_work_dir}/hpo/hpopt_status.json", "r") as f: + assert json.load(f).get("best_config_id", None) is not None + assert os.path.exists( + f"{template_work_dir}/hpo_trained_{template.model_template_id}/weights.pth" + ) + assert os.path.exists( + f"{template_work_dir}/hpo_trained_{template.model_template_id}/label_schema.json" + ) + + +def ote_export_testing(template, root): + work_dir, template_work_dir, _ = get_some_vars(template, root) + command_line = [ + "ote", + "export", + template.model_template_id, + "--load-weights", + f"{template_work_dir}/trained_{template.model_template_id}/weights.pth", + "--save-model-to", + f"{template_work_dir}/exported_{template.model_template_id}", + ] + assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 + assert os.path.exists( + f"{template_work_dir}/exported_{template.model_template_id}/openvino.xml" + ) + assert os.path.exists( + f"{template_work_dir}/exported_{template.model_template_id}/openvino.bin" + ) + assert os.path.exists( + f"{template_work_dir}/exported_{template.model_template_id}/label_schema.json" + ) + + +def ote_eval_testing(template, root, ote_dir, args): + work_dir, template_work_dir, _ = get_some_vars(template, root) + command_line = [ + "ote", + "eval", + template.model_template_id, + "--test-ann-file", + f'{os.path.join(ote_dir, args["--test-ann-files"])}', + "--test-data-roots", + f'{os.path.join(ote_dir, args["--test-data-roots"])}', + "--load-weights", + f"{template_work_dir}/trained_{template.model_template_id}/weights.pth", + "--save-performance", + f"{template_work_dir}/trained_{template.model_template_id}/performance.json", + ] + assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 + assert os.path.exists( + f"{template_work_dir}/trained_{template.model_template_id}/performance.json" + ) + + +def ote_eval_openvino_testing(template, root, ote_dir, args, threshold): + work_dir, template_work_dir, _ = get_some_vars(template, root) + command_line = [ + "ote", + "eval", + template.model_template_id, + "--test-ann-file", + f'{os.path.join(ote_dir, args["--test-ann-files"])}', + "--test-data-roots", + f'{os.path.join(ote_dir, args["--test-data-roots"])}', + "--load-weights", + 
f"{template_work_dir}/exported_{template.model_template_id}/openvino.xml", + "--save-performance", + f"{template_work_dir}/exported_{template.model_template_id}/performance.json", + ] + assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 + assert os.path.exists( + f"{template_work_dir}/exported_{template.model_template_id}/performance.json" + ) + with open( + f"{template_work_dir}/trained_{template.model_template_id}/performance.json" + ) as read_file: + trained_performance = json.load(read_file) + with open( + f"{template_work_dir}/exported_{template.model_template_id}/performance.json" + ) as read_file: + exported_performance = json.load(read_file) + + for k in trained_performance.keys(): + assert ( + abs(trained_performance[k] - exported_performance[k]) + / (trained_performance[k] + 1e-10) + <= threshold + ), f"{trained_performance[k]=}, {exported_performance[k]=}" + + +def ote_demo_testing(template, root, ote_dir, args): + work_dir, template_work_dir, _ = get_some_vars(template, root) + command_line = [ + "ote", + "demo", + template.model_template_id, + "--load-weights", + f"{template_work_dir}/trained_{template.model_template_id}/weights.pth", + "--input", + os.path.join(ote_dir, args["--input"]), + "--delay", + "-1", + ] + assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 + + +def ote_demo_openvino_testing(template, root, ote_dir, args): + work_dir, template_work_dir, _ = get_some_vars(template, root) + command_line = [ + "ote", + "demo", + template.model_template_id, + "--load-weights", + f"{template_work_dir}/exported_{template.model_template_id}/openvino.xml", + "--input", + os.path.join(ote_dir, args["--input"]), + "--delay", + "-1", + ] + assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 + + +def ote_deploy_openvino_testing(template, root, ote_dir, args): + work_dir, template_work_dir, _ = get_some_vars(template, root) + deployment_dir = f"{template_work_dir}/deployed_{template.model_template_id}" + command_line = [ + "ote", + "deploy", + template.model_template_id, + "--load-weights", + f"{template_work_dir}/exported_{template.model_template_id}/openvino.xml", + "--save-model-to", + deployment_dir, + ] + assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 + assert run(["unzip", "openvino.zip"], cwd=deployment_dir).returncode == 0 + assert ( + run( + ["python3", "-m", "venv", "venv"], + cwd=os.path.join(deployment_dir, "python"), + ).returncode + == 0 + ) + assert ( + run( + ["python3", "-m", "pip", "install", "wheel"], + cwd=os.path.join(deployment_dir, "python"), + env=collect_env_vars(os.path.join(deployment_dir, "python")), + ).returncode + == 0 + ) + + # Remove ote_sdk from requirements.txt, since merge commit (that is created on CI) + # is not pushed to github and that's why cannot be cloned. + # Install ote_sdk from local folder instead. + # Install the demo_package with --no-deps since, requirements.txt + # has been embedded to the demo_package during creation. 
+ remove_ote_sdk_from_requirements( + os.path.join(deployment_dir, "python", "requirements.txt") + ) + assert ( + run( + ["python3", "-m", "pip", "install", "pip", "--upgrade"], + cwd=os.path.join(deployment_dir, "python"), + env=collect_env_vars(os.path.join(deployment_dir, "python")), + ).returncode + == 0 + ) + assert ( + run( + [ + "python3", + "-m", + "pip", + "install", + "-e", + os.path.join(os.path.dirname(__file__), "..", "..", "..", "ote_sdk"), + ], + cwd=os.path.join(deployment_dir, "python"), + env=collect_env_vars(os.path.join(deployment_dir, "python")), + ).returncode + == 0 + ) + assert ( + run( + [ + "python3", + "-m", + "pip", + "install", + "-r", + os.path.join(deployment_dir, "python", "requirements.txt"), + ], + cwd=os.path.join(deployment_dir, "python"), + env=collect_env_vars(os.path.join(deployment_dir, "python")), + ).returncode + == 0 + ) + assert ( + run( + [ + "python3", + "-m", + "pip", + "install", + "demo_package-0.0-py3-none-any.whl", + "--no-deps", + ], + cwd=os.path.join(deployment_dir, "python"), + env=collect_env_vars(os.path.join(deployment_dir, "python")), + ).returncode + == 0 + ) + + # Patch demo since we are not able to run cv2.imshow on CI. + patch_demo_py( + os.path.join(deployment_dir, "python", "demo.py"), + os.path.join(deployment_dir, "python", "demo_patched.py"), + ) + + assert ( + run( + [ + "python3", + "demo_patched.py", + "-m", + "../model/model.xml", + "-i", + os.path.join(ote_dir, args["--input"]), + ], + cwd=os.path.join(deployment_dir, "python"), + env=collect_env_vars(os.path.join(deployment_dir, "python")), + ).returncode + == 0 + ) + + +def ote_eval_deployment_testing(template, root, ote_dir, args, threshold): + work_dir, template_work_dir, _ = get_some_vars(template, root) + command_line = [ + "ote", + "eval", + template.model_template_id, + "--test-ann-file", + f'{os.path.join(ote_dir, args["--test-ann-files"])}', + "--test-data-roots", + f'{os.path.join(ote_dir, args["--test-data-roots"])}', + "--load-weights", + f"{template_work_dir}/deployed_{template.model_template_id}/openvino.zip", + "--save-performance", + f"{template_work_dir}/deployed_{template.model_template_id}/performance.json", + ] + assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 + assert os.path.exists( + f"{template_work_dir}/deployed_{template.model_template_id}/performance.json" + ) + with open( + f"{template_work_dir}/exported_{template.model_template_id}/performance.json" + ) as read_file: + exported_performance = json.load(read_file) + with open( + f"{template_work_dir}/deployed_{template.model_template_id}/performance.json" + ) as read_file: + deployed_performance = json.load(read_file) + + for k in exported_performance.keys(): + assert ( + abs(exported_performance[k] - deployed_performance[k]) + / (exported_performance[k] + 1e-10) + <= threshold + ), f"{exported_performance[k]=}, {deployed_performance[k]=}" + + +def ote_demo_deployment_testing(template, root, ote_dir, args): + work_dir, template_work_dir, _ = get_some_vars(template, root) + command_line = [ + "ote", + "demo", + template.model_template_id, + "--load-weights", + f"{template_work_dir}/deployed_{template.model_template_id}/openvino.zip", + "--input", + os.path.join(ote_dir, args["--input"]), + "--delay", + "-1", + ] + assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 + + +def pot_optimize_testing(template, root, ote_dir, args): + work_dir, template_work_dir, _ = get_some_vars(template, root) + command_line = [ + "ote", + "optimize", + 
template.model_template_id, + "--train-ann-file", + f'{os.path.join(ote_dir, args["--train-ann-file"])}', + "--train-data-roots", + f'{os.path.join(ote_dir, args["--train-data-roots"])}', + "--val-ann-file", + f'{os.path.join(ote_dir, args["--val-ann-file"])}', + "--val-data-roots", + f'{os.path.join(ote_dir, args["--val-data-roots"])}', + "--load-weights", + f"{template_work_dir}/exported_{template.model_template_id}/openvino.xml", + "--save-model-to", + f"{template_work_dir}/pot_{template.model_template_id}", + ] + assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 + assert os.path.exists( + f"{template_work_dir}/pot_{template.model_template_id}/openvino.xml" + ) + assert os.path.exists( + f"{template_work_dir}/pot_{template.model_template_id}/openvino.bin" + ) + assert os.path.exists( + f"{template_work_dir}/pot_{template.model_template_id}/label_schema.json" + ) + + +def pot_eval_testing(template, root, ote_dir, args): + work_dir, template_work_dir, _ = get_some_vars(template, root) + command_line = [ + "ote", + "eval", + template.model_template_id, + "--test-ann-file", + f'{os.path.join(ote_dir, args["--test-ann-files"])}', + "--test-data-roots", + f'{os.path.join(ote_dir, args["--test-data-roots"])}', + "--load-weights", + f"{template_work_dir}/pot_{template.model_template_id}/openvino.xml", + "--save-performance", + f"{template_work_dir}/pot_{template.model_template_id}/performance.json", + ] + assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 + assert os.path.exists( + f"{template_work_dir}/pot_{template.model_template_id}/performance.json" + ) + + +def nncf_optimize_testing(template, root, ote_dir, args): + work_dir, template_work_dir, _ = get_some_vars(template, root) + command_line = [ + "ote", + "optimize", + template.model_template_id, + "--train-ann-file", + f'{os.path.join(ote_dir, args["--train-ann-file"])}', + "--train-data-roots", + f'{os.path.join(ote_dir, args["--train-data-roots"])}', + "--val-ann-file", + f'{os.path.join(ote_dir, args["--val-ann-file"])}', + "--val-data-roots", + f'{os.path.join(ote_dir, args["--val-data-roots"])}', + "--load-weights", + f"{template_work_dir}/trained_{template.model_template_id}/weights.pth", + "--save-model-to", + f"{template_work_dir}/nncf_{template.model_template_id}", + "--save-performance", + f"{template_work_dir}/nncf_{template.model_template_id}/train_performance.json", + ] + command_line.extend(args["train_params"]) + assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 + assert os.path.exists( + f"{template_work_dir}/nncf_{template.model_template_id}/weights.pth" + ) + assert os.path.exists( + f"{template_work_dir}/nncf_{template.model_template_id}/label_schema.json" + ) + + +def nncf_export_testing(template, root): + work_dir, template_work_dir, _ = get_some_vars(template, root) + command_line = [ + "ote", + "export", + template.model_template_id, + "--load-weights", + f"{template_work_dir}/nncf_{template.model_template_id}/weights.pth", + "--save-model-to", + f"{template_work_dir}/exported_nncf_{template.model_template_id}", + ] + assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 + assert os.path.exists( + f"{template_work_dir}/exported_nncf_{template.model_template_id}/openvino.xml" + ) + assert os.path.exists( + f"{template_work_dir}/exported_nncf_{template.model_template_id}/openvino.bin" + ) + assert os.path.exists( + f"{template_work_dir}/exported_nncf_{template.model_template_id}/label_schema.json" + ) + original_bin_size = 
os.path.getsize( + f"{template_work_dir}/exported_{template.model_template_id}/openvino.bin" + ) + compressed_bin_size = os.path.getsize( + f"{template_work_dir}/exported_nncf_{template.model_template_id}/openvino.bin" + ) + assert ( + compressed_bin_size < original_bin_size + ), f"{compressed_bin_size=}, {original_bin_size=}" + + +def nncf_eval_testing(template, root, ote_dir, args, threshold): + work_dir, template_work_dir, _ = get_some_vars(template, root) + command_line = [ + "ote", + "eval", + template.model_template_id, + "--test-ann-file", + f'{os.path.join(ote_dir, args["--test-ann-files"])}', + "--test-data-roots", + f'{os.path.join(ote_dir, args["--test-data-roots"])}', + "--load-weights", + f"{template_work_dir}/nncf_{template.model_template_id}/weights.pth", + "--save-performance", + f"{template_work_dir}/nncf_{template.model_template_id}/performance.json", + ] + assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 + assert os.path.exists( + f"{template_work_dir}/nncf_{template.model_template_id}/performance.json" + ) + with open( + f"{template_work_dir}/nncf_{template.model_template_id}/train_performance.json" + ) as read_file: + trained_performance = json.load(read_file) + with open( + f"{template_work_dir}/nncf_{template.model_template_id}/performance.json" + ) as read_file: + evaluated_performance = json.load(read_file) + + for k in trained_performance.keys(): + assert ( + abs(trained_performance[k] - evaluated_performance[k]) + / (trained_performance[k] + 1e-10) + <= threshold + ), f"{trained_performance[k]=}, {evaluated_performance[k]=}" + + +def nncf_eval_openvino_testing(template, root, ote_dir, args): + work_dir, template_work_dir, _ = get_some_vars(template, root) + command_line = [ + "ote", + "eval", + template.model_template_id, + "--test-ann-file", + f'{os.path.join(ote_dir, args["--test-ann-files"])}', + "--test-data-roots", + f'{os.path.join(ote_dir, args["--test-data-roots"])}', + "--load-weights", + f"{template_work_dir}/exported_nncf_{template.model_template_id}/openvino.xml", + "--save-performance", + f"{template_work_dir}/exported_nncf_{template.model_template_id}/performance.json", + ] + assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 + assert os.path.exists( + f"{template_work_dir}/exported_nncf_{template.model_template_id}/performance.json" + ) + + +def xfail_templates(templates, xfail_template_ids_reasons): + xfailed_templates = [] + for template in templates: + reasons = [ + reason + for template_id, reason in xfail_template_ids_reasons + if template_id == template.model_template_id + ] + if len(reasons) == 0: + xfailed_templates.append(template) + elif len(reasons) == 1: + xfailed_templates.append( + pytest.param(template, marks=pytest.mark.xfail(reason=reasons[0])) + ) + else: + raise RuntimeError( + "More than one reason for template. If you have more than one Jira tickets, list them in one reason." + ) + return xfailed_templates diff --git a/tests/ote_cli/common.py b/tests/ote_cli/common.py deleted file mode 100644 index 3c29dd5a32b..00000000000 --- a/tests/ote_cli/common.py +++ /dev/null @@ -1,441 +0,0 @@ -# Copyright (C) 2021 Intel Corporation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions -# and limitations under the License. - -import json -import os -import shutil -from subprocess import run # nosec - - -def get_template_rel_dir(template): - return os.path.dirname(os.path.relpath(template.model_template_path)) - - -def get_some_vars(template, root): - template_dir = get_template_rel_dir(template) - algo_backend_dir = '/'.join(template_dir.split('/')[:2]) - work_dir = os.path.join(root, os.path.basename(algo_backend_dir)) - template_work_dir = os.path.join(work_dir, template_dir) - os.makedirs(template_work_dir, exist_ok=True) - return work_dir, template_work_dir, algo_backend_dir - - -def create_venv(algo_backend_dir, work_dir, template_work_dir): - venv_dir = f'{work_dir}/venv' - if not os.path.exists(venv_dir): - assert run([f'./{algo_backend_dir}/init_venv.sh', venv_dir]).returncode == 0 - assert run([f'{work_dir}/venv/bin/python', '-m', 'pip', 'install', '-e', 'ote_cli']).returncode == 0 - - -def extract_export_vars(path): - vars = {} - with open(path) as f: - for line in f: - line = line.strip() - if line.startswith('export ') and '=' in line: - line = line.replace('export ', '').split('=') - assert len(line) == 2 - vars[line[0].strip()] = line[1].strip() - return vars - - -def collect_env_vars(work_dir): - vars = extract_export_vars(f'{work_dir}/venv/bin/activate') - vars.update({'PATH':f'{work_dir}/venv/bin/:' + os.environ['PATH']}) - if 'HTTP_PROXY' in os.environ: - vars.update({'HTTP_PROXY': os.environ['HTTP_PROXY']}) - if 'HTTPS_PROXY' in os.environ: - vars.update({'HTTPS_PROXY': os.environ['HTTPS_PROXY']}) - if 'NO_PROXY' in os.environ: - vars.update({'NO_PROXY': os.environ['NO_PROXY']}) - if 'OTE_SDK_PATH' in os.environ: - vars.update({'OTE_SDK_PATH': os.environ['OTE_SDK_PATH']}) - return vars - - -def patch_demo_py(src_path, dst_path): - with open(src_path) as read_file: - content = [line for line in read_file] - replaced = False - for i, line in enumerate(content): - if 'visualizer = Visualizer(media_type)' in line: - content[i] = line.rstrip() + '; visualizer.show = show\n' - replaced = True - assert replaced - content = ['def show(self):\n', ' pass\n\n'] + content - with open(dst_path, 'w') as write_file: - write_file.write(''.join(content)) - - -def remove_ote_sdk_from_requirements(path): - with open(path, encoding='UTF-8') as read_file: - content = ''.join([line for line in read_file if 'ote_sdk' not in line]) - - with open(path, 'w', encoding='UTF-8') as write_file: - write_file.write(content) - - -def ote_train_testing(template, root, ote_dir, args): - work_dir, template_work_dir, _ = get_some_vars(template, root) - command_line = ['ote', - 'train', - template.model_template_id, - '--train-ann-file', - f'{os.path.join(ote_dir, args["--train-ann-file"])}', - '--train-data-roots', - f'{os.path.join(ote_dir, args["--train-data-roots"])}', - '--val-ann-file', - f'{os.path.join(ote_dir, args["--val-ann-file"])}', - '--val-data-roots', - f'{os.path.join(ote_dir, args["--val-data-roots"])}', - '--save-model-to', - f'{template_work_dir}/trained_{template.model_template_id}'] - command_line.extend(args['train_params']) - assert run(command_line, env=collect_env_vars(work_dir)).returncode == 
0 - assert os.path.exists(f'{template_work_dir}/trained_{template.model_template_id}/weights.pth') - assert os.path.exists(f'{template_work_dir}/trained_{template.model_template_id}/label_schema.json') - - -def ote_hpo_testing(template, root, ote_dir, args): - work_dir, template_work_dir, _ = get_some_vars(template, root) - if os.path.exists(f"{template_work_dir}/hpo"): - shutil.rmtree(f"{template_work_dir}/hpo") - command_line = ['ote', - 'train', - template.model_template_id, - '--train-ann-file', - f'{os.path.join(ote_dir, args["--train-ann-file"])}', - '--train-data-roots', - f'{os.path.join(ote_dir, args["--train-data-roots"])}', - '--val-ann-file', - f'{os.path.join(ote_dir, args["--val-ann-file"])}', - '--val-data-roots', - f'{os.path.join(ote_dir, args["--val-data-roots"])}', - '--save-model-to', - f'{template_work_dir}/hpo_trained_{template.model_template_id}', - '--enable-hpo', - '--hpo-time-ratio', - '1'] - command_line.extend(args['train_params']) - assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 - assert os.path.exists(f"{template_work_dir}/hpo/hpopt_status.json") - with open(f"{template_work_dir}/hpo/hpopt_status.json", "r") as f: - assert json.load(f).get('best_config_id', None) is not None - assert os.path.exists(f'{template_work_dir}/hpo_trained_{template.model_template_id}/weights.pth') - assert os.path.exists(f'{template_work_dir}/hpo_trained_{template.model_template_id}/label_schema.json') - - -def ote_export_testing(template, root): - work_dir, template_work_dir, _ = get_some_vars(template, root) - command_line = ['ote', - 'export', - template.model_template_id, - '--load-weights', - f'{template_work_dir}/trained_{template.model_template_id}/weights.pth', - f'--save-model-to', - f'{template_work_dir}/exported_{template.model_template_id}'] - assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 - assert os.path.exists(f'{template_work_dir}/exported_{template.model_template_id}/openvino.xml') - assert os.path.exists(f'{template_work_dir}/exported_{template.model_template_id}/openvino.bin') - assert os.path.exists(f'{template_work_dir}/exported_{template.model_template_id}/label_schema.json') - - -def ote_eval_testing(template, root, ote_dir, args): - work_dir, template_work_dir, _ = get_some_vars(template, root) - command_line = ['ote', - 'eval', - template.model_template_id, - '--test-ann-file', - f'{os.path.join(ote_dir, args["--test-ann-files"])}', - '--test-data-roots', - f'{os.path.join(ote_dir, args["--test-data-roots"])}', - '--load-weights', - f'{template_work_dir}/trained_{template.model_template_id}/weights.pth', - '--save-performance', - f'{template_work_dir}/trained_{template.model_template_id}/performance.json'] - assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 - assert os.path.exists(f'{template_work_dir}/trained_{template.model_template_id}/performance.json') - - -def ote_eval_openvino_testing(template, root, ote_dir, args, threshold): - work_dir, template_work_dir, _ = get_some_vars(template, root) - command_line = ['ote', - 'eval', - template.model_template_id, - '--test-ann-file', - f'{os.path.join(ote_dir, args["--test-ann-files"])}', - '--test-data-roots', - f'{os.path.join(ote_dir, args["--test-data-roots"])}', - '--load-weights', - f'{template_work_dir}/exported_{template.model_template_id}/openvino.xml', - '--save-performance', - f'{template_work_dir}/exported_{template.model_template_id}/performance.json'] - assert run(command_line, env=collect_env_vars(work_dir)).returncode == 
0 - assert os.path.exists(f'{template_work_dir}/exported_{template.model_template_id}/performance.json') - with open(f'{template_work_dir}/trained_{template.model_template_id}/performance.json') as read_file: - trained_performance = json.load(read_file) - with open(f'{template_work_dir}/exported_{template.model_template_id}/performance.json') as read_file: - exported_performance = json.load(read_file) - - for k in trained_performance.keys(): - assert ( - abs(trained_performance[k] - exported_performance[k]) - / (trained_performance[k] + 1e-10) - <= threshold - ), f"{trained_performance[k]=}, {exported_performance[k]=}" - - -def ote_demo_testing(template, root, ote_dir, args): - work_dir, template_work_dir, _ = get_some_vars(template, root) - command_line = ['ote', - 'demo', - template.model_template_id, - '--load-weights', - f'{template_work_dir}/trained_{template.model_template_id}/weights.pth', - '--input', - os.path.join(ote_dir, args['--input']), - '--delay', - '-1'] - assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 - - -def ote_demo_openvino_testing(template, root, ote_dir, args): - work_dir, template_work_dir, _ = get_some_vars(template, root) - command_line = ['ote', - 'demo', - template.model_template_id, - '--load-weights', - f'{template_work_dir}/exported_{template.model_template_id}/openvino.xml', - '--input', - os.path.join(ote_dir, args['--input']), - '--delay', - '-1'] - assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 - - -def ote_deploy_openvino_testing(template, root, ote_dir, args): - work_dir, template_work_dir, _ = get_some_vars(template, root) - deployment_dir = f'{template_work_dir}/deployed_{template.model_template_id}' - command_line = ['ote', - 'deploy', - template.model_template_id, - '--load-weights', - f'{template_work_dir}/exported_{template.model_template_id}/openvino.xml', - f'--save-model-to', - deployment_dir] - assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 - assert run(['unzip', 'openvino.zip'], - cwd=deployment_dir).returncode == 0 - assert run(['python3', '-m', 'venv', 'venv'], - cwd=os.path.join(deployment_dir, 'python')).returncode == 0 - assert run(['python3', '-m', 'pip', 'install', 'wheel'], - cwd=os.path.join(deployment_dir, 'python'), - env=collect_env_vars(os.path.join(deployment_dir, 'python'))).returncode == 0 - - # Remove ote_sdk from requirements.txt, since merge commit (that is created on CI) is not pushed to github and that's why cannot be cloned. - # Install ote_sdk from local folder instead. - # Install the demo_package with --no-deps since, requirements.txt has been embedded to the demo_package during creation. 
- remove_ote_sdk_from_requirements(os.path.join(deployment_dir, 'python', 'requirements.txt')) - assert run(['python3', '-m', 'pip', 'install', 'pip', '--upgrade'], - cwd=os.path.join(deployment_dir, 'python'), - env=collect_env_vars(os.path.join(deployment_dir, 'python'))).returncode == 0 - assert run(['python3', '-m', 'pip', 'install', '-e', os.path.join(os.path.dirname(__file__), '..', '..', 'ote_sdk')], - cwd=os.path.join(deployment_dir, 'python'), - env=collect_env_vars(os.path.join(deployment_dir, 'python'))).returncode == 0 - assert run(['python3', '-m', 'pip', 'install', '-r', os.path.join(deployment_dir, 'python', 'requirements.txt')], - cwd=os.path.join(deployment_dir, 'python'), - env=collect_env_vars(os.path.join(deployment_dir, 'python'))).returncode == 0 - assert run(['python3', '-m', 'pip', 'install', 'demo_package-0.0-py3-none-any.whl', '--no-deps'], - cwd=os.path.join(deployment_dir, 'python'), - env=collect_env_vars(os.path.join(deployment_dir, 'python'))).returncode == 0 - - # Patch demo since we are not able to run cv2.imshow on CI. - patch_demo_py(os.path.join(deployment_dir, 'python', 'demo.py'), - os.path.join(deployment_dir, 'python', 'demo_patched.py')) - - assert run(['python3', 'demo_patched.py', '-m', '../model/model.xml', '-i', os.path.join(ote_dir, args['--input'])], - cwd=os.path.join(deployment_dir, 'python'), - env=collect_env_vars(os.path.join(deployment_dir, 'python'))).returncode == 0 - - -def ote_eval_deployment_testing(template, root, ote_dir, args, threshold): - work_dir, template_work_dir, _ = get_some_vars(template, root) - command_line = ['ote', - 'eval', - template.model_template_id, - '--test-ann-file', - f'{os.path.join(ote_dir, args["--test-ann-files"])}', - '--test-data-roots', - f'{os.path.join(ote_dir, args["--test-data-roots"])}', - '--load-weights', - f'{template_work_dir}/deployed_{template.model_template_id}/openvino.zip', - '--save-performance', - f'{template_work_dir}/deployed_{template.model_template_id}/performance.json'] - assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 - assert os.path.exists(f'{template_work_dir}/deployed_{template.model_template_id}/performance.json') - with open(f'{template_work_dir}/exported_{template.model_template_id}/performance.json') as read_file: - exported_performance = json.load(read_file) - with open(f'{template_work_dir}/deployed_{template.model_template_id}/performance.json') as read_file: - deployed_performance = json.load(read_file) - - for k in exported_performance.keys(): - assert ( - abs(exported_performance[k] - deployed_performance[k]) - / (exported_performance[k] + 1e-10) - <= threshold - ), f"{exported_performance[k]=}, {deployed_performance[k]=}" - - -def ote_demo_deployment_testing(template, root, ote_dir, args): - work_dir, template_work_dir, _ = get_some_vars(template, root) - command_line = ['ote', - 'demo', - template.model_template_id, - '--load-weights', - f'{template_work_dir}/deployed_{template.model_template_id}/openvino.zip', - '--input', - os.path.join(ote_dir, args['--input']), - '--delay', - '-1'] - assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 - - -def pot_optimize_testing(template, root, ote_dir, args): - work_dir, template_work_dir, algo_backend_dir = get_some_vars(template, root) - command_line = ['ote', - 'optimize', - template.model_template_id, - '--train-ann-file', - f'{os.path.join(ote_dir, args["--train-ann-file"])}', - '--train-data-roots', - f'{os.path.join(ote_dir, args["--train-data-roots"])}', - 
'--val-ann-file', - f'{os.path.join(ote_dir, args["--val-ann-file"])}', - '--val-data-roots', - f'{os.path.join(ote_dir, args["--val-data-roots"])}', - '--load-weights', - f'{template_work_dir}/exported_{template.model_template_id}/openvino.xml', - '--save-model-to', - f'{template_work_dir}/pot_{template.model_template_id}', - ] - assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 - assert os.path.exists(f'{template_work_dir}/pot_{template.model_template_id}/openvino.xml') - assert os.path.exists(f'{template_work_dir}/pot_{template.model_template_id}/openvino.bin') - assert os.path.exists(f'{template_work_dir}/pot_{template.model_template_id}/label_schema.json') - - -def pot_eval_testing(template, root, ote_dir, args): - work_dir, template_work_dir, _ = get_some_vars(template, root) - command_line = ['ote', - 'eval', - template.model_template_id, - '--test-ann-file', - f'{os.path.join(ote_dir, args["--test-ann-files"])}', - '--test-data-roots', - f'{os.path.join(ote_dir, args["--test-data-roots"])}', - '--load-weights', - f'{template_work_dir}/pot_{template.model_template_id}/openvino.xml', - '--save-performance', - f'{template_work_dir}/pot_{template.model_template_id}/performance.json'] - assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 - assert os.path.exists(f'{template_work_dir}/pot_{template.model_template_id}/performance.json') - - -def nncf_optimize_testing(template, root, ote_dir, args): - work_dir, template_work_dir, algo_backend_dir = get_some_vars(template, root) - command_line = ['ote', - 'optimize', - template.model_template_id, - '--train-ann-file', - f'{os.path.join(ote_dir, args["--train-ann-file"])}', - '--train-data-roots', - f'{os.path.join(ote_dir, args["--train-data-roots"])}', - '--val-ann-file', - f'{os.path.join(ote_dir, args["--val-ann-file"])}', - '--val-data-roots', - f'{os.path.join(ote_dir, args["--val-data-roots"])}', - '--load-weights', - f'{template_work_dir}/trained_{template.model_template_id}/weights.pth', - '--save-model-to', - f'{template_work_dir}/nncf_{template.model_template_id}', - '--save-performance', - f'{template_work_dir}/nncf_{template.model_template_id}/train_performance.json', - ] - command_line.extend(args['train_params']) - assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 - assert os.path.exists(f'{template_work_dir}/nncf_{template.model_template_id}/weights.pth') - assert os.path.exists(f'{template_work_dir}/nncf_{template.model_template_id}/label_schema.json') - - -def nncf_export_testing(template, root): - work_dir, template_work_dir, _ = get_some_vars(template, root) - command_line = ['ote', - 'export', - template.model_template_id, - '--load-weights', - f'{template_work_dir}/nncf_{template.model_template_id}/weights.pth', - f'--save-model-to', - f'{template_work_dir}/exported_nncf_{template.model_template_id}'] - assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 - assert os.path.exists(f'{template_work_dir}/exported_nncf_{template.model_template_id}/openvino.xml') - assert os.path.exists(f'{template_work_dir}/exported_nncf_{template.model_template_id}/openvino.bin') - assert os.path.exists(f'{template_work_dir}/exported_nncf_{template.model_template_id}/label_schema.json') - original_bin_size = os.path.getsize(f'{template_work_dir}/exported_{template.model_template_id}/openvino.bin') - compressed_bin_size = os.path.getsize(f'{template_work_dir}/exported_nncf_{template.model_template_id}/openvino.bin') - assert compressed_bin_size < 
original_bin_size, f"{compressed_bin_size=}, {original_bin_size=}" - - -def nncf_eval_testing(template, root, ote_dir, args, threshold): - work_dir, template_work_dir, _ = get_some_vars(template, root) - command_line = ['ote', - 'eval', - template.model_template_id, - '--test-ann-file', - f'{os.path.join(ote_dir, args["--test-ann-files"])}', - '--test-data-roots', - f'{os.path.join(ote_dir, args["--test-data-roots"])}', - '--load-weights', - f'{template_work_dir}/nncf_{template.model_template_id}/weights.pth', - '--save-performance', - f'{template_work_dir}/nncf_{template.model_template_id}/performance.json'] - assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 - assert os.path.exists(f'{template_work_dir}/nncf_{template.model_template_id}/performance.json') - with open(f'{template_work_dir}/nncf_{template.model_template_id}/train_performance.json') as read_file: - trained_performance = json.load(read_file) - with open(f'{template_work_dir}/nncf_{template.model_template_id}/performance.json') as read_file: - evaluated_performance = json.load(read_file) - - for k in trained_performance.keys(): - assert ( - abs(trained_performance[k] - evaluated_performance[k]) - / (trained_performance[k] + 1e-10) - <= threshold - ), f"{trained_performance[k]=}, {evaluated_performance[k]=}" - - -def nncf_eval_openvino_testing(template, root, ote_dir, args): - work_dir, template_work_dir, _ = get_some_vars(template, root) - command_line = ['ote', - 'eval', - template.model_template_id, - '--test-ann-file', - f'{os.path.join(ote_dir, args["--test-ann-files"])}', - '--test-data-roots', - f'{os.path.join(ote_dir, args["--test-data-roots"])}', - '--load-weights', - f'{template_work_dir}/exported_nncf_{template.model_template_id}/openvino.xml', - '--save-performance', - f'{template_work_dir}/exported_nncf_{template.model_template_id}/performance.json'] - assert run(command_line, env=collect_env_vars(work_dir)).returncode == 0 - assert os.path.exists(f'{template_work_dir}/exported_nncf_{template.model_template_id}/performance.json') diff --git a/tests/ote_cli/external/anomaly/ote_cli b/tests/ote_cli/external/anomaly/ote_cli new file mode 120000 index 00000000000..b7a8e6b55c1 --- /dev/null +++ b/tests/ote_cli/external/anomaly/ote_cli @@ -0,0 +1 @@ +../../../../external/anomaly/tests/ote_cli \ No newline at end of file diff --git a/tests/ote_cli/external/deep-object-reid/ote_cli b/tests/ote_cli/external/deep-object-reid/ote_cli new file mode 120000 index 00000000000..ff593f72ea6 --- /dev/null +++ b/tests/ote_cli/external/deep-object-reid/ote_cli @@ -0,0 +1 @@ +../../../../external/deep-object-reid/tests/ote_cli \ No newline at end of file diff --git a/tests/ote_cli/external/mmdetection/ote_cli b/tests/ote_cli/external/mmdetection/ote_cli new file mode 120000 index 00000000000..7c90d654753 --- /dev/null +++ b/tests/ote_cli/external/mmdetection/ote_cli @@ -0,0 +1 @@ +../../../../external/mmdetection/tests/ote_cli \ No newline at end of file diff --git a/tests/ote_cli/external/mmsegmentation/ote_cli b/tests/ote_cli/external/mmsegmentation/ote_cli new file mode 120000 index 00000000000..898e3d4a498 --- /dev/null +++ b/tests/ote_cli/external/mmsegmentation/ote_cli @@ -0,0 +1 @@ +../../../../external/mmsegmentation/tests/ote_cli \ No newline at end of file diff --git a/tests/ote_cli/misc/test_code_checks.py b/tests/ote_cli/misc/test_code_checks.py new file mode 100644 index 00000000000..a42eb7ed770 --- /dev/null +++ b/tests/ote_cli/misc/test_code_checks.py @@ -0,0 +1,24 @@ +# Copyright (C) 2021 Intel 
Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions +# and limitations under the License. + +import os +from subprocess import run + +from ote_sdk.test_suite.e2e_test_system import e2e_pytest_component + +class TestCodeChecks: + @e2e_pytest_component + def test_code_checks(self): + wd = os.path.join(os.path.dirname(__file__), "..", "..", "..") + assert run(["./tests/run_code_checks.sh"], cwd=wd, check=True).returncode == 0 diff --git a/tests/run_model_templates_tests.py b/tests/run_model_templates_tests.py index 568a501678a..eb654533040 100644 --- a/tests/run_model_templates_tests.py +++ b/tests/run_model_templates_tests.py @@ -4,7 +4,7 @@ import sys from subprocess import run -from tests.ote_cli.common import collect_env_vars +from ote_cli.utils.tests import collect_env_vars ALGO_ROOT_DIR = "external" ALGO_DIRS = [ @@ -66,7 +66,7 @@ def test(run_algo_tests): success *= res for algo_dir in ALGO_DIRS: if run_algo_tests[algo_dir]: - command = ["pytest", os.path.join("tests", "ote_cli", algo_dir), "-v", "--durations=10"] + command = ["pytest", os.path.join(algo_dir, "tests", "ote_cli"), "-v", "--durations=10"] try: res = run(command, env=collect_env_vars(wd), check=True).returncode == 0 except: diff --git a/tests/run_model_templates_tests.sh b/tests/run_model_templates_tests.sh index 9b892f29275..9de5d74a3c4 100755 --- a/tests/run_model_templates_tests.sh +++ b/tests/run_model_templates_tests.sh @@ -1,11 +1,7 @@ -./tests/run_code_checks.sh || exit 1 - python3 -m venv venv || exit 1 . venv/bin/activate || exit 1 pip install --upgrade pip || exit 1 pip install -e ote_cli || exit 1 -pip install -e $OTE_SDK_PATH || exit 1 - -export PYTHONPATH=${PYTHONPATH}:`pwd` +pip install -e ote_sdk || exit 1 python tests/run_model_templates_tests.py `pwd` $@ || exit 1
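
For reference, a minimal usage sketch of how the relocated ote_cli tests might be invoked after this change, assuming the repository root is the working directory and the prerequisites from the corresponding init_venv.sh are already installed:

# Full run through the wrapper script updated above:
./tests/run_model_templates_tests.sh

# Or run a single algo backend directly, mirroring what
# tests/run_model_templates_tests.py now does:
pip install -e ote_cli   # CLI entry points used by the tests
pip install -e ote_sdk   # OTE SDK is now installed from the local folder
pytest external/mmdetection/tests/ote_cli -v --durations=10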