Fix reg tests (#2008)
* Edit regression tests

* Change the dataset root

* Fix missed typo

* Fix pre-commit
sungmanc authored Apr 17, 2023
1 parent 22429d2 commit cbce4e1
Showing 14 changed files with 831 additions and 391 deletions.
34 changes: 24 additions & 10 deletions tests/regression/action/test_action_classification.py
@@ -19,16 +19,21 @@
 )
 from tests.test_suite.e2e_test_system import e2e_pytest_component
 from tests.test_suite.run_test_command import (
-    otx_eval_compare,
-    otx_eval_e2e_eval_time,
-    otx_eval_e2e_train_time,
-    otx_eval_openvino_testing,
     otx_export_testing,
     otx_train_testing,
-    pot_eval_testing,
     pot_optimize_testing,
 )
+
+from tests.regression.regression_command import (
+    regression_eval_testing,
+    regression_openvino_testing,
+    regression_deployment_testing,
+    regression_nncf_eval_testing,
+    regression_pot_eval_testing,
+    regression_train_time_testing,
+    regression_eval_time_testing,
+)
 
 # Configurations for regression test.
 TASK_TYPE = "action_classification"
 TRAIN_TYPE = "supervised"
@@ -68,7 +73,7 @@ def test_otx_train(self, template, tmp_dir_path):
         train_elapsed_time = timer() - train_start_time
 
         infer_start_time = timer()
-        otx_eval_compare(
+        test_result = regression_eval_testing(
             template,
             tmp_dir_path,
             otx_dir,
@@ -82,24 +87,29 @@
         self.performance[template.name][TIME_LOG["infer_time"]] = round(infer_elapsed_time, 3)
         result_dict[TASK_TYPE][LABEL_TYPE][TRAIN_TYPE]["train"].append(self.performance)
 
+        assert test_result["passed"] is True, test_result["log"]
+
     @e2e_pytest_component
     @pytest.mark.parametrize("template", templates, ids=templates_ids)
     def test_otx_train_kpi_test(self, template):
         results = result_dict[TASK_TYPE][self.label_type][TRAIN_TYPE]["train"]
         performance = get_template_performance(results, template)
 
-        otx_eval_e2e_train_time(
+        kpi_train_result = regression_train_time_testing(
             train_time_criteria=action_cls_regression_config["kpi_e2e_train_time_criteria"]["train"],
             e2e_train_time=performance[template.name][TIME_LOG["train_time"]],
             template=template,
         )
 
-        otx_eval_e2e_eval_time(
+        kpi_eval_result = regression_eval_time_testing(
             eval_time_criteria=action_cls_regression_config["kpi_e2e_eval_time_criteria"]["train"],
             e2e_eval_time=performance[template.name][TIME_LOG["infer_time"]],
             template=template,
         )
 
+        assert kpi_train_result["passed"] is True, kpi_train_result["log"]
+        assert kpi_eval_result["passed"] is True, kpi_eval_result["log"]
+
     @e2e_pytest_component
     @pytest.mark.parametrize("template", templates, ids=templates_ids)
     def test_otx_export_eval_openvino(self, template, tmp_dir_path):
@@ -113,7 +123,7 @@ def test_otx_export_eval_openvino(self, template, tmp_dir_path):
         export_elapsed_time = timer() - export_start_time
 
         export_eval_start_time = timer()
-        otx_eval_openvino_testing(
+        test_result = regression_openvino_testing(
             template,
             tmp_dir_path,
             otx_dir,
@@ -129,6 +139,8 @@
         self.performance[template.name][TIME_LOG["export_eval_time"]] = round(export_eval_elapsed_time, 3)
         result_dict[TASK_TYPE][self.label_type][TRAIN_TYPE]["export"].append(self.performance)
 
+        assert test_result["passed"] is True, test_result["log"]
+
     @e2e_pytest_component
     @pytest.mark.parametrize("template", templates, ids=templates_ids)
     def test_pot_optimize_eval(self, template, tmp_dir_path):
@@ -140,7 +152,7 @@ def test_pot_optimize_eval(self, template, tmp_dir_path):
         pot_elapsed_time = timer() - pot_start_time
 
         pot_eval_start_time = timer()
-        pot_eval_testing(
+        test_result = regression_pot_eval_testing(
             template,
             tmp_dir_path,
             otx_dir,
@@ -154,3 +166,5 @@
         self.performance[template.name][TIME_LOG["pot_time"]] = round(pot_elapsed_time, 3)
         self.performance[template.name][TIME_LOG["pot_eval_time"]] = round(pot_eval_elapsed_time, 3)
         result_dict[TASK_TYPE][self.label_type][TRAIN_TYPE]["pot"].append(self.performance)
+
+        assert test_result["passed"] is True, test_result["log"]
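
Across all of the edited files the pattern is the same: the new regression commands return a result instead of asserting internally, and each test records its timings before asserting on that result. Below is a minimal sketch of the contract the tests appear to rely on; only the {"passed": bool, "log": str} return shape is visible in this diff, while the helper name, signature, and threshold logic are illustrative assumptions, not the actual code in tests/regression/regression_command.py:

# Hypothetical sketch of the helper contract assumed by the tests above.
from typing import Any, Dict


def regression_eval_testing_sketch(measured: float, criteria: float, rel_tol: float = 0.05) -> Dict[str, Any]:
    """Compare a measured metric against a stored regression criterion."""
    # Allow a small relative drop below the stored criterion (assumed tolerance).
    passed = measured >= criteria * (1.0 - rel_tol)
    log = f"measured={measured:.4f}, criteria={criteria:.4f}, rel_tol={rel_tol:.0%}, passed={passed}"
    # Returning the verdict instead of asserting lets the caller record
    # elapsed times first and fail afterwards with a readable message.
    return {"passed": passed, "log": log}


# Usage mirroring the tests: log performance, then assert on the result.
test_result = regression_eval_testing_sketch(measured=0.81, criteria=0.80)
assert test_result["passed"] is True, test_result["log"]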
24 changes: 18 additions & 6 deletions tests/regression/action/test_action_detection.py
@@ -19,12 +19,19 @@
 )
 from tests.test_suite.e2e_test_system import e2e_pytest_component
 from tests.test_suite.run_test_command import (
-    otx_eval_compare,
-    otx_eval_e2e_eval_time,
-    otx_eval_e2e_train_time,
     otx_train_testing,
 )
+
+from tests.regression.regression_command import (
+    regression_eval_testing,
+    regression_openvino_testing,
+    regression_deployment_testing,
+    regression_nncf_eval_testing,
+    regression_pot_eval_testing,
+    regression_train_time_testing,
+    regression_eval_time_testing,
+)
 
 # Configurations for regression test.
 TASK_TYPE = "action_detection"
 TRAIN_TYPE = "supervised"
@@ -64,7 +71,7 @@ def test_otx_train(self, template, tmp_dir_path):
         train_elapsed_time = timer() - train_start_time
 
         infer_start_time = timer()
-        otx_eval_compare(
+        test_result = regression_eval_testing(
             template,
             tmp_dir_path,
             otx_dir,
@@ -78,20 +85,25 @@
         self.performance[template.name][TIME_LOG["infer_time"]] = round(infer_elapsed_time, 3)
         result_dict[TASK_TYPE][LABEL_TYPE][TRAIN_TYPE]["train"].append(self.performance)
 
+        assert test_result["passed"] is True, test_result["log"]
+
     @e2e_pytest_component
     @pytest.mark.parametrize("template", templates, ids=templates_ids)
     def test_otx_train_kpi_test(self, template):
         results = result_dict[TASK_TYPE][self.label_type][TRAIN_TYPE]["train"]
         performance = get_template_performance(results, template)
 
-        otx_eval_e2e_train_time(
+        kpi_train_result = regression_train_time_testing(
             train_time_criteria=action_det_regression_config["kpi_e2e_train_time_criteria"]["train"],
             e2e_train_time=performance[template.name][TIME_LOG["train_time"]],
             template=template,
         )
 
-        otx_eval_e2e_eval_time(
+        kpi_eval_result = regression_eval_time_testing(
             eval_time_criteria=action_det_regression_config["kpi_e2e_eval_time_criteria"]["train"],
             e2e_eval_time=performance[template.name][TIME_LOG["infer_time"]],
             template=template,
         )
+
+        assert kpi_train_result["passed"] is True, kpi_train_result["log"]
+        assert kpi_eval_result["passed"] is True, kpi_eval_result["log"]
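
The KPI tests apply the same returned-result pattern to wall-clock budgets: regression_train_time_testing and regression_eval_time_testing compare a measured duration against a per-template criterion. A hedged sketch of how such a time check might look — the tolerance multiplier and parameter names are assumptions, not taken from this diff:

# Hypothetical sketch of a time-budget check; the tolerance is assumed.
from typing import Any, Dict


def regression_time_testing_sketch(time_criteria: float, measured_time: float, multiplier: float = 1.2) -> Dict[str, Any]:
    """Pass while the measured time stays within multiplier x the criterion."""
    passed = measured_time <= time_criteria * multiplier
    log = f"measured={measured_time:.3f}s, criteria={time_criteria:.3f}s, budget=x{multiplier}, passed={passed}"
    return {"passed": passed, "log": log}


# Usage mirroring test_otx_train_kpi_test: check the train time, then assert.
kpi_train_result = regression_time_testing_sketch(time_criteria=120.0, measured_time=112.6)
assert kpi_train_result["passed"] is True, kpi_train_result["log"]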
44 changes: 30 additions & 14 deletions tests/regression/anomaly/test_anomaly_classificaiton.py
@@ -20,20 +20,23 @@
 )
 from tests.test_suite.e2e_test_system import e2e_pytest_component
 from tests.test_suite.run_test_command import (
-    nncf_eval_testing,
     nncf_optimize_testing,
     otx_deploy_openvino_testing,
-    otx_eval_compare,
-    otx_eval_deployment_testing,
-    otx_eval_e2e_eval_time,
-    otx_eval_e2e_train_time,
-    otx_eval_openvino_testing,
     otx_export_testing,
     otx_train_testing,
-    pot_eval_testing,
     pot_optimize_testing,
 )
+
+from tests.regression.regression_command import (
+    regression_eval_testing,
+    regression_openvino_testing,
+    regression_deployment_testing,
+    regression_nncf_eval_testing,
+    regression_pot_eval_testing,
+    regression_train_time_testing,
+    regression_eval_time_testing,
+)
 
 # Configurations for regression test.
 TASK_TYPE = "anomaly_classification"
 SAMPLED_ANOMALY_DATASET_CATEGORIES = random.sample(ANOMALY_DATASET_CATEGORIES, 15)
@@ -82,7 +85,7 @@ def test_otx_train(self, template, tmp_dir_path, category):
         train_elapsed_time = timer() - train_start_time
 
         infer_start_time = timer()
-        otx_eval_compare(
+        test_result = regression_eval_testing(
             template,
             tmp_dir_path,
             otx_dir,
@@ -96,6 +99,8 @@
         self.performance[template.name][TIME_LOG["infer_time"]] = round(infer_elapsed_time, 3)
         result_dict[TASK_TYPE]["train"][category].append(self.performance)
 
+        assert test_result["passed"] is True, test_result["log"]
+
     @e2e_pytest_component
     @pytest.mark.parametrize("template", templates, ids=templates_ids)
     @pytest.mark.parametrize("category", SAMPLED_ANOMALY_DATASET_CATEGORIES)
@@ -105,7 +110,7 @@ def test_otx_train_kpi_test(self, template, category):
         performance = get_template_performance(results, template)
 
         # Compare train+val time with the KPI criteria.
-        otx_eval_e2e_train_time(
+        kpi_train_result = regression_train_time_testing(
             train_time_criteria=anomaly_classification_regression_config["kpi_e2e_train_time_criteria"]["train"][
                 category
             ],
@@ -114,14 +119,17 @@
         )
 
         # Compare evaluation time with the KPI criteria.
-        otx_eval_e2e_eval_time(
+        kpi_eval_result = regression_eval_time_testing(
             eval_time_criteria=anomaly_classification_regression_config["kpi_e2e_eval_time_criteria"]["train"][
                 category
             ],
             e2e_eval_time=performance[template.name][TIME_LOG["infer_time"]],
             template=template,
         )
 
+        assert kpi_train_result["passed"] is True, kpi_train_result["log"]
+        assert kpi_eval_result["passed"] is True, kpi_eval_result["log"]
+
     @e2e_pytest_component
     @pytest.mark.parametrize("template", templates, ids=templates_ids)
     @pytest.mark.parametrize("category", SAMPLED_ANOMALY_DATASET_CATEGORIES)
@@ -135,7 +143,7 @@ def test_otx_export_eval_openvino(self, template, tmp_dir_path, category):
         export_elapsed_time = timer() - export_start_time
 
         export_eval_start_time = timer()
-        otx_eval_openvino_testing(
+        test_result = regression_openvino_testing(
             template,
             tmp_dir_path,
             otx_dir,
@@ -151,6 +159,8 @@
         self.performance[template.name][TIME_LOG["export_eval_time"]] = round(export_eval_elapsed_time, 3)
         result_dict[TASK_TYPE]["export"][category].append(self.performance)
 
+        assert test_result["passed"] is True, test_result["log"]
+
     @e2e_pytest_component
     @pytest.mark.parametrize("template", templates, ids=templates_ids)
     @pytest.mark.parametrize("category", SAMPLED_ANOMALY_DATASET_CATEGORIES)
@@ -164,7 +174,7 @@ def test_otx_deploy_eval_deployment(self, template, tmp_dir_path, category):
         deploy_elapsed_time = timer() - deploy_start_time
 
         deploy_eval_start_time = timer()
-        otx_eval_deployment_testing(
+        test_result = regression_deployment_testing(
             template,
             tmp_dir_path,
             otx_dir,
@@ -180,6 +190,8 @@
         self.performance[template.name][TIME_LOG["deploy_eval_time"]] = round(deploy_eval_elapsed_time, 3)
         result_dict[TASK_TYPE]["deploy"][category].append(self.performance)
 
+        assert test_result["passed"] is True, test_result["log"]
+
     @e2e_pytest_component
     @pytest.mark.parametrize("template", templates, ids=templates_ids)
     @pytest.mark.parametrize("category", SAMPLED_ANOMALY_DATASET_CATEGORIES)
@@ -196,7 +208,7 @@ def test_nncf_optimize_eval(self, template, tmp_dir_path, category):
         nncf_elapsed_time = timer() - nncf_start_time
 
         nncf_eval_start_time = timer()
-        nncf_eval_testing(
+        test_result = regression_nncf_eval_testing(
             template,
             tmp_dir_path,
             otx_dir,
@@ -212,6 +224,8 @@
         self.performance[template.name][TIME_LOG["nncf_eval_time"]] = round(nncf_eval_elapsed_time, 3)
         result_dict[TASK_TYPE]["nncf"][category].append(self.performance)
 
+        assert test_result["passed"] is True, test_result["log"]
+
     @e2e_pytest_component
     @pytest.mark.parametrize("template", templates, ids=templates_ids)
     @pytest.mark.parametrize("category", SAMPLED_ANOMALY_DATASET_CATEGORIES)
@@ -225,7 +239,7 @@ def test_pot_optimize_eval(self, template, tmp_dir_path, category):
         pot_elapsed_time = timer() - pot_start_time
 
         pot_eval_start_time = timer()
-        pot_eval_testing(
+        test_result = regression_pot_eval_testing(
             template,
             tmp_dir_path,
             otx_dir,
@@ -239,3 +253,5 @@
         self.performance[template.name][TIME_LOG["pot_time"]] = round(pot_elapsed_time, 3)
         self.performance[template.name][TIME_LOG["pot_eval_time"]] = round(pot_eval_elapsed_time, 3)
         result_dict[TASK_TYPE]["pot"][category].append(self.performance)
+
+        assert test_result["passed"] is True, test_result["log"]