From 2597156e0c25537dcfe531552b15a69243587717 Mon Sep 17 00:00:00 2001 From: sungmanc Date: Fri, 7 Jul 2023 11:40:49 +0900 Subject: [PATCH 1/8] Make black happy --- .../adapters/mmcls/datasets/otx_datasets.py | 5 ++- .../models/classifiers/sam_classifier.py | 32 +++++++++++++++---- src/otx/algorithms/classification/task.py | 2 ++ 3 files changed, 31 insertions(+), 8 deletions(-) diff --git a/src/otx/algorithms/classification/adapters/mmcls/datasets/otx_datasets.py b/src/otx/algorithms/classification/adapters/mmcls/datasets/otx_datasets.py index 99194cbc23e..3d984f8502d 100644 --- a/src/otx/algorithms/classification/adapters/mmcls/datasets/otx_datasets.py +++ b/src/otx/algorithms/classification/adapters/mmcls/datasets/otx_datasets.py @@ -416,7 +416,10 @@ def evaluate( ) eval_results["MHAcc"] = total_acc - eval_results["avgClsAcc"] = total_acc_sl / self.hierarchical_info["num_multiclass_heads"] + if self.hierarchical_info["num_multiclass_head"] > 0: + eval_results["avgClsAcc"] = total_acc_sl / self.hierarchical_info["num_multiclass_heads"] + else: + eval_results["avgClsAcc"] = total_acc_sl eval_results["mAP"] = mAP_value eval_results["accuracy"] = total_acc diff --git a/src/otx/algorithms/classification/adapters/mmcls/models/classifiers/sam_classifier.py b/src/otx/algorithms/classification/adapters/mmcls/models/classifiers/sam_classifier.py index 68249a8f6be..5b03e7c5f0a 100644 --- a/src/otx/algorithms/classification/adapters/mmcls/models/classifiers/sam_classifier.py +++ b/src/otx/algorithms/classification/adapters/mmcls/models/classifiers/sam_classifier.py @@ -16,6 +16,14 @@ logger = get_logger() +def is_hierarchical_chkpt(chkpt: dict): + """Detect whether previous checkpoint is hierarchical or not.""" + for k, v in chkpt.items(): + if "fc" in k: + return True + return False + + @CLASSIFIERS.register_module() class SAMImageClassifier(SAMClassifierMixin, ClsLossDynamicsTrackingMixin, ImageClassifier): """SAM-enabled ImageClassifier.""" @@ -193,11 +201,19 @@ def load_state_dict_pre_hook(module, state_dict, prefix, *args, **kwargs): # no def load_state_dict_mixing_hook( model, model_classes, chkpt_classes, chkpt_dict, prefix, *args, **kwargs ): # pylint: disable=unused-argument, too-many-branches, too-many-locals - """Modify input state_dict according to class name matching before weight loading.""" + """Modify input state_dict according to class name matching before weight loading. + + If previous training is hierarchical training, + then the current training should be hierarchical training. vice versa. + + """ backbone_type = type(model.backbone).__name__ if backbone_type not in ["OTXMobileNetV3", "OTXEfficientNet", "OTXEfficientNetV2"]: return + if model.hierarchical != is_hierarchical_chkpt(chkpt_dict): + return + # Dst to src mapping index model_classes = list(model_classes) chkpt_classes = list(chkpt_classes) @@ -249,13 +265,15 @@ def load_state_dict_mixing_hook( continue # Mix weights - chkpt_param = chkpt_dict[chkpt_name] - for module, c in enumerate(model2chkpt): - if c >= 0: - model_param[module].copy_(chkpt_param[c]) + # NOTE: Label mix is not supported for H-label classification. 
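# Illustrative sketch (hypothetical label sets, not taken from this patch): the hook
# above builds model2chkpt, a dst-to-src index map from the classes of the model being
# trained to the classes stored in the checkpoint, so matching classifier rows can be
# copied while genuinely new classes keep their fresh initialization. For example,
#   model_classes = ["cat", "dog", "bird"]   # current training
#   chkpt_classes = ["dog", "cat"]           # previous checkpoint
#   model2chkpt   = [1, 0, -1]               # a negative index marks a class absent from the checkpoint
# For hierarchical (H-label) heads the flat per-class rows no longer correspond
# one-to-one to label indices, so the branch below leaves the checkpoint entry untouched
# instead of mixing it; the is_hierarchical_chkpt() guard earlier in this hook already
# ensures mixing only proceeds when the checkpoint and the current model agree on
# whether training is hierarchical.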
+ if not model.hierarchical: + chkpt_param = chkpt_dict[chkpt_name] + for module, c in enumerate(model2chkpt): + if c >= 0: + model_param[module].copy_(chkpt_param[c]) - # Replace checkpoint weight by mixed weights - chkpt_dict[chkpt_name] = model_param + # Replace checkpoint weight by mixed weights + chkpt_dict[chkpt_name] = model_param def extract_feat(self, img): """Directly extract features from the backbone + neck. diff --git a/src/otx/algorithms/classification/task.py b/src/otx/algorithms/classification/task.py index eb6fab8161d..265b4cbeb2a 100644 --- a/src/otx/algorithms/classification/task.py +++ b/src/otx/algorithms/classification/task.py @@ -47,6 +47,8 @@ from otx.api.entities.inference_parameters import ( default_progress_callback as default_infer_progress_callback, ) +from otx.api.entities.label import LabelEntity +from otx.api.entities.label_schema import LabelGroup from otx.api.entities.metadata import FloatMetadata, FloatType from otx.api.entities.metrics import ( CurveMetric, From 3e8a307f6423527ad63ba5ff924dc3abf3e8505a Mon Sep 17 00:00:00 2001 From: sungmanc Date: Fri, 7 Jul 2023 11:40:49 +0900 Subject: [PATCH 2/8] Fix conflicts --- src/otx/algorithms/classification/task.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/src/otx/algorithms/classification/task.py b/src/otx/algorithms/classification/task.py index 265b4cbeb2a..6f863c0bb1a 100644 --- a/src/otx/algorithms/classification/task.py +++ b/src/otx/algorithms/classification/task.py @@ -127,16 +127,22 @@ def __init__(self, task_environment: TaskEnvironment, output_path: Optional[str] if self._task_environment.model is not None: self._load_model() + def _is_multi_label(self, label_groups: List[LabelGroup], all_labels: List[LabelEntity]): + """Check whether the current training mode is multi-label or not.""" + # NOTE: In the current Geti, multi-label should have `___` symbol for all group names. 
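# Hedged illustration (hypothetical schema, not taken from this patch): the check below
# treats a schema as multi-label only when there is more than one group, the number of
# groups equals the number of labels, and every group name carries the Geti "___"
# marker, e.g.
#   group names: "root___group_car", "root___group_tree", "root___group_bug"
#   labels:      "car", "tree", "bug"                      -> multi-label
# The h-label test asset added later in this series keeps plain group names such as
# "shape" and "rectangle default" (only one of its groups uses "___"), so it is not
# multi-label and _set_train_mode() falls through to the hierarchical branch.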
+ find_multilabel_symbol = ["___" in getattr(i, "name", "") for i in label_groups] + return ( + (len(label_groups) > 1) and (len(label_groups) == len(all_labels)) and (False not in find_multilabel_symbol) + ) + def _set_train_mode(self): - self._multilabel = len(self._task_environment.label_schema.get_groups(False)) > 1 and len( - self._task_environment.label_schema.get_groups(False) - ) == len( - self._task_environment.get_labels(include_empty=False) - ) # noqa:E127 + label_groups = self._task_environment.label_schema.get_groups(include_empty=False) + all_labels = self._task_environment.label_schema.get_labels(include_empty=False) + + self._multilabel = self._is_multi_label(label_groups, all_labels) if self._multilabel: logger.info("Classification mode: multilabel") - - if not self._multilabel and len(self._task_environment.label_schema.get_groups(False)) > 1: + elif len(label_groups) > 1: logger.info("Classification mode: hierarchical") self._hierarchical = True self._hierarchical_info = get_hierarchical_info(self._task_environment.label_schema) From 76030182e040d60617ae41b89e45bc5cba6584df Mon Sep 17 00:00:00 2001 From: Sungman Cho Date: Fri, 7 Jul 2023 17:51:19 +0900 Subject: [PATCH 3/8] Merge-back: add test datasets and edit the test code --- .../annotations/train.json | 181 ++++++++++++++++++ .../annotations/validation.json | 141 ++++++++++++++ .../images/train/a.jpg | Bin 0 -> 631 bytes .../images/train/b.jpg | Bin 0 -> 631 bytes .../images/validation/d.jpg | Bin 0 -> 631 bytes .../annotations/train.json | 20 +- .../annotations/validation.json | 22 ++- .../annotations/train.json | 68 +++++++ .../annotations/validation.json | 54 ++++++ .../images/train/a.jpg | Bin 0 -> 631 bytes .../images/train/b.jpg | Bin 0 -> 631 bytes .../images/validation/d.jpg | Bin 0 -> 631 bytes .../cli/classification/test_classification.py | 123 +++++------- 13 files changed, 525 insertions(+), 84 deletions(-) create mode 100755 tests/assets/datumaro_h-label_class_decremental/annotations/train.json create mode 100755 tests/assets/datumaro_h-label_class_decremental/annotations/validation.json create mode 100644 tests/assets/datumaro_h-label_class_decremental/images/train/a.jpg create mode 100644 tests/assets/datumaro_h-label_class_decremental/images/train/b.jpg create mode 100644 tests/assets/datumaro_h-label_class_decremental/images/validation/d.jpg create mode 100755 tests/assets/datumaro_multilabel_class_decremental/annotations/train.json create mode 100755 tests/assets/datumaro_multilabel_class_decremental/annotations/validation.json create mode 100644 tests/assets/datumaro_multilabel_class_decremental/images/train/a.jpg create mode 100644 tests/assets/datumaro_multilabel_class_decremental/images/train/b.jpg create mode 100644 tests/assets/datumaro_multilabel_class_decremental/images/validation/d.jpg diff --git a/tests/assets/datumaro_h-label_class_decremental/annotations/train.json b/tests/assets/datumaro_h-label_class_decremental/annotations/train.json new file mode 100755 index 00000000000..4bb4caae751 --- /dev/null +++ b/tests/assets/datumaro_h-label_class_decremental/annotations/train.json @@ -0,0 +1,181 @@ +{ + "info": {}, + "categories": { + "label": { + "labels": [ + { + "name": "right", + "parent": "triangle", + "attributes": [] + }, + { + "name": "multi a", + "parent": "triangle", + "attributes": [] + }, + { + "name": "equilateral", + "parent": "triangle", + "attributes": [] + }, + { + "name": "square", + "parent": "rectangle", + "attributes": [] + }, + { + "name": "triangle", + "parent": "", + 
"attributes": [] + }, + { + "name": "non_square", + "parent": "rectangle", + "attributes": [] + }, + { + "name": "rectangle", + "parent": "", + "attributes": [] + } + ], + "label_groups": [ + { + "name": "shape", + "group_type": "exclusive", + "labels": ["rectangle", "triangle"] + }, + { + "name": "rectangle default", + "group_type": "exclusive", + "labels": ["non_square", "square"] + }, + { + "name": "triangle default", + "group_type": "exclusive", + "labels": ["equilateral", "right"] + }, + { + "name": "shape___multiple example___multi a", + "group_type": "exclusive", + "labels": ["multi a"] + } + ], + "attributes": [] + }, + "mask": { + "colormap": [ + { + "label_id": 0, + "r": 129, + "g": 64, + "b": 123 + }, + { + "label_id": 1, + "r": 91, + "g": 105, + "b": 255 + }, + { + "label_id": 2, + "r": 91, + "g": 105, + "b": 255 + }, + { + "label_id": 3, + "r": 255, + "g": 86, + "b": 98 + }, + { + "label_id": 4, + "r": 204, + "g": 148, + "b": 218 + }, + { + "label_id": 5, + "r": 0, + "g": 251, + "b": 87 + }, + { + "label_id": 6, + "r": 84, + "g": 143, + "b": 173 + } + ] + } + }, + "items": [ + { + "id": "a", + "annotations": [ + { + "id": 0, + "type": "label", + "attributes": {}, + "group": 0, + "label_id": 4 + }, + { + "id": 0, + "type": "label", + "attributes": {}, + "group": 0, + "label_id": 5 + }, + { + "id": 0, + "type": "label", + "attributes": {}, + "group": 0, + "label_id": 1 + } + ], + "image": { + "path": "a.jpg", + "size": [10, 5] + }, + "media": { + "path": "" + } + }, + { + "id": "b", + "annotations": [ + { + "id": 0, + "type": "label", + "attributes": {}, + "group": 0, + "label_id": 6 + }, + { + "id": 0, + "type": "label", + "attributes": {}, + "group": 0, + "label_id": 5 + }, + { + "id": 0, + "type": "label", + "attributes": {}, + "group": 0, + "label_id": 2 + } + ], + "image": { + "path": "b.jpg", + "size": [10, 5] + }, + "media": { + "path": "" + } + } + ] +} diff --git a/tests/assets/datumaro_h-label_class_decremental/annotations/validation.json b/tests/assets/datumaro_h-label_class_decremental/annotations/validation.json new file mode 100755 index 00000000000..d97956af708 --- /dev/null +++ b/tests/assets/datumaro_h-label_class_decremental/annotations/validation.json @@ -0,0 +1,141 @@ +{ + "info": {}, + "categories": { + "label": { + "labels": [ + { + "name": "right", + "parent": "triangle", + "attributes": [] + }, + { + "name": "multi a", + "parent": "triangle", + "attributes": [] + }, + { + "name": "equilateral", + "parent": "triangle", + "attributes": [] + }, + { + "name": "square", + "parent": "rectangle", + "attributes": [] + }, + { + "name": "triangle", + "parent": "", + "attributes": [] + }, + { + "name": "non_square", + "parent": "rectangle", + "attributes": [] + }, + { + "name": "rectangle", + "parent": "", + "attributes": [] + } + ], + "label_groups": [ + { + "name": "shape", + "group_type": "exclusive", + "labels": ["rectangle", "triangle"] + }, + { + "name": "rectangle default", + "group_type": "exclusive", + "labels": ["non_square", "square"] + }, + { + "name": "triangle default", + "group_type": "exclusive", + "labels": ["equilateral", "right"] + }, + { + "name": "shape___multiple example___multi a", + "group_type": "exclusive", + "labels": ["multi a"] + } + ], + "attributes": [] + }, + "mask": { + "colormap": [ + { + "label_id": 0, + "r": 129, + "g": 64, + "b": 123 + }, + { + "label_id": 1, + "r": 91, + "g": 105, + "b": 255 + }, + { + "label_id": 2, + "r": 91, + "g": 105, + "b": 255 + }, + { + "label_id": 3, + "r": 255, + "g": 86, + "b": 98 + }, + { + 
"label_id": 4, + "r": 204, + "g": 148, + "b": 218 + }, + { + "label_id": 5, + "r": 0, + "g": 251, + "b": 87 + }, + { + "label_id": 6, + "r": 84, + "g": 143, + "b": 173 + } + ] + } + }, + "items": [ + { + "id": "d", + "annotations": [ + { + "id": 0, + "type": "label", + "attributes": {}, + "group": 0, + "label_id": 5 + }, + { + "id": 0, + "type": "label", + "attributes": {}, + "group": 0, + "label_id": 2 + } + ], + "image": { + "path": "d.jpg", + "size": [10, 5] + }, + "media": { + "path": "" + } + } + ] +} diff --git a/tests/assets/datumaro_h-label_class_decremental/images/train/a.jpg b/tests/assets/datumaro_h-label_class_decremental/images/train/a.jpg new file mode 100644 index 0000000000000000000000000000000000000000..222682d80bf9740d8eb672035ae34a240f949592 GIT binary patch literal 631 zcmex=^(PF6}rMnOeST|r4lSw=>~TvNxu(8R<c1}I=;VrF4wW9Q)H;sz?% zD!{d!pzFb!U9xX3zTPI5o8roG<0MW4oqZMDikqloVbuf*=gfJ(V&YTRE(2~ znmD<{#3dx9RMpfqG__1j&CD$#!v`*nMGf}^(PF6}rMnOeST|r4lSw=>~TvNxu(8R<c1}I=;VrF4wW9Q)H;sz?% zD!{d!pzFb!U9xX3zTPI5o8roG<0MW4oqZMDikqloVbuf*=gfJ(V&YTRE(2~ znmD<{#3dx9RMpfqG__1j&CD$#!v`*nMGf}^(PF6}rMnOeST|r4lSw=>~TvNxu(8R<c1}I=;VrF4wW9Q)H;sz?% zD!{d!pzFb!U9xX3zTPI5o8roG<0MW4oqZMDikqloVbuf*=gfJ(V&YTRE(2~ znmD<{#3dx9RMpfqG__1j&CD$#!v`*nMGf}^(PF6}rMnOeST|r4lSw=>~TvNxu(8R<c1}I=;VrF4wW9Q)H;sz?% zD!{d!pzFb!U9xX3zTPI5o8roG<0MW4oqZMDikqloVbuf*=gfJ(V&YTRE(2~ znmD<{#3dx9RMpfqG__1j&CD$#!v`*nMGf}^(PF6}rMnOeST|r4lSw=>~TvNxu(8R<c1}I=;VrF4wW9Q)H;sz?% zD!{d!pzFb!U9xX3zTPI5o8roG<0MW4oqZMDikqloVbuf*=gfJ(V&YTRE(2~ znmD<{#3dx9RMpfqG__1j&CD$#!v`*nMGf}^(PF6}rMnOeST|r4lSw=>~TvNxu(8R<c1}I=;VrF4wW9Q)H;sz?% zD!{d!pzFb!U9xX3zTPI5o8roG<0MW4oqZMDikqloVbuf*=gfJ(V&YTRE(2~ znmD<{#3dx9RMpfqG__1j&CD$#!v`*nMGf} Date: Mon, 10 Jul 2023 13:38:04 +0900 Subject: [PATCH 4/8] Make black happy --- src/otx/algorithms/classification/task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/otx/algorithms/classification/task.py b/src/otx/algorithms/classification/task.py index 6f863c0bb1a..3c74230dcab 100644 --- a/src/otx/algorithms/classification/task.py +++ b/src/otx/algorithms/classification/task.py @@ -134,7 +134,7 @@ def _is_multi_label(self, label_groups: List[LabelGroup], all_labels: List[Label return ( (len(label_groups) > 1) and (len(label_groups) == len(all_labels)) and (False not in find_multilabel_symbol) ) - + def _set_train_mode(self): label_groups = self._task_environment.label_schema.get_groups(include_empty=False) all_labels = self._task_environment.label_schema.get_labels(include_empty=False) From cfce9a15c6c4add6ec61ee62ad91993ca8df05ab Mon Sep 17 00:00:00 2001 From: sungmanc Date: Mon, 10 Jul 2023 13:45:27 +0900 Subject: [PATCH 5/8] Fix mis-merge --- .../cli/classification/test_classification.py | 105 +++++++++++++----- 1 file changed, 79 insertions(+), 26 deletions(-) diff --git a/tests/integration/cli/classification/test_classification.py b/tests/integration/cli/classification/test_classification.py index e11929c8226..d0629daea66 100644 --- a/tests/integration/cli/classification/test_classification.py +++ b/tests/integration/cli/classification/test_classification.py @@ -34,9 +34,9 @@ # Pre-train w/ 'label_0', 'label_1', 'label_2' classes args = { - "--train-data-roots": "tests/assets/classification_dataset", - "--val-data-roots": "tests/assets/classification_dataset", - "--test-data-roots": "tests/assets/classification_dataset", + "--train-data-roots": "tests/assets/classification_dataset_class_incremental", + "--val-data-roots": "tests/assets/classification_dataset_class_incremental", + 
"--test-data-roots": "tests/assets/classification_dataset_class_incremental", "--input": "tests/assets/classification_dataset/0", "train_params": [ "params", @@ -50,15 +50,8 @@ # Warmstart using data w/ 'intel', 'openvino', 'opencv' classes args_selfsl = { "--train-data-roots": "tests/assets/classification_dataset", - "train_params": [ - "params", - "--learning_parameters.num_iters", - "1", - "--learning_parameters.batch_size", - "4", - "--algo_backend.train_type", - "Selfsupervised", - ], + "--train-type": "Selfsupervised", + "train_params": ["params", "--learning_parameters.num_iters", "1", "--learning_parameters.batch_size", "4"], } # Training params for resume, num_iters*2 @@ -76,7 +69,7 @@ MULTI_GPU_UNAVAILABLE = torch.cuda.device_count() <= 1 default_template = parse_model_template( os.path.join( - "otx/algorithms/classification", + "src/otx/algorithms/classification", "configs", "efficientnet_b0_cls_incr", "template.yaml", @@ -85,7 +78,7 @@ default_templates = [default_template] default_templates_ids = [default_template.model_template_id] -templates = Registry("otx/algorithms/classification").filter(task_type="CLASSIFICATION").templates +templates = Registry("src/otx/algorithms/classification").filter(task_type="CLASSIFICATION").templates templates_ids = [template.model_template_id for template in templates] @@ -121,8 +114,10 @@ def test_otx_resume(self, template, tmp_dir_path): @pytest.mark.parametrize("template", templates, ids=templates_ids) @pytest.mark.parametrize("dump_features", [True, False]) def test_otx_export(self, template, tmp_dir_path, dump_features): + if template.name == "DeiT-Tiny" and dump_features: + pytest.skip(reason="Issue#2098 ViT inference does not work by FeatureVectorHook.") tmp_dir_path = tmp_dir_path / "multi_class_cls" - otx_export_testing(template, tmp_dir_path, dump_features) + otx_export_testing(template, tmp_dir_path, dump_features, check_ir_meta=True) @e2e_pytest_component @pytest.mark.parametrize("template", templates, ids=templates_ids) @@ -130,6 +125,12 @@ def test_otx_export_fp16(self, template, tmp_dir_path): tmp_dir_path = tmp_dir_path / "multi_class_cls" otx_export_testing(template, tmp_dir_path, half_precision=True) + @e2e_pytest_component + @pytest.mark.parametrize("template", templates, ids=templates_ids) + def test_otx_export_onnx(self, template, tmp_dir_path): + tmp_dir_path = tmp_dir_path / "multi_class_cls" + otx_export_testing(template, tmp_dir_path, half_precision=False, is_onnx=True) + @e2e_pytest_component @pytest.mark.parametrize("template", templates, ids=templates_ids) def test_otx_eval(self, template, tmp_dir_path): @@ -139,36 +140,48 @@ def test_otx_eval(self, template, tmp_dir_path): @e2e_pytest_component @pytest.mark.parametrize("template", templates, ids=templates_ids) def test_otx_explain(self, template, tmp_dir_path): + if template.name == "DeiT-Tiny": + pytest.skip(reason="Issue#2098 ViT inference does not work by FeatureVectorHook.") tmp_dir_path = tmp_dir_path / "multi_class_cls" otx_explain_testing(template, tmp_dir_path, otx_dir, args) @e2e_pytest_component @pytest.mark.parametrize("template", templates, ids=templates_ids) def test_otx_explain_all_classes(self, template, tmp_dir_path): + if template.name == "DeiT-Tiny": + pytest.skip(reason="Issue#2098 ViT inference does not work by FeatureVectorHook.") tmp_dir_path = tmp_dir_path / "multi_class_cls" otx_explain_testing_all_classes(template, tmp_dir_path, otx_dir, args) @e2e_pytest_component @pytest.mark.parametrize("template", templates, ids=templates_ids) def 
test_otx_explain_process_saliency_maps(self, template, tmp_dir_path): + if template.name == "DeiT-Tiny": + pytest.skip(reason="Issue#2098 ViT inference does not work by FeatureVectorHook.") tmp_dir_path = tmp_dir_path / "multi_class_cls" otx_explain_testing_process_saliency_maps(template, tmp_dir_path, otx_dir, args) @e2e_pytest_component @pytest.mark.parametrize("template", templates, ids=templates_ids) def test_otx_explain_openvino(self, template, tmp_dir_path): + if template.name == "DeiT-Tiny": + pytest.skip(reason="Issue#2098 ViT inference does not work by FeatureVectorHook.") tmp_dir_path = tmp_dir_path / "multi_class_cls" otx_explain_openvino_testing(template, tmp_dir_path, otx_dir, args) @e2e_pytest_component @pytest.mark.parametrize("template", templates, ids=templates_ids) def test_otx_explain_all_classes_openvino(self, template, tmp_dir_path): + if template.name == "DeiT-Tiny": + pytest.skip(reason="Issue#2098 ViT inference does not work by FeatureVectorHook.") tmp_dir_path = tmp_dir_path / "multi_class_cls" otx_explain_all_classes_openvino_testing(template, tmp_dir_path, otx_dir, args) @e2e_pytest_component @pytest.mark.parametrize("template", templates, ids=templates_ids) def test_otx_explain_process_saliency_maps_openvino(self, template, tmp_dir_path): + if template.name == "DeiT-Tiny": + pytest.skip(reason="Issue#2098 ViT inference does not work by FeatureVectorHook.") tmp_dir_path = tmp_dir_path / "multi_class_cls" otx_explain_process_saliency_maps_openvino_testing(template, tmp_dir_path, otx_dir, args) @@ -221,8 +234,9 @@ def test_otx_train_semisl(self, template, tmp_dir_path): tmp_dir_path = tmp_dir_path / "multi_class_cls/test_semisl" args_semisl = copy.deepcopy(args) args_semisl["--unlabeled-data-roots"] = args["--train-data-roots"] - args_semisl["train_params"].extend(["--algo_backend.train_type", "Semisupervised"]) otx_train_testing(template, tmp_dir_path, otx_dir, args_semisl) + template_dir = get_template_dir(template, tmp_dir_path) + assert os.path.exists(f"{template_dir}/semisl") @e2e_pytest_component @pytest.mark.skipif(MULTI_GPU_UNAVAILABLE, reason="The number of gpu is insufficient") @@ -231,15 +245,18 @@ def test_otx_multi_gpu_train_semisl(self, template, tmp_dir_path): tmp_dir_path = tmp_dir_path / "multi_class_cls/test_multi_gpu_semisl" args_semisl_multigpu = copy.deepcopy(args) args_semisl_multigpu["--unlabeled-data-roots"] = args["--train-data-roots"] - args_semisl_multigpu["train_params"].extend(["--algo_backend.train_type", "Semisupervised"]) args_semisl_multigpu["--gpus"] = "0,1" otx_train_testing(template, tmp_dir_path, otx_dir, args_semisl_multigpu) + template_dir = get_template_dir(template, tmp_dir_path) + assert os.path.exists(f"{template_dir}/semisl") @e2e_pytest_component @pytest.mark.parametrize("template", default_templates, ids=default_templates_ids) def test_otx_train_selfsl(self, template, tmp_dir_path): tmp_dir_path = tmp_dir_path / "multi_class_cls/test_selfsl" otx_train_testing(template, tmp_dir_path, otx_dir, args_selfsl) + template_dir = get_template_dir(template, tmp_dir_path) + assert os.path.exists(f"{template_dir}/selfsl") @e2e_pytest_component @pytest.mark.skipif(MULTI_GPU_UNAVAILABLE, reason="The number of gpu is insufficient") @@ -249,6 +266,8 @@ def test_otx_multi_gpu_train_selfsl(self, template, tmp_dir_path): args_selfsl_multigpu = copy.deepcopy(args_selfsl) args_selfsl_multigpu["--gpus"] = "0,1" otx_train_testing(template, tmp_dir_path, otx_dir, args_selfsl_multigpu) + template_dir = get_template_dir(template, tmp_dir_path) + 
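# The assertion below, and its counterparts in the other self-/semi-supervised tests,
# encodes the work-dir layout these tests now expect: when a non-default train type is
# selected (e.g. "--train-type" "Selfsupervised", or "--unlabeled-data-roots" for a
# semi-supervised run), outputs are looked for under a matching sub-directory of the
# template work dir:
#   <template_dir>/selfsl   for self-supervised warm-start runs
#   <template_dir>/semisl   for semi-supervised runs
# (a sketch of the expectation checked by the tests, not a statement about the CLI itself).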
assert os.path.exists(f"{template_dir}/selfsl") @e2e_pytest_component @pytest.mark.parametrize("template", templates, ids=templates_ids) @@ -266,11 +285,20 @@ def test_otx_train_enable_noisy_lable_detection(self, template, tmp_dir_path): @e2e_pytest_component @pytest.mark.parametrize("template", default_templates, ids=default_templates_ids) - def test_otx_train_auto_decrease_batch_size(self, template, tmp_dir_path): - decrease_bs_args = copy.deepcopy(args) - decrease_bs_args["train_params"].extend(["--learning_parameters.auto_decrease_batch_size", "true"]) - tmp_dir_path = tmp_dir_path / "multi_class_cls_auto_decrease_batch_size" - otx_train_testing(template, tmp_dir_path, otx_dir, decrease_bs_args) + @pytest.mark.parametrize("bs_adapt_type", ["Safe", "Full"]) + def test_otx_train_auto_adapt_batch_size(self, template, tmp_dir_path, bs_adapt_type): + adapting_bs_args = copy.deepcopy(args) + adapting_bs_args["train_params"].extend(["--learning_parameters.auto_adapt_batch_size", bs_adapt_type]) + tmp_dir_path = tmp_dir_path / f"multi_class_cls_auto_adapt_{bs_adapt_type}_batch_size" + otx_train_testing(template, tmp_dir_path, otx_dir, adapting_bs_args) + + @e2e_pytest_component + @pytest.mark.parametrize("template", default_templates, ids=default_templates_ids) + def test_otx_train_auto_adapt_num_workers(self, template, tmp_dir_path): + adapting_num_workers_args = copy.deepcopy(args) + adapting_num_workers_args["train_params"].extend(["--learning_parameters.auto_num_workers", "True"]) + tmp_dir_path = tmp_dir_path / f"multi_class_cls_auto_adapt_num_workers" + otx_train_testing(template, tmp_dir_path, otx_dir, adapting_num_workers_args) # Multi-label training w/ 'car', 'tree', 'bug' classes @@ -307,7 +335,7 @@ def test_otx_train_cls_decr(self, template, tmp_dir_path): args1["--val-data-roots"] = "tests/assets/datumaro_multilabel_class_decremental" args1["--load-weights"] = f"{template_work_dir}/trained_{template.model_template_id}/models/weights.pth" otx_train_testing(template, tmp_dir_path, otx_dir, args1) - + @e2e_pytest_component @pytest.mark.parametrize("template", default_templates, ids=default_templates_ids) @pytest.mark.parametrize("dump_features", [True, False]) @@ -330,36 +358,48 @@ def test_otx_eval(self, template, tmp_dir_path): @e2e_pytest_component @pytest.mark.parametrize("template", default_templates, ids=default_templates_ids) def test_otx_explain(self, template, tmp_dir_path): + if template.name == "DeiT-Tiny": + pytest.skip(reason="Issue#2098 ViT inference does not work by FeatureVectorHook.") tmp_dir_path = tmp_dir_path / "multi_label_cls" otx_explain_testing(template, tmp_dir_path, otx_dir, args_m) @e2e_pytest_component @pytest.mark.parametrize("template", default_templates, ids=default_templates_ids) def test_otx_explain_all_classes(self, template, tmp_dir_path): + if template.name == "DeiT-Tiny": + pytest.skip(reason="Issue#2098 ViT inference does not work by FeatureVectorHook.") tmp_dir_path = tmp_dir_path / "multi_label_cls" otx_explain_testing_all_classes(template, tmp_dir_path, otx_dir, args_m) @e2e_pytest_component @pytest.mark.parametrize("template", default_templates, ids=default_templates_ids) def test_otx_explain_process_saliency_maps(self, template, tmp_dir_path): + if template.name == "DeiT-Tiny": + pytest.skip(reason="Issue#2098 ViT inference does not work by FeatureVectorHook.") tmp_dir_path = tmp_dir_path / "multi_label_cls" otx_explain_testing_process_saliency_maps(template, tmp_dir_path, otx_dir, args_m) @e2e_pytest_component 
@pytest.mark.parametrize("template", default_templates, ids=default_templates_ids) def test_otx_explain_openvino(self, template, tmp_dir_path): + if template.name == "DeiT-Tiny": + pytest.skip(reason="Issue#2098 ViT inference does not work by FeatureVectorHook.") tmp_dir_path = tmp_dir_path / "multi_label_cls" otx_explain_openvino_testing(template, tmp_dir_path, otx_dir, args_m) @e2e_pytest_component @pytest.mark.parametrize("template", default_templates, ids=default_templates_ids) def test_otx_explain_all_classes_openvino(self, template, tmp_dir_path): + if template.name == "DeiT-Tiny": + pytest.skip(reason="Issue#2098 ViT inference does not work by FeatureVectorHook.") tmp_dir_path = tmp_dir_path / "multi_label_cls" otx_explain_all_classes_openvino_testing(template, tmp_dir_path, otx_dir, args_m) @e2e_pytest_component @pytest.mark.parametrize("template", default_templates, ids=default_templates_ids) def test_otx_explain_process_saliency_maps_openvino(self, template, tmp_dir_path): + if template.name == "DeiT-Tiny": + pytest.skip(reason="Issue#2098 ViT inference does not work by FeatureVectorHook.") tmp_dir_path = tmp_dir_path / "multi_label_cls" otx_explain_process_saliency_maps_openvino_testing(template, tmp_dir_path, otx_dir, args_m) @@ -397,8 +437,9 @@ def test_otx_train_semisl(self, template, tmp_dir_path): tmp_dir_path = tmp_dir_path / "multi_label_cls" / "test_semisl" args_semisl = copy.deepcopy(args_m) args_semisl["--unlabeled-data-roots"] = args_m["--train-data-roots"] - args_semisl["train_params"].extend(["--algo_backend.train_type", "Semisupervised"]) otx_train_testing(template, tmp_dir_path, otx_dir, args_semisl) + template_dir = get_template_dir(template, tmp_dir_path) + assert os.path.exists(f"{template_dir}/semisl") args_h = { @@ -434,7 +475,7 @@ def test_otx_train_cls_decr(self, template, tmp_dir_path): args1["--val-data-roots"] = "tests/assets/datumaro_h-label_class_decremental" args1["--load-weights"] = f"{template_work_dir}/trained_{template.model_template_id}/models/weights.pth" otx_train_testing(template, tmp_dir_path, otx_dir, args1) - + @e2e_pytest_component @pytest.mark.parametrize("template", default_templates, ids=default_templates_ids) @pytest.mark.parametrize("dump_features", [True, False]) @@ -457,36 +498,48 @@ def test_otx_eval(self, template, tmp_dir_path): @e2e_pytest_component @pytest.mark.parametrize("template", default_templates, ids=default_templates_ids) def test_otx_explain(self, template, tmp_dir_path): + if template.name == "DeiT-Tiny": + pytest.skip(reason="Issue#2098 ViT inference does not work by FeatureVectorHook.") tmp_dir_path = tmp_dir_path / "h_label_cls" otx_explain_testing(template, tmp_dir_path, otx_dir, args_h) @e2e_pytest_component @pytest.mark.parametrize("template", default_templates, ids=default_templates_ids) def test_otx_explain_all_classes(self, template, tmp_dir_path): + if template.name == "DeiT-Tiny": + pytest.skip(reason="Issue#2098 ViT inference does not work by FeatureVectorHook.") tmp_dir_path = tmp_dir_path / "h_label_cls" otx_explain_testing_all_classes(template, tmp_dir_path, otx_dir, args_h) @e2e_pytest_component @pytest.mark.parametrize("template", default_templates, ids=default_templates_ids) def test_otx_explain_process_saliency_maps(self, template, tmp_dir_path): + if template.name == "DeiT-Tiny": + pytest.skip(reason="Issue#2098 ViT inference does not work by FeatureVectorHook.") tmp_dir_path = tmp_dir_path / "h_label_cls" otx_explain_testing_process_saliency_maps(template, tmp_dir_path, otx_dir, args_h) 
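# A possible consolidation (sketch only, not part of this patch series): the DeiT-Tiny
# guard now repeated at the top of every explain test could live in a single helper,
#
#     def skip_if_deit(template):
#         """Skip explain tests for ViT templates affected by Issue#2098."""
#         if template.name == "DeiT-Tiny":
#             pytest.skip(reason="Issue#2098 ViT inference does not work by FeatureVectorHook.")
#
# with each test calling skip_if_deit(template) instead of the inline two-line check.
# skip_if_deit is a hypothetical name; the patch itself keeps the checks inline.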
@e2e_pytest_component @pytest.mark.parametrize("template", default_templates, ids=default_templates_ids) def test_otx_explain_openvino(self, template, tmp_dir_path): + if template.name == "DeiT-Tiny": + pytest.skip(reason="Issue#2098 ViT inference does not work by FeatureVectorHook.") tmp_dir_path = tmp_dir_path / "h_label_cls" otx_explain_openvino_testing(template, tmp_dir_path, otx_dir, args_h) @e2e_pytest_component @pytest.mark.parametrize("template", default_templates, ids=default_templates_ids) def test_otx_explain_all_classes_openvino(self, template, tmp_dir_path): + if template.name == "DeiT-Tiny": + pytest.skip(reason="Issue#2098 ViT inference does not work by FeatureVectorHook.") tmp_dir_path = tmp_dir_path / "h_label_cls" otx_explain_all_classes_openvino_testing(template, tmp_dir_path, otx_dir, args_h) @e2e_pytest_component @pytest.mark.parametrize("template", default_templates, ids=default_templates_ids) def test_otx_explain_process_saliency_maps_openvino(self, template, tmp_dir_path): + if template.name == "DeiT-Tiny": + pytest.skip(reason="Issue#2098 ViT inference does not work by FeatureVectorHook.") tmp_dir_path = tmp_dir_path / "h_label_cls" otx_explain_process_saliency_maps_openvino_testing(template, tmp_dir_path, otx_dir, args_h) @@ -516,4 +569,4 @@ def test_nncf_optimize(self, template, tmp_dir_path): if template.entrypoints.nncf is None: pytest.skip("nncf entrypoint is none") - nncf_optimize_testing(template, tmp_dir_path, otx_dir, args_h) + nncf_optimize_testing(template, tmp_dir_path, otx_dir, args_h) \ No newline at end of file From ddd6238d67cbe8f9bc1472d1de61ea4f639017aa Mon Sep 17 00:00:00 2001 From: sungmanc Date: Mon, 10 Jul 2023 14:05:42 +0900 Subject: [PATCH 6/8] Make balck happy --- tests/integration/cli/classification/test_classification.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/integration/cli/classification/test_classification.py b/tests/integration/cli/classification/test_classification.py index d0629daea66..45a98517284 100644 --- a/tests/integration/cli/classification/test_classification.py +++ b/tests/integration/cli/classification/test_classification.py @@ -335,7 +335,7 @@ def test_otx_train_cls_decr(self, template, tmp_dir_path): args1["--val-data-roots"] = "tests/assets/datumaro_multilabel_class_decremental" args1["--load-weights"] = f"{template_work_dir}/trained_{template.model_template_id}/models/weights.pth" otx_train_testing(template, tmp_dir_path, otx_dir, args1) - + @e2e_pytest_component @pytest.mark.parametrize("template", default_templates, ids=default_templates_ids) @pytest.mark.parametrize("dump_features", [True, False]) @@ -475,7 +475,7 @@ def test_otx_train_cls_decr(self, template, tmp_dir_path): args1["--val-data-roots"] = "tests/assets/datumaro_h-label_class_decremental" args1["--load-weights"] = f"{template_work_dir}/trained_{template.model_template_id}/models/weights.pth" otx_train_testing(template, tmp_dir_path, otx_dir, args1) - + @e2e_pytest_component @pytest.mark.parametrize("template", default_templates, ids=default_templates_ids) @pytest.mark.parametrize("dump_features", [True, False]) @@ -569,4 +569,4 @@ def test_nncf_optimize(self, template, tmp_dir_path): if template.entrypoints.nncf is None: pytest.skip("nncf entrypoint is none") - nncf_optimize_testing(template, tmp_dir_path, otx_dir, args_h) \ No newline at end of file + nncf_optimize_testing(template, tmp_dir_path, otx_dir, args_h) From 515408584274be31ac9f6ecef92f714e35dac754 Mon Sep 17 00:00:00 2001 From: sungmanc Date: Mon, 10 Jul 
2023 16:58:36 +0900 Subject: [PATCH 7/8] Fix typo --- .../classification/adapters/mmcls/datasets/otx_datasets.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/otx/algorithms/classification/adapters/mmcls/datasets/otx_datasets.py b/src/otx/algorithms/classification/adapters/mmcls/datasets/otx_datasets.py index 3d984f8502d..8102362fa76 100644 --- a/src/otx/algorithms/classification/adapters/mmcls/datasets/otx_datasets.py +++ b/src/otx/algorithms/classification/adapters/mmcls/datasets/otx_datasets.py @@ -416,7 +416,8 @@ def evaluate( ) eval_results["MHAcc"] = total_acc - if self.hierarchical_info["num_multiclass_head"] > 0: + breakpoint() + if self.hierarchical_info["num_multiclass_heads"] > 0: eval_results["avgClsAcc"] = total_acc_sl / self.hierarchical_info["num_multiclass_heads"] else: eval_results["avgClsAcc"] = total_acc_sl From 2614f4e232596584116309023766f062455c3061 Mon Sep 17 00:00:00 2001 From: sungmanc Date: Mon, 10 Jul 2023 19:50:17 +0900 Subject: [PATCH 8/8] Fix typoi --- .../classification/adapters/mmcls/datasets/otx_datasets.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/otx/algorithms/classification/adapters/mmcls/datasets/otx_datasets.py b/src/otx/algorithms/classification/adapters/mmcls/datasets/otx_datasets.py index 8102362fa76..70a4500d1b5 100644 --- a/src/otx/algorithms/classification/adapters/mmcls/datasets/otx_datasets.py +++ b/src/otx/algorithms/classification/adapters/mmcls/datasets/otx_datasets.py @@ -416,7 +416,6 @@ def evaluate( ) eval_results["MHAcc"] = total_acc - breakpoint() if self.hierarchical_info["num_multiclass_heads"] > 0: eval_results["avgClsAcc"] = total_acc_sl / self.hierarchical_info["num_multiclass_heads"] else:
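Taken together, PATCH 1, 7 and 8 turn this hunk into a guarded average so that label
schemas without any exclusive (multiclass) head no longer divide by zero. A minimal
standalone sketch of that final logic, with made-up metric values:

def average_cls_acc(total_acc_sl: float, num_multiclass_heads: int) -> float:
    # Mirrors the guarded branch in otx_datasets.py: normalize the summed per-head
    # accuracy only when at least one exclusive (multiclass) head exists.
    if num_multiclass_heads > 0:
        return total_acc_sl / num_multiclass_heads
    return total_acc_sl

# A schema with no exclusive heads keeps the raw value instead of dividing by zero:
assert average_cls_acc(0.8, 0) == 0.8
# A three-head hierarchical schema averages the summed accuracy as before:
assert abs(average_cls_acc(2.4, 3) - 0.8) < 1e-9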