diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4d772538095..b1c8b790066 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -34,6 +34,21 @@ All notable changes to this project will be documented in this file.
 - OpenVINO(==2023.0) IR inference is not working well on 2-stage models (e.g. Mask-RCNN) exported from torch>=1.13.1
 - NNCF QAT optimization is disabled for MaskRCNN models due to CUDA runtime error in ROIAlign kernel on torch==2.0.1
 
+## \[v1.4.2\]
+
+### Enhancements
+
+- Bump datumaro version to 1.5.0rc0 ()
+- Set tox version constraint ()
+- Add model category attributes to model template ()
+
+### Bug fixes
+
+- Bug fix for albumentations ()
+- Add workaround for the incorrect meta info M-RCNN (used for XAI) ()
+- Fix label list order for h-label classification ()
+- Modified fq numbers for lite HRNET e2e tests ()
+
 ## \[v1.4.1\]
 
 ### Enhancements
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 1db0e3778bb..61741e262b6 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -11,6 +11,13 @@
 # documentation root, use os.path.abspath to make it absolute, like shown here.
 #
 
+import os
+import sys
+
+sys.path.insert(0, os.path.abspath("../../src"))
+
+from otx import __version__
+
 # ruff: noqa
 # -- Project information -----------------------------------------------------
 #
@@ -18,7 +25,7 @@
 project = 'OpenVINO™ Training Extensions'
 copyright = '2023, OpenVINO™ Training Extensions Contributors'
 author = 'OpenVINO™ Training Extensions Contributors'
-release = '1.5.0'
+release = __version__
 
 # -- General configuration ---------------------------------------------------
 #
diff --git a/requirements/base.txt b/requirements/base.txt
index 4ffd4af053e..c8ae357c188 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -4,7 +4,7 @@ natsort>=6.0.0
 prettytable
 protobuf>=3.20.0
 pyyaml
-datumaro==1.5.0rc0
+datumaro~=1.5.0
 psutil
 scipy>=1.8
 bayesian-optimization>=1.2.0
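Note on the `requirements/base.txt` hunk above: `datumaro~=1.5.0` is a compatible-release constraint, equivalent to `>=1.5.0, ==1.5.*`, so any 1.5.x release is accepted while the previously pinned `1.5.0rc0` pre-release and the 1.6 line are not. A minimal sketch of that behaviour, assuming the `packaging` library (not part of this diff) is available:

```python
# Minimal sketch (assumes the `packaging` library): what the datumaro~=1.5.0 constraint accepts.
from packaging.specifiers import SpecifierSet

spec = SpecifierSet("~=1.5.0")      # compatible release: >=1.5.0, ==1.5.*
print(spec.contains("1.5.2"))       # True  -- any 1.5.x release satisfies the constraint
print(spec.contains("1.6.0"))       # False -- the next minor release is excluded
print(spec.contains("1.5.0rc0"))    # False -- pre-releases are excluded by default
```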
diff --git a/requirements/openvino.txt b/requirements/openvino.txt
index 0324ed22f42..ca71af3da87 100644
--- a/requirements/openvino.txt
+++ b/requirements/openvino.txt
@@ -1,6 +1,6 @@
 # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
 # OpenVINO Requirements. #
-nncf==2.5.0
+nncf==2.6.0
 onnx==1.13.0
 openvino-model-api==0.1.5
 openvino==2023.0
diff --git a/src/otx/algorithms/anomaly/tasks/openvino.py b/src/otx/algorithms/anomaly/tasks/openvino.py
index 7859cfbfb36..3800b264e0d 100644
--- a/src/otx/algorithms/anomaly/tasks/openvino.py
+++ b/src/otx/algorithms/anomaly/tasks/openvino.py
@@ -242,7 +242,8 @@ def _populate_metadata_legacy(self, model: ModelEntity) -> Dict[str, Any]:
             to_tensor=True,
         )
         metadata = {
-            "transform": transform.to_dict(),
+            # TODO: Replace with transform.to_dict() when OTX supports albumentations 1.3.0
+            "transform": {"transform": transform._to_dict()},
             "image_threshold": image_threshold,
             "pixel_threshold": pixel_threshold,
             "min": min_value,
diff --git a/src/otx/algorithms/classification/adapters/openvino/task.py b/src/otx/algorithms/classification/adapters/openvino/task.py
index b14549218f1..91cfb3de5fa 100644
--- a/src/otx/algorithms/classification/adapters/openvino/task.py
+++ b/src/otx/algorithms/classification/adapters/openvino/task.py
@@ -36,6 +36,7 @@
 from otx.algorithms.classification.configs import ClassificationConfig
 from otx.algorithms.classification.utils import (
     get_cls_deploy_config,
+    get_cls_inferencer_configuration,
     get_hierarchical_label_list,
 )
 from otx.algorithms.common.utils import OTXOpenVinoDataLoader
@@ -113,7 +114,19 @@ def __init__(
             max_num_requests=num_requests,
             plugin_config={"PERFORMANCE_HINT": "THROUGHPUT"},
         )
-        self.model = Model.create_model(model_adapter, "Classification", {}, preload=True)
+        self.configuration = get_cls_inferencer_configuration(self.label_schema)
+
+        # create a dummy hierarchical config for backward compatibility, which is not actually used
+        if self.configuration["hierarchical"]:
+            try:
+                model_adapter.get_rt_info(["model_info", "hierarchical_config"])
+            except RuntimeError:
+                self.configuration["hierarchical_config"] = json.dumps(
+                    {"cls_heads_info": {"label_to_idx": [], "all_groups": []}, "label_tree_edges": []}
+                )
+
+        self.model = Model.create_model(model_adapter, "Classification", self.configuration, preload=True)
+
         self.converter = ClassificationToAnnotationConverter(self.label_schema)
         self.callback_exceptions: List[Exception] = []
         self.model.inference_adapter.set_callback(self._async_callback)
diff --git a/src/otx/algorithms/common/tasks/nncf_task.py b/src/otx/algorithms/common/tasks/nncf_task.py
index b5f9876f08c..a3810e80370 100644
--- a/src/otx/algorithms/common/tasks/nncf_task.py
+++ b/src/otx/algorithms/common/tasks/nncf_task.py
@@ -245,7 +245,7 @@ def model_builder(
 
     if is_export:
         compression_ctrl.prepare_for_export()
-        model.disable_dynamic_graph_building()
+        model.nncf.disable_dynamic_graph_building()
 
     if return_compression_ctrl:
         return compression_ctrl, model
diff --git a/src/otx/api/usecases/exportable_code/demo/requirements.txt b/src/otx/api/usecases/exportable_code/demo/requirements.txt
index 061a8546f4e..1a01c373d15 100644
--- a/src/otx/api/usecases/exportable_code/demo/requirements.txt
+++ b/src/otx/api/usecases/exportable_code/demo/requirements.txt
@@ -1,4 +1,4 @@
 openvino==2023.0
 openvino-model-api==0.1.5
-otx @ git+https://github.com/sovrasov/openvino_training_extensions/@38a9d7b20e3359d6139616c7127eac98d304c9f6#egg=otx
+otx @ git+https://github.com/openvinotoolkit/training_extensions/@e066a04834952257c1c9384a09f472d13b76b264#egg=otx
 numpy>=1.21.0,<=1.23.5 # np.bool was removed in 1.24.0 which was used in openvino runtime
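The `nncf_task.py` hunk above tracks the `nncf==2.6.0` bump: a helper that NNCF 2.5 injected directly onto the wrapped torch module is reached through the model's `nncf` attribute in 2.6. A minimal sketch of the call-site difference; `compression_ctrl` and `compressed_model` are assumed stand-ins for the objects returned by NNCF's `create_compressed_model`, not code from this diff:

```python
# Sketch of the NNCF 2.5 -> 2.6 call-site change applied in model_builder() above.
def finalize_for_export(compression_ctrl, compressed_model):
    """Prepare an NNCF-compressed model for export (illustrative, not the OTX implementation)."""
    compression_ctrl.prepare_for_export()
    # NNCF 2.5.x: the helper was available directly on the wrapped model:
    #     compressed_model.disable_dynamic_graph_building()
    # NNCF 2.6.x: NNCFNetwork helpers live under the `nncf` attribute namespace:
    compressed_model.nncf.disable_dynamic_graph_building()
    return compressed_model
```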
diff --git a/tests/e2e/cli/anomaly/reference/ote_anomaly_classification_padim/compressed_model.yml b/tests/e2e/cli/anomaly/reference/ote_anomaly_classification_padim/compressed_model.yml
index ea7fd40ce15..de30556e56d 100644
--- a/tests/e2e/cli/anomaly/reference/ote_anomaly_classification_padim/compressed_model.yml
+++ b/tests/e2e/cli/anomaly/reference/ote_anomaly_classification_padim/compressed_model.yml
@@ -3,3 +3,5 @@ TestToolsAnomalyClassification:
     number_of_fakequantizers: 26
   ptq:
     number_of_fakequantizers: 27
+  pot:
+    number_of_fakequantizers: 28
diff --git a/tests/e2e/cli/anomaly/reference/ote_anomaly_detection_padim/compressed_model.yml b/tests/e2e/cli/anomaly/reference/ote_anomaly_detection_padim/compressed_model.yml
index 89c0145b70b..1c36c166294 100644
--- a/tests/e2e/cli/anomaly/reference/ote_anomaly_detection_padim/compressed_model.yml
+++ b/tests/e2e/cli/anomaly/reference/ote_anomaly_detection_padim/compressed_model.yml
@@ -3,3 +3,5 @@ TestToolsAnomalyDetection:
     number_of_fakequantizers: 26
   ptq:
     number_of_fakequantizers: 27
+  pot:
+    number_of_fakequantizers: 28
diff --git a/tests/e2e/cli/anomaly/reference/ote_anomaly_segmentation_padim/compressed_model.yml b/tests/e2e/cli/anomaly/reference/ote_anomaly_segmentation_padim/compressed_model.yml
index d7a68adacff..c717880a0f6 100644
--- a/tests/e2e/cli/anomaly/reference/ote_anomaly_segmentation_padim/compressed_model.yml
+++ b/tests/e2e/cli/anomaly/reference/ote_anomaly_segmentation_padim/compressed_model.yml
@@ -3,3 +3,5 @@ TestToolsAnomalySegmentation:
     number_of_fakequantizers: 26
   ptq:
     number_of_fakequantizers: 27
+  pot:
+    number_of_fakequantizers: 28
diff --git a/tests/e2e/cli/instance_segmentation/reference/Custom_Counting_Instance_Segmentation_MaskRCNN_EfficientNetB2B/compressed_model.yml b/tests/e2e/cli/instance_segmentation/reference/Custom_Counting_Instance_Segmentation_MaskRCNN_EfficientNetB2B/compressed_model.yml
index 4ed59afdb94..7a937a070dc 100644
--- a/tests/e2e/cli/instance_segmentation/reference/Custom_Counting_Instance_Segmentation_MaskRCNN_EfficientNetB2B/compressed_model.yml
+++ b/tests/e2e/cli/instance_segmentation/reference/Custom_Counting_Instance_Segmentation_MaskRCNN_EfficientNetB2B/compressed_model.yml
@@ -3,8 +3,12 @@ TestToolsOTXInstanceSegmentation:
     number_of_fakequantizers: 204
   ptq:
     number_of_fakequantizers: 174
+  pot:
+    number_of_fakequantizers: 137
 TestToolsTilingInstanceSegmentation:
   nncf:
     number_of_fakequantizers: 204
   ptq:
     number_of_fakequantizers: 174
+  pot:
+    number_of_fakequantizers: 137
diff --git a/tests/e2e/cli/instance_segmentation/reference/Custom_Counting_Instance_Segmentation_MaskRCNN_ResNet50/compressed_model.yml b/tests/e2e/cli/instance_segmentation/reference/Custom_Counting_Instance_Segmentation_MaskRCNN_ResNet50/compressed_model.yml
index 8bace97530a..620feec9b0e 100644
--- a/tests/e2e/cli/instance_segmentation/reference/Custom_Counting_Instance_Segmentation_MaskRCNN_ResNet50/compressed_model.yml
+++ b/tests/e2e/cli/instance_segmentation/reference/Custom_Counting_Instance_Segmentation_MaskRCNN_ResNet50/compressed_model.yml
@@ -3,8 +3,12 @@ TestToolsOTXInstanceSegmentation:
     number_of_fakequantizers: 97
   ptq:
     number_of_fakequantizers: 136
+  pot:
+    number_of_fakequantizers: 99
 TestToolsTilingInstanceSegmentation:
   nncf:
     number_of_fakequantizers: 97
   ptq:
     number_of_fakequantizers: 136
+  pot:
+    number_of_fakequantizers: 99
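The `compressed_model.yml` files above hold e2e reference values: the expected number of FakeQuantize nodes per optimization path (`nncf`, `ptq`, and now `pot`). Purely as an illustration of what such a number corresponds to (this is not the OTX test code), a count can be taken from an exported OpenVINO IR like this; `compressed_model.yml` and `model.xml` are placeholder paths:

```python
# Illustrative sketch: count FakeQuantize ops in an OpenVINO IR and compare with a reference value.
import yaml
from openvino.runtime import Core

with open("compressed_model.yml") as f:          # placeholder path to a reference file
    reference = yaml.safe_load(f)
expected = reference["TestToolsAnomalyClassification"]["pot"]["number_of_fakequantizers"]

model = Core().read_model("model.xml")           # placeholder path to the optimized IR
actual = sum(1 for op in model.get_ops() if op.get_type_name() == "FakeQuantize")
assert actual == expected
```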
diff --git a/tests/unit/core/data/test_helpers.py b/tests/unit/core/data/test_helpers.py
index f2f47de6990..6bc973c4159 100644
--- a/tests/unit/core/data/test_helpers.py
+++ b/tests/unit/core/data/test_helpers.py
@@ -6,8 +6,12 @@
 import os
 
 import cv2
-import datumaro as dm
 import numpy as np
+from datumaro.components.annotation import Label, Bbox, Mask
+from datumaro.components.dataset import Dataset
+from datumaro.components.dataset_base import DatasetItem
+from datumaro.components.media import ImageFromFile, ImageFromNumpy
+
 from otx.api.entities.model_template import TaskType
 
 
@@ -107,7 +111,7 @@ def generate_datumaro_dataset_item(
     image_shape: np.array = np.array((5, 5, 3)),
     mask_shape: np.array = np.array((5, 5)),
     temp_dir: Optional[str] = None,
-) -> dm.DatasetItem:
+) -> DatasetItem:
     """Generate Datumaro DatasetItem.
 
     Args:
@@ -119,20 +123,22 @@
         temp_dir (str): directory to save image data
 
     Returns:
-        dm.DatasetItem: Datumaro DatasetItem
+        DatasetItem: Datumaro DatasetItem
     """
     ann_task_dict = {
-        "classification": dm.Label(label=0),
-        "detection": dm.Bbox(1, 2, 3, 4, label=0),
-        "segmentation": dm.Mask(np.zeros(mask_shape)),
+        "classification": Label(label=0),
+        "detection": Bbox(1, 2, 3, 4, label=0),
+        "segmentation": Mask(np.zeros(mask_shape)),
     }
 
     if temp_dir:
         path = os.path.join(temp_dir, "image.png")
         cv2.imwrite(path, np.ones(image_shape))
-        return dm.DatasetItem(id=item_id, subset=subset, image=path, annotations=[ann_task_dict[task]])
+        return DatasetItem(id=item_id, subset=subset, media=ImageFromFile(path), annotations=[ann_task_dict[task]])
 
-    return dm.DatasetItem(id=item_id, subset=subset, image=np.ones(image_shape), annotations=[ann_task_dict[task]])
+    return DatasetItem(
+        id=item_id, subset=subset, media=ImageFromNumpy(np.ones(image_shape)), annotations=[ann_task_dict[task]]
+    )
 
 
 def generate_datumaro_dataset(
@@ -141,7 +147,7 @@
     num_data: int = 1,
     image_shape: np.array = np.array((5, 5, 3)),
     mask_shape: np.array = np.array((5, 5)),
-) -> dm.Dataset:
+) -> Dataset:
     """Generate Datumaro Dataset.
 
     Args:
@@ -154,7 +160,7 @@
     Returns:
         dm.Dataset: Datumaro Dataset
     """
-    dataset_items: dm.DatasetItem = []
+    dataset_items: DatasetItem = []
     for subset in subsets:
         for idx in range(num_data):
             dataset_items.append(
@@ -166,4 +172,4 @@
                     mask_shape=mask_shape,
                 )
             )
-    return dm.Dataset.from_iterable(dataset_items, categories=["cat", "dog"])
+    return Dataset.from_iterable(dataset_items, categories=["cat", "dog"])
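A short usage sketch of the updated helper under the datumaro ~=1.5 media API. The call below and the `item.media.data` access are assumptions based on the signature shown in this diff and on datumaro 1.5, where `Image` subclasses expose their pixels through `.data`; they are not part of the change itself:

```python
# Usage sketch (assumes datumaro ~=1.5): items carry pixels via `media`, not the removed `image=` argument.
import numpy as np

from tests.unit.core.data.test_helpers import generate_datumaro_dataset_item

item = generate_datumaro_dataset_item(item_id="item_0", subset="train", task="classification")
assert isinstance(item.media.data, np.ndarray)  # ImageFromNumpy returns the in-memory array via .data
```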