From 520b8c7302432dd9fe0a72dc003389e8c58fc706 Mon Sep 17 00:00:00 2001 From: Duc Thinh Ngo Date: Wed, 28 Feb 2024 01:01:54 +0100 Subject: [PATCH 1/8] change Dockerfile to fix #1775 --- .ci/Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.ci/Dockerfile b/.ci/Dockerfile index ef8b5e340b..219ad3a65d 100644 --- a/.ci/Dockerfile +++ b/.ci/Dockerfile @@ -62,8 +62,8 @@ RUN conda install python=3.10 FROM python_base_cuda11.4 as anomalib_development_env # Install all anomalib requirements -COPY ./requirements/base.txt /tmp/anomalib/requirements/base.txt -RUN pip install --no-cache-dir -r /tmp/anomalib/requirements/base.txt +COPY ./requirements/installer.txt /tmp/anomalib/requirements/installer.txt +RUN pip install --no-cache-dir -r /tmp/anomalib/requirements/installer.txt COPY ./requirements/openvino.txt /tmp/anomalib/requirements/openvino.txt RUN pip install --no-cache-dir -r /tmp/anomalib/requirements/openvino.txt From 3e84d8bc9a01c869320a7bf1c27f53624cd2c5f5 Mon Sep 17 00:00:00 2001 From: Duc Thinh Ngo Date: Fri, 1 Mar 2024 17:38:10 +0100 Subject: [PATCH 2/8] Move export functionality to AnomalyModule --- src/anomalib/data/utils/path.py | 5 +- src/anomalib/deploy/__init__.py | 5 - src/anomalib/deploy/export.py | 416 ------------------ src/anomalib/engine/engine.py | 12 +- src/anomalib/models/__init__.py | 3 +- src/anomalib/models/components/__init__.py | 3 +- .../models/components/base/__init__.py | 4 +- .../models/components/base/anomaly_module.py | 378 +++++++++++++++- tests/integration/cli/test_cli.py | 2 +- tests/integration/model/test_models.py | 3 +- .../tools/test_gradio_entrypoint.py | 8 +- .../tools/test_openvino_entrypoint.py | 4 +- .../tools/test_torch_entrypoint.py | 4 +- tests/unit/data/utils/test_image.py | 8 +- tests/unit/data/utils/test_path.py | 4 +- tests/unit/deploy/test_inferencer.py | 4 +- 16 files changed, 405 insertions(+), 458 deletions(-) delete mode 100644 src/anomalib/deploy/export.py diff --git 
a/src/anomalib/data/utils/path.py b/src/anomalib/data/utils/path.py index 6b0c0c2c82..18232089da 100644 --- a/src/anomalib/data/utils/path.py +++ b/src/anomalib/data/utils/path.py @@ -6,6 +6,7 @@ import os import re +import warnings from enum import Enum from pathlib import Path @@ -213,8 +214,8 @@ def validate_path(path: str | Path, base_dir: str | Path | None = None, should_e # Check if the resolved path is within the base directory if not str(path).startswith(str(base_dir)): - msg = "Access denied: Path is outside the allowed directory" - raise ValueError(msg) + msg = "Path is outside the secured directory" + raise ValueError(msg) # In case path ``should_exist``, the path is valid, and should be # checked for read and execute permissions. diff --git a/src/anomalib/deploy/__init__.py b/src/anomalib/deploy/__init__.py index 1e98d69e10..b622b644a8 100644 --- a/src/anomalib/deploy/__init__.py +++ b/src/anomalib/deploy/__init__.py @@ -3,15 +3,10 @@ # Copyright (C) 2022-2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -from .export import ExportType, export_to_onnx, export_to_openvino, export_to_torch from .inferencers import Inferencer, OpenVINOInferencer, TorchInferencer __all__ = [ - "ExportType", "Inferencer", "OpenVINOInferencer", "TorchInferencer", - "export_to_onnx", - "export_to_openvino", - "export_to_torch", ] diff --git a/src/anomalib/deploy/export.py b/src/anomalib/deploy/export.py deleted file mode 100644 index 23f0126421..0000000000 --- a/src/anomalib/deploy/export.py +++ /dev/null @@ -1,416 +0,0 @@ -"""Utilities for optimization and OpenVINO conversion.""" - -# Copyright (C) 2022-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - - -import json -import logging -from enum import Enum -from pathlib import Path -from typing import TYPE_CHECKING, Any - -import albumentations as A # noqa: N812 -import numpy as np -import torch - -from anomalib import TaskType -from anomalib.data import AnomalibDataModule,
AnomalibDataset -from anomalib.models.components import AnomalyModule -from anomalib.utils.exceptions import try_import - -if TYPE_CHECKING: - from torch.types import Number - -logger = logging.getLogger("anomalib") - -if try_import("openvino"): - from openvino.runtime import serialize - from openvino.tools.ovc import convert_model - - -class ExportType(str, Enum): - """Model export type. - - Examples: - >>> from anomalib.deploy import ExportType - >>> ExportType.ONNX - 'onnx' - >>> ExportType.OPENVINO - 'openvino' - >>> ExportType.TORCH - 'torch' - """ - - ONNX = "onnx" - OPENVINO = "openvino" - TORCH = "torch" - - -def export_to_torch( - model: AnomalyModule, - export_root: Path | str, - transform: dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose, - task: TaskType | None = None, -) -> Path: - """Export AnomalibModel to torch. - - Args: - model (AnomalyModule): Model to export. - export_root (Path): Path to the output folder. - transform (dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose): Data transforms (augmentations) - used for the model. When using ``dict``, ensure that the transform dict is in the format required by - Albumentations. - task (TaskType | None): Task type should be provided if transforms is of type dict or A.Compose object. - Defaults to ``None``. - - Returns: - Path: Path to the exported pytorch model. - - Examples: - Assume that we have a model to train and we want to export it to torch format. - - >>> from anomalib.data import Visa - >>> from anomalib.models import Patchcore - >>> from anomalib.engine import Engine - ... - >>> datamodule = Visa() - >>> model = Patchcore() - >>> engine = Engine() - ... - >>> engine.fit(model, datamodule) - - Now that we have a model trained, we can export it to torch format. - - >>> from anomalib.deploy import export_to_torch - ... - >>> export_to_torch( - ... model=model, - ... export_root="path/to/export", - ... transform=datamodule.test_data.transform, - ... 
task=datamodule.test_data.task, - ... ) - """ - export_root = _create_export_root(export_root, ExportType.TORCH) - metadata = get_metadata(task=task, transform=transform, model=model) - pt_model_path = export_root / "model.pt" - torch.save( - obj={"model": model.model, "metadata": metadata}, - f=pt_model_path, - ) - return pt_model_path - - -def export_to_onnx( - model: AnomalyModule, - input_size: tuple[int, int], - export_root: Path | str, - transform: dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose, - task: TaskType | None = None, - export_type: ExportType = ExportType.ONNX, -) -> Path: - """Export model to onnx. - - Args: - model (AnomalyModule): Model to export. - input_size (list[int] | tuple[int, int]): Image size used as the input for onnx converter. - export_root (Path): Path to the root folder of the exported model. - transform (dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose): Data transforms (augmentations) - used for the model. When using dict, ensure that the transform dict is in the format required by - Albumentations. - task (TaskType | None): Task type should be provided if transforms is of type dict or A.Compose object. - Defaults to ``None``. - export_type (ExportType): Mode to export the model. Since this method is used by OpenVINO export as well, we - need to pass the export type so that the right export path is created. - Defaults to ``ExportType.ONNX``. - - Returns: - Path: Path to the exported onnx model. - - Examples: - Export the Lightning Model to ONNX: - - >>> from anomalib.models import Patchcore - >>> from anomalib.data import Visa - >>> from anomalib.deploy import export_to_onnx - ... - >>> datamodule = Visa() - >>> model = Patchcore() - ... - >>> export_to_onnx( - ... model=model, - ... input_size=(224, 224), - ... export_root="path/to/export", - ... transform=datamodule.test_data.transform, - ... task=datamodule.test_data.task - ... 
) - - Using Custom Transforms: - This example shows how to use a custom ``Compose`` object for the ``transform`` argument. - - >>> import albumentations as A - >>> transform = A.Compose([A.Resize(224, 224), A.pytorch.ToTensorV2()]) - ... - >>> export_to_onnx( - ... model=model, - ... input_size=(224, 224), - ... export_root="path/to/export", - ... transform=transform, - ... task="segmentation", - ... ) - """ - export_root = _create_export_root(export_root, export_type) - _write_metadata_to_json(export_root, transform, model, task) - onnx_path = export_root / "model.onnx" - torch.onnx.export( - model.model, - torch.zeros((1, 3, *input_size)).to(model.device), - str(onnx_path), - opset_version=14, - dynamic_axes={"input": {0: "batch_size"}, "output": {0: "batch_size"}}, - input_names=["input"], - output_names=["output"], - ) - - return onnx_path - - -def export_to_openvino( - export_root: Path | str, - model: AnomalyModule, - input_size: tuple[int, int], - transform: dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose, - ov_args: dict[str, Any] | None = None, - task: TaskType | None = None, -) -> Path: - """Convert onnx model to OpenVINO IR. - - Args: - export_root (Path): Path to the export folder. - model (AnomalyModule): AnomalyModule to export. - input_size (tuple[int, int]): Input size of the model. Used for adding metadata to the IR. - transform (dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose): Data transforms (augmentations) - used for the model. When using dict, ensure that the transform dict is in the format required by - Albumentations. - ov_args: Model optimizer arguments for OpenVINO model conversion. - Defaults to ``None``. - task (TaskType | None): Task type should be provided if transforms is of type dict or A.Compose object. - Defaults to ``None``. - - Returns: - Path: Path to the exported onnx model. - - Raises: - ModuleNotFoundError: If OpenVINO is not installed. - - Returns: - Path: Path to the exported OpenVINO IR. 
- - Examples: - Export the Lightning Model to OpenVINO IR: - This example demonstrates how to export the Lightning Model to OpenVINO IR. - - >>> from anomalib.models import Patchcore - >>> from anomalib.data import Visa - >>> from anomalib.deploy import export_to_openvino - ... - >>> datamodule = Visa() - >>> model = Patchcore() - ... - >>> export_to_openvino( - ... export_root="path/to/export", - ... model=model, - ... input_size=(224, 224), - ... transform=datamodule.test_data.transform, - ... task=datamodule.test_data.task - ... ) - - Using Custom Transforms: - This example shows how to use a custom ``Compose`` object for the ``transform`` argument. - - >>> import albumentations as A - >>> transform = A.Compose([A.Resize(224, 224), A.pytorch.ToTensorV2()]) - ... - >>> export_to_openvino( - ... export_root="path/to/export", - ... model=model, - ... input_size=(224, 224), - ... transform=transform, - ... task="segmentation", - ... ) - - """ - model_path = export_to_onnx(model, input_size, export_root, transform, task, ExportType.OPENVINO) - ov_model_path = model_path.with_suffix(".xml") - ov_args = {} if ov_args is None else ov_args - if convert_model is not None and serialize is not None: - model = convert_model(model_path, **ov_args) - serialize(model, ov_model_path) - else: - logger.exception("Could not find OpenVINO methods. Please check OpenVINO installation.") - raise ModuleNotFoundError - return ov_model_path - - -def get_metadata( - model: AnomalyModule, - transform: dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose, - task: TaskType | None = None, -) -> dict[str, Any]: - """Get metadata for the exported model. - - Args: - model (AnomalyModule): Anomaly model which contains metadata related to normalization. - transform (dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose): Data transforms (augmentations - for the model. When using dict, ensure that the transform dict is in the format required by - Albumentations. 
- task (TaskType | None): Task type should be provided if transforms is of type dict or A.Compose object. - Defaults to None. - - Returns: - dict[str, Any]: Metadata for the exported model. - """ - transform = _get_transform_dict(transform) - task = _get_task(task=task, transform=transform) - - data_metadata = {"task": task, "transform": transform} - model_metadata = _get_model_metadata(model) - metadata = {**data_metadata, **model_metadata} - - # Convert torch tensors to python lists or values for json serialization. - for key, value in metadata.items(): - if isinstance(value, torch.Tensor): - metadata[key] = value.numpy().tolist() - - return metadata - - -def _get_model_metadata(model: AnomalyModule) -> dict[str, torch.Tensor]: - """Get meta data related to normalization from model. - - Args: - model (AnomalyModule): Anomaly model which contains metadata related to normalization. - - Returns: - dict[str, torch.Tensor]: Model metadata - """ - metadata = {} - cached_metadata: dict[str, Number | torch.Tensor] = {} - for threshold_name in ("image_threshold", "pixel_threshold"): - if hasattr(model, threshold_name): - cached_metadata[threshold_name] = getattr(model, threshold_name).cpu().value.item() - if hasattr(model, "normalization_metrics") and model.normalization_metrics.state_dict() is not None: - for key, value in model.normalization_metrics.state_dict().items(): - cached_metadata[key] = value.cpu() - # Remove undefined values by copying in a new dict - for key, val in cached_metadata.items(): - if not np.isinf(val).all(): - metadata[key] = val - del cached_metadata - return metadata - - -def _get_task( - transform: dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose, - task: TaskType | None = None, -) -> TaskType: - """Get task from transform or task. - - Args: - transform (dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose): If task is None, task is taken - from transform. - task (TaskType | None): Task type. Defaults to None. 
- - Raises: - ValueError: If task is None and transform is not of type AnomalibDataset or AnomalibDataModule. - - Returns: - TaskType: Task type. - """ - _task = task - if _task is None: - if isinstance(transform, AnomalibDataset): - _task = transform.task - elif isinstance(transform, AnomalibDataModule): - _task = transform.test_data.task - else: - logging.error(f"Task should be provided when passing transform of type {type(transform)}") - raise ValueError - return _task - - -def _get_transform_dict( - transform_container: dict[str, Any] | AnomalibDataModule | AnomalibDataset | A.Compose, -) -> dict[str, Any]: - """Get transform dict from transform_container. - - Args: - transform_container (dict[str, Any] | AnomalibDataModule | AnomalibDataset | A.Compose): Transform dict - or AnomalibDataModule or AnomalibDataset or A.Compose object. Transform is taken from container. When using - AnomalibDataModule or AnomalibDataset, the task is also taken from the container. When passing - transform_container as dict, ensure that the transform dict is in the format required by Albumentations. - - Raises: - KeyError: If transform_container is dict and does not contain the required keys. - TypeError: If transform_container is not dict, AnomalibDataModule or AnomalibDataset or A.Compose object. - - Returns: - dict[str, Any]: Transform dict. - """ - if isinstance(transform_container, dict): - try: - A.from_dict(transform_container) - transform = transform_container - except KeyError as exception: - logging.exception( - f"Unsupported transform: {transform_container}." 
- " Ensure that the transform dict is in the format required by Albumentations.", - ) - raise KeyError from exception - elif isinstance(transform_container, A.Compose): - transform = transform_container.to_dict() - elif isinstance(transform_container, AnomalibDataset): - transform = transform_container.transform.to_dict() - elif isinstance(transform_container, AnomalibDataModule): - transform = transform_container.test_data.transform.to_dict() - else: - logging.error(f"Unsupported type for transform_container: {type(transform_container)}") - raise TypeError - - return transform - - -def _create_export_root(export_root: str | Path, export_type: ExportType) -> Path: - """Create export directory. - - Args: - export_root (str | Path): Path to the root folder of the exported model. - export_type (ExportType): Mode to export the model. Torch, ONNX or OpenVINO. - - Returns: - Path: Path to the export directory. - """ - export_root = Path(export_root) / "weights" / export_type.value - export_root.mkdir(parents=True, exist_ok=True) - return export_root - - -def _write_metadata_to_json( - export_root: Path, - transform: dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose, - model: AnomalyModule, - task: TaskType | None = None, -) -> None: - """Write metadata to json file. - - Args: - export_root (Path): Path to the exported model. - transform (dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose): Data transforms (augmentations) - used for the model. - model (AnomalyModule): AnomalyModule to export. - task (TaskType | None): Task type should be provided if transforms is of type dict or A.Compose object. - Defaults to None. 
- """ - metadata = get_metadata(task=task, transform=transform, model=model) - with (export_root / "metadata.json").open("w", encoding="utf-8") as metadata_file: - json.dump(metadata, metadata_file, ensure_ascii=False, indent=4) diff --git a/src/anomalib/engine/engine.py b/src/anomalib/engine/engine.py index e35967aec0..e1dd7a7647 100644 --- a/src/anomalib/engine/engine.py +++ b/src/anomalib/engine/engine.py @@ -22,8 +22,7 @@ from anomalib.callbacks.thresholding import _ThresholdCallback from anomalib.callbacks.visualizer import _VisualizationCallback from anomalib.data import AnomalibDataModule, AnomalibDataset, PredictDataset -from anomalib.deploy.export import ExportType, export_to_onnx, export_to_openvino, export_to_torch -from anomalib.models import AnomalyModule +from anomalib.models import AnomalyModule, ExportType from anomalib.utils.normalization import NormalizationMethod from anomalib.utils.types import NORMALIZATION, THRESHOLD from anomalib.utils.visualization import BaseVisualizer @@ -751,16 +750,14 @@ def export( exported_model_path: Path | None = None if export_type == ExportType.TORCH: - exported_model_path = export_to_torch( - model=model, + exported_model_path = model.to_torch( export_root=export_root, transform=transform, task=self.task, ) elif export_type == ExportType.ONNX: assert input_size is not None, "input_size must be provided for ONNX export." - exported_model_path = export_to_onnx( - model=model, + exported_model_path = model.to_onnx( input_size=input_size, export_root=export_root, transform=transform, @@ -768,8 +765,7 @@ def export( ) elif export_type == ExportType.OPENVINO: assert input_size is not None, "input_size must be provided for OpenVINO export." 
- exported_model_path = export_to_openvino( - model=model, + exported_model_path = model.to_openvino( input_size=input_size, export_root=export_root, transform=transform, diff --git a/src/anomalib/models/__init__.py b/src/anomalib/models/__init__.py index bcd4c84a8d..d3d8275c68 100644 --- a/src/anomalib/models/__init__.py +++ b/src/anomalib/models/__init__.py @@ -11,7 +11,7 @@ from jsonargparse import Namespace from omegaconf import DictConfig, OmegaConf -from anomalib.models.components import AnomalyModule +from anomalib.models.components import AnomalyModule, ExportType from .image import ( Cfa, @@ -58,6 +58,7 @@ class UnknownModelError(ModuleNotFoundError): "Uflow", "AiVad", "WinClip", + "ExportType", ] logger = logging.getLogger(__name__) diff --git a/src/anomalib/models/components/__init__.py b/src/anomalib/models/components/__init__.py index 21a3fbf5de..8061538a7f 100644 --- a/src/anomalib/models/components/__init__.py +++ b/src/anomalib/models/components/__init__.py @@ -3,7 +3,7 @@ # Copyright (C) 2022-2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -from .base import AnomalyModule, BufferListMixin, DynamicBufferModule, MemoryBankMixin +from .base import AnomalyModule, BufferListMixin, DynamicBufferModule, ExportType, MemoryBankMixin from .dimensionality_reduction import PCA, SparseRandomProjection from .feature_extractors import TimmFeatureExtractor, TorchFXFeatureExtractor from .filters import GaussianBlur2d @@ -23,4 +23,5 @@ "SparseRandomProjection", "TimmFeatureExtractor", "TorchFXFeatureExtractor", + "ExportType", ] diff --git a/src/anomalib/models/components/base/__init__.py b/src/anomalib/models/components/base/__init__.py index b2fc5bd7dc..37f904969e 100644 --- a/src/anomalib/models/components/base/__init__.py +++ b/src/anomalib/models/components/base/__init__.py @@ -3,9 +3,9 @@ # Copyright (C) 2022-2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -from .anomaly_module import AnomalyModule +from .anomaly_module import 
AnomalyModule, ExportType from .buffer_list import BufferListMixin from .dynamic_module import DynamicBufferModule from .memory_bank_module import MemoryBankMixin -__all__ = ["AnomalyModule", "BufferListMixin", "DynamicBufferModule", "MemoryBankMixin"] +__all__ = ["AnomalyModule", "BufferListMixin", "DynamicBufferModule", "MemoryBankMixin", "ExportType"] diff --git a/src/anomalib/models/components/base/anomaly_module.py b/src/anomalib/models/components/base/anomaly_module.py index 3bfbe2e3ea..d13d7e4f60 100644 --- a/src/anomalib/models/components/base/anomaly_module.py +++ b/src/anomalib/models/components/base/anomaly_module.py @@ -4,26 +4,55 @@ # SPDX-License-Identifier: Apache-2.0 import importlib +import json import logging from abc import ABC, abstractproperty from collections import OrderedDict +from enum import Enum +from pathlib import Path from typing import TYPE_CHECKING, Any +import albumentations as A # noqa: N812 import lightning.pytorch as pl +import numpy as np import torch from lightning.pytorch.utilities.types import STEP_OUTPUT from torch import nn -from anomalib import LearningType +from anomalib import LearningType, TaskType +from anomalib.data import AnomalibDataModule, AnomalibDataset from anomalib.metrics import AnomalibMetricCollection from anomalib.metrics.threshold import BaseThreshold +from anomalib.utils.exceptions import try_import if TYPE_CHECKING: from lightning.pytorch.callbacks import Callback + from torch.types import Number from torchmetrics import Metric logger = logging.getLogger(__name__) +if try_import("openvino"): + from openvino.runtime import serialize + from openvino.tools.ovc import convert_model + + +class ExportType(str, Enum): + """Model export type. 
+ + Examples: + >>> from anomalib.models import ExportType + >>> ExportType.ONNX + 'onnx' + >>> ExportType.OPENVINO + 'openvino' + >>> ExportType.TORCH + 'torch' + """ + + ONNX = "onnx" + OPENVINO = "openvino" + TORCH = "torch" class AnomalyModule(pl.LightningModule, ABC): @@ -179,3 +208,350 @@ def _get_instance(self, state_dict: OrderedDict[str, Any], dict_key: str) -> Bas def learning_type(self) -> LearningType: """Learning type of the model.""" raise NotImplementedError + + def to_torch( + self, + export_root: Path | str, + transform: dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose, + task: TaskType | None = None, + ) -> Path: + """Export AnomalibModel to torch. + + Args: + export_root (Path): Path to the output folder. + transform (dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose): Data transforms + (augmentations) used for the model. When using ``dict``, ensure that the transform dict is in the format + required by Albumentations. + task (TaskType | None): Task type should be provided if transforms is of type dict or A.Compose object. + Defaults to ``None``. + + Returns: + Path: Path to the exported pytorch model. + + Examples: + Assume that we have a model to train and we want to export it to torch format. + + >>> from anomalib.data import Visa + >>> from anomalib.models import Patchcore + >>> from anomalib.engine import Engine + ... + >>> datamodule = Visa() + >>> model = Patchcore() + >>> engine = Engine() + ... + >>> engine.fit(model, datamodule) + + Now that we have a model trained, we can export it to torch format. + + >>> model.to_torch( + ... export_root="path/to/export", + ... transform=datamodule.test_data.transform, + ... task=datamodule.test_data.task, + ... 
) + """ + export_root = _create_export_root(export_root, ExportType.TORCH) + metadata = self.get_metadata(task=task, transform=transform) + pt_model_path = export_root / "model.pt" + torch.save( + obj={"model": self.model, "metadata": metadata}, + f=pt_model_path, + ) + return pt_model_path + + def to_onnx( + self, + input_size: tuple[int, int], + export_root: Path | str, + transform: dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose, + task: TaskType | None = None, + export_type: ExportType = ExportType.ONNX, + ) -> Path: + """Export model to onnx. + + Args: + input_size (list[int] | tuple[int, int]): Image size used as the input for onnx converter. + export_root (Path): Path to the root folder of the exported model. + transform (dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose): Data transforms + (augmentations) used for the model. When using dict, ensure that the transform dict is in the format + required by Albumentations. + task (TaskType | None): Task type should be provided if transforms is of type dict or A.Compose object. + Defaults to ``None``. + export_type (ExportType): Mode to export the model. Since this method is used by OpenVINO export as well, we + need to pass the export type so that the right export path is created. + Defaults to ``ExportType.ONNX``. + + Returns: + Path: Path to the exported onnx model. + + Examples: + Export the Lightning Model to ONNX: + + >>> from anomalib.models import Patchcore + >>> from anomalib.data import Visa + ... + >>> datamodule = Visa() + >>> model = Patchcore() + ... + >>> model.to_onnx( + ... input_size=(224, 224), + ... export_root="path/to/export", + ... transform=datamodule.test_data.transform, + ... task=datamodule.test_data.task + ... ) + + Using Custom Transforms: + This example shows how to use a custom ``Compose`` object for the ``transform`` argument. + + >>> import albumentations as A + >>> transform = A.Compose([A.Resize(224, 224), A.pytorch.ToTensorV2()]) + ... 
+ >>> model.to_onnx( + ... input_size=(224, 224), + ... export_root="path/to/export", + ... transform=transform, + ... task="segmentation", + ... ) + """ + export_root = _create_export_root(export_root, export_type) + self._write_metadata_to_json(export_root, transform, task) + onnx_path = export_root / "model.onnx" + torch.onnx.export( + self.model, + torch.zeros((1, 3, *input_size)).to(self.device), + str(onnx_path), + opset_version=14, + dynamic_axes={"input": {0: "batch_size"}, "output": {0: "batch_size"}}, + input_names=["input"], + output_names=["output"], + ) + + return onnx_path + + def to_openvino( + self, + export_root: Path | str, + input_size: tuple[int, int], + transform: dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose, + ov_args: dict[str, Any] | None = None, + task: TaskType | None = None, + ) -> Path: + """Convert onnx model to OpenVINO IR. + + Args: + export_root (Path): Path to the export folder. + input_size (tuple[int, int]): Input size of the model. Used for adding metadata to the IR. + transform (dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose): Data transforms + (augmentations) used for the model. When using dict, ensure that the transform dict is in the format + required by Albumentations. + ov_args: Model optimizer arguments for OpenVINO model conversion. + Defaults to ``None``. + task (TaskType | None): Task type should be provided if transforms is of type dict or A.Compose object. + Defaults to ``None``. + + Returns: + Path: Path to the exported onnx model. + + Raises: + ModuleNotFoundError: If OpenVINO is not installed. + + Returns: + Path: Path to the exported OpenVINO IR. + + Examples: + Export the Lightning Model to OpenVINO IR: + This example demonstrates how to export the Lightning Model to OpenVINO IR. + + >>> from anomalib.models import Patchcore + >>> from anomalib.data import Visa + ... + >>> datamodule = Visa() + >>> model = Patchcore() + ... + >>> model.to_openvino( + ... 
export_root="path/to/export", + ... input_size=(224, 224), + ... transform=datamodule.test_data.transform, + ... task=datamodule.test_data.task + ... ) + + Using Custom Transforms: + This example shows how to use a custom ``Compose`` object for the ``transform`` argument. + + >>> import albumentations as A + >>> transform = A.Compose([A.Resize(224, 224), A.pytorch.ToTensorV2()]) + ... + >>> model.to_openvino( + ... export_root="path/to/export", + ... input_size=(224, 224), + ... transform=transform, + ... task="segmentation", + ... ) + + """ + model_path = self.to_onnx(input_size, export_root, transform, task, ExportType.OPENVINO) + ov_model_path = model_path.with_suffix(".xml") + ov_args = {} if ov_args is None else ov_args + if convert_model is not None and serialize is not None: + model = convert_model(model_path, **ov_args) + serialize(model, ov_model_path) + else: + logger.exception("Could not find OpenVINO methods. Please check OpenVINO installation.") + raise ModuleNotFoundError + return ov_model_path + + def get_metadata( + self, + transform: dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose, + task: TaskType | None = None, + ) -> dict[str, Any]: + """Get metadata for the exported model. + + Args: + transform (dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose): Data transforms + (augmentations) for the model. When using dict, ensure that the transform dict is in the format + required by Albumentations. + task (TaskType | None): Task type should be provided if transforms is of type dict or A.Compose object. + Defaults to None. + + Returns: + dict[str, Any]: Metadata for the exported model. + """ + transform = _get_transform_dict(transform) + task = _get_task(task=task, transform=transform) + + data_metadata = {"task": task, "transform": transform} + model_metadata = self._get_model_metadata() + metadata = {**data_metadata, **model_metadata} + + # Convert torch tensors to python lists or values for json serialization. 
+ for key, value in metadata.items(): + if isinstance(value, torch.Tensor): + metadata[key] = value.numpy().tolist() + + return metadata + + def _get_model_metadata(self) -> dict[str, torch.Tensor]: + """Get meta data related to normalization from model. + + Returns: + dict[str, torch.Tensor]: Model metadata + """ + metadata = {} + cached_metadata: dict[str, Number | torch.Tensor] = {} + for threshold_name in ("image_threshold", "pixel_threshold"): + if hasattr(self, threshold_name): + cached_metadata[threshold_name] = getattr(self, threshold_name).cpu().value.item() + if hasattr(self, "normalization_metrics") and self.normalization_metrics.state_dict() is not None: + for key, value in self.normalization_metrics.state_dict().items(): + cached_metadata[key] = value.cpu() + # Remove undefined values by copying in a new dict + for key, val in cached_metadata.items(): + if not np.isinf(val).all(): + metadata[key] = val + del cached_metadata + return metadata + + def _write_metadata_to_json( + self, + export_root: Path, + transform: dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose, + task: TaskType | None = None, + ) -> None: + """Write metadata to json file. + + Args: + export_root (Path): Path to the exported model. + transform (dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose): Data transforms + (augmentations) used for the model. + model (AnomalyModule): AnomalyModule to export. + task (TaskType | None): Task type should be provided if transforms is of type dict or A.Compose object. + Defaults to None. + """ + metadata = self.get_metadata(task=task, transform=transform) + with (export_root / "metadata.json").open("w", encoding="utf-8") as metadata_file: + json.dump(metadata, metadata_file, ensure_ascii=False, indent=4) + + +def _get_task( + transform: dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose, + task: TaskType | None = None, +) -> TaskType: + """Get task from transform or task. 
+ + Args: + transform (dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose): If task is None, task is taken + from transform. + task (TaskType | None): Task type. Defaults to None. + + Raises: + ValueError: If task is None and transform is not of type AnomalibDataset or AnomalibDataModule. + + Returns: + TaskType: Task type. + """ + _task = task + if _task is None: + if isinstance(transform, AnomalibDataset): + _task = transform.task + elif isinstance(transform, AnomalibDataModule): + _task = transform.test_data.task + else: + logging.error(f"Task should be provided when passing transform of type {type(transform)}") + raise ValueError + return _task + + +def _get_transform_dict( + transform_container: dict[str, Any] | AnomalibDataModule | AnomalibDataset | A.Compose, +) -> dict[str, Any]: + """Get transform dict from transform_container. + + Args: + transform_container (dict[str, Any] | AnomalibDataModule | AnomalibDataset | A.Compose): Transform dict + or AnomalibDataModule or AnomalibDataset or A.Compose object. Transform is taken from container. When using + AnomalibDataModule or AnomalibDataset, the task is also taken from the container. When passing + transform_container as dict, ensure that the transform dict is in the format required by Albumentations. + + Raises: + KeyError: If transform_container is dict and does not contain the required keys. + TypeError: If transform_container is not dict, AnomalibDataModule or AnomalibDataset or A.Compose object. + + Returns: + dict[str, Any]: Transform dict. + """ + if isinstance(transform_container, dict): + try: + A.from_dict(transform_container) + transform = transform_container + except KeyError as exception: + logging.exception( + f"Unsupported transform: {transform_container}." 
+ " Ensure that the transform dict is in the format required by Albumentations.", + ) + raise KeyError from exception + elif isinstance(transform_container, A.Compose): + transform = transform_container.to_dict() + elif isinstance(transform_container, AnomalibDataset): + transform = transform_container.transform.to_dict() + elif isinstance(transform_container, AnomalibDataModule): + transform = transform_container.test_data.transform.to_dict() + else: + logging.error(f"Unsupported type for transform_container: {type(transform_container)}") + raise TypeError + + return transform + + +def _create_export_root(export_root: str | Path, export_type: ExportType) -> Path: + """Create export directory. + + Args: + export_root (str | Path): Path to the root folder of the exported model. + export_type (ExportType): Mode to export the model. Torch, ONNX or OpenVINO. + + Returns: + Path: Path to the export directory. + """ + export_root = Path(export_root) / "weights" / export_type.value + export_root.mkdir(parents=True, exist_ok=True) + return export_root diff --git a/tests/integration/cli/test_cli.py b/tests/integration/cli/test_cli.py index 8917794345..92922bd625 100644 --- a/tests/integration/cli/test_cli.py +++ b/tests/integration/cli/test_cli.py @@ -13,7 +13,7 @@ import torch from anomalib.cli import AnomalibCLI -from anomalib.deploy.export import ExportType +from anomalib.models import ExportType class TestCLI: diff --git a/tests/integration/model/test_models.py b/tests/integration/model/test_models.py index 27788f506e..1a91452056 100644 --- a/tests/integration/model/test_models.py +++ b/tests/integration/model/test_models.py @@ -14,9 +14,8 @@ from anomalib import TaskType from anomalib.callbacks import ModelCheckpoint from anomalib.data import AnomalibDataModule, MVTec, UCSDped -from anomalib.deploy.export import ExportType from anomalib.engine import Engine -from anomalib.models import AnomalyModule, get_available_models, get_model +from anomalib.models import 
AnomalyModule, ExportType, get_available_models, get_model def models() -> list[str]: diff --git a/tests/integration/tools/test_gradio_entrypoint.py b/tests/integration/tools/test_gradio_entrypoint.py index 0d2b139956..f6c8e5890d 100644 --- a/tests/integration/tools/test_gradio_entrypoint.py +++ b/tests/integration/tools/test_gradio_entrypoint.py @@ -12,7 +12,7 @@ import pytest from anomalib import TaskType -from anomalib.deploy import OpenVINOInferencer, TorchInferencer, export_to_openvino, export_to_torch +from anomalib.deploy import OpenVINOInferencer, TorchInferencer from anomalib.models import Padim sys.path.append("tools/inference") @@ -46,8 +46,7 @@ def test_torch_inference( model = Padim.load_from_checkpoint(_ckpt_path) # export torch model - export_to_torch( - model=model, + model.to_torch( export_root=_ckpt_path.parent.parent, transform=transforms_config, task=TaskType.SEGMENTATION, @@ -73,9 +72,8 @@ def test_openvino_inference( model = Padim.load_from_checkpoint(_ckpt_path) # export OpenVINO model - export_to_openvino( + model.to_openvino( export_root=_ckpt_path.parent.parent, - model=model, input_size=(256, 256), transform=transforms_config, ov_args={}, diff --git a/tests/integration/tools/test_openvino_entrypoint.py b/tests/integration/tools/test_openvino_entrypoint.py index d83619c0c7..4cd1c8466b 100644 --- a/tests/integration/tools/test_openvino_entrypoint.py +++ b/tests/integration/tools/test_openvino_entrypoint.py @@ -12,7 +12,6 @@ import pytest from anomalib import TaskType -from anomalib.deploy import export_to_openvino from anomalib.models import Padim sys.path.append("tools/inference") @@ -44,9 +43,8 @@ def test_openvino_inference( model = Padim.load_from_checkpoint(_ckpt_path) # export OpenVINO model - export_to_openvino( + model.to_openvino( export_root=_ckpt_path.parent.parent, - model=model, input_size=(256, 256), transform=transforms_config, ov_args={}, diff --git a/tests/integration/tools/test_torch_entrypoint.py 
b/tests/integration/tools/test_torch_entrypoint.py index 19a80cac8b..ba400e288f 100644 --- a/tests/integration/tools/test_torch_entrypoint.py +++ b/tests/integration/tools/test_torch_entrypoint.py @@ -12,7 +12,6 @@ import pytest from anomalib import TaskType -from anomalib.deploy import export_to_torch from anomalib.models import Padim sys.path.append("tools/inference") @@ -43,8 +42,7 @@ def test_torch_inference( _ckpt_path = ckpt_path("Padim") get_parser, infer = get_functions model = Padim.load_from_checkpoint(_ckpt_path) - export_to_torch( - model=model, + model.to_torch( export_root=_ckpt_path.parent.parent, transform=transforms_config, task=TaskType.SEGMENTATION, diff --git a/tests/unit/data/utils/test_image.py b/tests/unit/data/utils/test_image.py index 6de5a7d6ae..013514bde6 100644 --- a/tests/unit/data/utils/test_image.py +++ b/tests/unit/data/utils/test_image.py @@ -45,8 +45,8 @@ def test_non_image_file(self, dataset_path: Path) -> None: def test_outside_base_dir(self) -> None: """Test ``get_image_filenames`` raises ValueError for a path outside the base directory.""" - with TemporaryDirectory() as tmp_dir, pytest.raises( - ValueError, - match=r"Access denied: Path is outside the allowed directory", - ): + with TemporaryDirectory() as tmp_dir, pytest.warns( + UserWarning, + match=r"Path is outside the secured directory", + ), pytest.raises(ValueError, match=f"Found 0 images in {tmp_dir}"): get_image_filenames(tmp_dir, base_dir=Path.home()) diff --git a/tests/unit/data/utils/test_path.py b/tests/unit/data/utils/test_path.py index 38310c1d82..8b0b6b36e4 100644 --- a/tests/unit/data/utils/test_path.py +++ b/tests/unit/data/utils/test_path.py @@ -45,7 +45,7 @@ def test_symlinks_to_a_path_outside_base_dir_raises_error(self, dataset_path: Pa """Test ``validate_path`` raises ValueError for a symlink to a path outside the base directory.""" symlink_path = dataset_path / "mvtec/dummy/train/good/symlink" symlink_path.symlink_to("/usr/local/lib") - with 
pytest.raises(ValueError, match=r"Access denied: Path is outside the allowed directory"): + with pytest.warns(UserWarning, match=r"Path is outside the secured directory"): validate_path(symlink_path) def test_nonexistent_file(self, dataset_path: Path) -> None: @@ -60,7 +60,7 @@ def test_nonexistent_directory(self, dataset_path: Path) -> None: def test_outside_base_dir(self) -> None: """Test ``validate_path`` raises ValueError for a path outside the base directory.""" - with pytest.raises(ValueError, match=r"Access denied: Path is outside the allowed directory"): + with pytest.warns(UserWarning, match=r"Path is outside the secured directory"): validate_path("/usr/local/lib") def test_no_read_permission(self) -> None: diff --git a/tests/unit/deploy/test_inferencer.py b/tests/unit/deploy/test_inferencer.py index 493cae2748..96b3c1a4de 100644 --- a/tests/unit/deploy/test_inferencer.py +++ b/tests/unit/deploy/test_inferencer.py @@ -12,9 +12,9 @@ from anomalib import TaskType from anomalib.data import MVTec -from anomalib.deploy import ExportType, OpenVINOInferencer, TorchInferencer +from anomalib.deploy import OpenVINOInferencer, TorchInferencer from anomalib.engine import Engine -from anomalib.models import Padim +from anomalib.models import ExportType, Padim class _MockImageLoader: From 2544fff8c8338d6dbcf8e50e0fe8e080aaac90c3 Mon Sep 17 00:00:00 2001 From: Duc Thinh Ngo Date: Fri, 1 Mar 2024 19:35:34 +0100 Subject: [PATCH 3/8] Updated CHANGELOG --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3bd026ca7b..b3fb7cc7a4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -38,6 +38,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). 
### Changed +- 🔨 Move all export functionalities to AnomalyModule as base methods in https://github.com/openvinotoolkit/anomalib/pull/1803 - 🔨 Version bump by @ashwinvaidya17 in https://github.com/openvinotoolkit/anomalib/pull/1305 - 🔨 Modify README custom dataset by @Kiminjo in https://github.com/openvinotoolkit/anomalib/pull/1314 - 🔨 Change the documentation URL in `README.md` and add commands to run each inferencer by @samet-akcay in https://github.com/openvinotoolkit/anomalib/pull/1326 From de00095ae569341517a7367dbbc884bae9f6970f Mon Sep 17 00:00:00 2001 From: Duc Thinh Ngo Date: Mon, 4 Mar 2024 18:03:11 +0100 Subject: [PATCH 4/8] replace flaky with pytest-rerunfailures Signed-off-by: Duc Thinh Ngo --- tox.ini | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index 559fa7e2d5..784fba7e5e 100644 --- a/tox.ini +++ b/tox.ini @@ -28,7 +28,7 @@ deps = pytest-cov pytest-mock pytest-order - flaky + pytest-rerunfailures nbmake -r{toxinidir}/requirements/installer.txt -r{toxinidir}/requirements/core.txt @@ -56,7 +56,7 @@ passenv = {[testenv]deps} deps = coverage pytest - flaky + pytest-rerunfailures -r{toxinidir}/requirements/installer.txt -r{toxinidir}/requirements/core.txt -r{toxinidir}/requirements/openvino.txt From 868c89997dd0498ef9db13586c44cb7760b37e33 Mon Sep 17 00:00:00 2001 From: Duc Thinh Ngo Date: Mon, 4 Mar 2024 18:48:31 +0100 Subject: [PATCH 5/8] update notebook Signed-off-by: Duc Thinh Ngo --- notebooks/000_getting_started/001_getting_started.ipynb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/notebooks/000_getting_started/001_getting_started.ipynb b/notebooks/000_getting_started/001_getting_started.ipynb index cfc4620eb8..bd0a9a4f8a 100644 --- a/notebooks/000_getting_started/001_getting_started.ipynb +++ b/notebooks/000_getting_started/001_getting_started.ipynb @@ -168,9 +168,9 @@ "from anomalib import TaskType\n", "from anomalib.data import MVTec\n", "from 
anomalib.data.utils import read_image\n", - "from anomalib.deploy import ExportType, OpenVINOInferencer\n", + "from anomalib.deploy import OpenVINOInferencer\n", "from anomalib.engine import Engine\n", - "from anomalib.models import Padim" + "from anomalib.models import Padim, ExportType" ] }, { From 0244625ba7ea2eaedd444d765889165a63f45903 Mon Sep 17 00:00:00 2001 From: Duc Thinh Ngo Date: Thu, 4 Apr 2024 02:40:24 +0200 Subject: [PATCH 6/8] resolved comments --- .../001_getting_started.ipynb | 4 +- src/anomalib/deploy/__init__.py | 7 +- src/anomalib/deploy/export.py | 373 ++++++++++++++++++ src/anomalib/engine/engine.py | 3 +- src/anomalib/models/__init__.py | 5 +- src/anomalib/models/components/__init__.py | 3 +- .../models/components/base/__init__.py | 4 +- .../models/components/base/anomaly_module.py | 350 +--------------- tests/integration/cli/test_cli.py | 2 +- tests/integration/model/test_models.py | 3 +- tests/unit/deploy/test_inferencer.py | 4 +- tox.ini | 2 +- 12 files changed, 394 insertions(+), 366 deletions(-) create mode 100644 src/anomalib/deploy/export.py diff --git a/notebooks/000_getting_started/001_getting_started.ipynb b/notebooks/000_getting_started/001_getting_started.ipynb index bd0a9a4f8a..a0fcd2d0c9 100644 --- a/notebooks/000_getting_started/001_getting_started.ipynb +++ b/notebooks/000_getting_started/001_getting_started.ipynb @@ -168,9 +168,9 @@ "from anomalib import TaskType\n", "from anomalib.data import MVTec\n", "from anomalib.data.utils import read_image\n", - "from anomalib.deploy import OpenVINOInferencer\n", + "from anomalib.deploy import OpenVINOInferencer, ExportType\n", "from anomalib.engine import Engine\n", - "from anomalib.models import Padim, ExportType" + "from anomalib.models import Padim" ] }, { diff --git a/src/anomalib/deploy/__init__.py b/src/anomalib/deploy/__init__.py index b622b644a8..45581bd8dd 100644 --- a/src/anomalib/deploy/__init__.py +++ b/src/anomalib/deploy/__init__.py @@ -3,10 +3,7 @@ # Copyright (C) 
2022-2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 +from .export import ExportType from .inferencers import Inferencer, OpenVINOInferencer, TorchInferencer -__all__ = [ - "Inferencer", - "OpenVINOInferencer", - "TorchInferencer", -] +__all__ = ["Inferencer", "OpenVINOInferencer", "TorchInferencer", "ExportType"] diff --git a/src/anomalib/deploy/export.py b/src/anomalib/deploy/export.py new file mode 100644 index 0000000000..15696b30ed --- /dev/null +++ b/src/anomalib/deploy/export.py @@ -0,0 +1,373 @@ +"""Utilities for optimization and OpenVINO conversion.""" + +# Copyright (C) 2022-2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + + +import json +import logging +from collections.abc import Callable +from enum import Enum +from pathlib import Path +from typing import TYPE_CHECKING, Any + +import numpy as np +import torch +from torch import nn +from torchvision.transforms.v2 import CenterCrop, Compose, Resize, Transform + +from anomalib import TaskType +from anomalib.data.transforms import ExportableCenterCrop +from anomalib.utils.exceptions import try_import + +if TYPE_CHECKING: + from torch.types import Number + +logger = logging.getLogger("anomalib") + +if try_import("openvino"): + from openvino.runtime import serialize + from openvino.tools.ovc import convert_model + + +class ExportType(str, Enum): + """Model export type. + + Examples: + >>> from anomalib.deploy import ExportType + >>> ExportType.ONNX + 'onnx' + >>> ExportType.OPENVINO + 'openvino' + >>> ExportType.TORCH + 'torch' + """ + + ONNX = "onnx" + OPENVINO = "openvino" + TORCH = "torch" + + +class InferenceModel(nn.Module): + """Inference model for export. + + The InferenceModel is used to wrap the model and transform for exporting to torch and ONNX/OpenVINO. + + Args: + model (nn.Module): Model to export. + transform (Transform): Input transform for the model. + disable_antialias (bool, optional): Disable antialiasing in the Resize transforms of the given transform. 
This + is needed for ONNX/OpenVINO export, as antialiasing is not supported in the ONNX opset. + """ + + def __init__(self, model: nn.Module, transform: Transform, disable_antialias: bool = False) -> None: + super().__init__() + self.model = model + self.transform = transform + self.convert_center_crop() + if disable_antialias: + self.disable_antialias() + + def forward(self, batch: torch.Tensor) -> torch.Tensor | tuple[torch.Tensor, torch.Tensor]: + """Transform the input batch and pass it through the model.""" + batch = self.transform(batch) + return self.model(batch) + + def disable_antialias(self) -> None: + """Disable antialiasing in the Resize transforms of the given transform. + + This is needed for ONNX/OpenVINO export, as antialiasing is not supported in the ONNX opset. + """ + if isinstance(self.transform, Resize): + self.transform.antialias = False + if isinstance(self.transform, Compose): + for transform in self.transform.transforms: + if isinstance(transform, Resize): + transform.antialias = False + + def convert_center_crop(self) -> None: + """Convert CenterCrop to ExportableCenterCrop for ONNX export. + + The original CenterCrop transform is not supported in ONNX export. This method replaces the CenterCrop to + ExportableCenterCrop, which is supported in ONNX export. For more details, see the implementation of + ExportableCenterCrop. 
+ """ + if isinstance(self.transform, CenterCrop): + self.transform = ExportableCenterCrop(size=self.transform.size) + elif isinstance(self.transform, Compose): + transforms = self.transform.transforms + for index in range(len(transforms)): + if isinstance(transforms[index], CenterCrop): + transforms[index] = ExportableCenterCrop(size=transforms[index].size) + + +class ExportMixin: + """This mixin allows exporting models to torch and ONNX/OpenVINO.""" + + model: nn.Module + transform: Transform + configure_transforms: Callable + device: torch.device + + def to_torch( + self, + export_root: Path | str, + transform: Transform | None = None, + task: TaskType | None = None, + ) -> Path: + """Export AnomalibModel to torch. + + Args: + export_root (Path): Path to the output folder. + transform (Transform, optional): Input transforms used for the model. If not provided, the transform is + taken from the model. + Defaults to ``None``. + task (TaskType | None): Task type. + Defaults to ``None``. + + Returns: + Path: Path to the exported pytorch model. + + Examples: + Assume that we have a model to train and we want to export it to torch format. + + >>> from anomalib.data import Visa + >>> from anomalib.models import Patchcore + >>> from anomalib.engine import Engine + ... + >>> datamodule = Visa() + >>> model = Patchcore() + >>> engine = Engine() + ... + >>> engine.fit(model, datamodule) + + Now that we have a model trained, we can export it to torch format. + + >>> model.to_torch( + ... export_root="path/to/export", + ... transform=datamodule.test_data.transform, + ... task=datamodule.test_data.task, + ... 
) + """ + transform = transform or self.transform or self.configure_transforms() + inference_model = InferenceModel(model=self.model, transform=transform) + export_root = _create_export_root(export_root, ExportType.TORCH) + metadata = self.get_metadata(task=task) + pt_model_path = export_root / "model.pt" + torch.save( + obj={"model": inference_model, "metadata": metadata}, + f=pt_model_path, + ) + return pt_model_path + + def to_onnx( + self, + export_root: Path | str, + transform: Transform | None = None, + task: TaskType | None = None, + ) -> Path: + """Export model to onnx. + + Args: + export_root (Path): Path to the root folder of the exported model. + transform (Transform, optional): Input transforms used for the model. If not provided, the transform is + taken from the model. + Defaults to ``None``. + task (TaskType | None): Task type. + Defaults to ``None``. + export_type (ExportType): Mode to export the model. Since this method is used by OpenVINO export as well, we + need to pass the export type so that the right export path is created. + Defaults to ``ExportType.ONNX``. + + Returns: + Path: Path to the exported onnx model. + + Examples: + Export the Lightning Model to ONNX: + + >>> from anomalib.models import Patchcore + >>> from anomalib.data import Visa + ... + >>> datamodule = Visa() + >>> model = Patchcore() + ... + >>> model.to_onnx( + ... export_root="path/to/export", + ... transform=datamodule.test_data.transform, + ... task=datamodule.test_data.task + ... ) + + Using Custom Transforms: + This example shows how to use a custom ``Compose`` object for the ``transform`` argument. + + >>> model.to_onnx( + ... export_root="path/to/export", + ... task="segmentation", + ... 
) + """ + transform = transform or self.transform or self.configure_transforms() + inference_model = InferenceModel(model=self.model, transform=transform, disable_antialias=True) + export_root = _create_export_root(export_root, ExportType.ONNX) + self._write_metadata_to_json(export_root, task) + onnx_path = export_root / "model.onnx" + torch.onnx.export( + inference_model, + torch.zeros((1, 3, 1, 1)).to(self.device), + str(onnx_path), + opset_version=14, + dynamic_axes={"input": {0: "batch_size", 2: "height", 3: "weight"}, "output": {0: "batch_size"}}, + input_names=["input"], + output_names=["output"], + ) + + return onnx_path + + def to_openvino( + self, + export_root: Path | str, + transform: Transform | None = None, + ov_args: dict[str, Any] | None = None, + task: TaskType | None = None, + ) -> Path: + """Convert onnx model to OpenVINO IR. + + Args: + export_root (Path): Path to the export folder. + transform (Transform, optional): Input transforms used for the model. If not provided, the transform is + taken from the model. + Defaults to ``None``. + ov_args: Model optimizer arguments for OpenVINO model conversion. + Defaults to ``None``. + task (TaskType | None): Task type. + Defaults to ``None``. + + Returns: + Path: Path to the exported onnx model. + + Raises: + ModuleNotFoundError: If OpenVINO is not installed. + + Returns: + Path: Path to the exported OpenVINO IR. + + Examples: + Export the Lightning Model to OpenVINO IR: + This example demonstrates how to export the Lightning Model to OpenVINO IR. + + >>> from anomalib.models import Patchcore + >>> from anomalib.data import Visa + ... + >>> datamodule = Visa() + >>> model = Patchcore() + ... + >>> model.to_openvino( + ... export_root="path/to/export", + ... transform=datamodule.test_data.transform, + ... task=datamodule.test_data.task + ... ) + + Using Custom Transforms: + This example shows how to use a custom ``Transform`` object for the ``transform`` argument. 
+ + >>> from torchvision.transforms.v2 import Resize + >>> transform = Resize(224, 224) + ... + >>> model.to_openvino( + ... export_root="path/to/export", + ... transform=transform, + ... task="segmentation", + ... ) + + """ + transform = transform or self.transform or self.configure_transforms() + export_root = _create_export_root(export_root, ExportType.OPENVINO) + inference_model = InferenceModel(model=self.model, transform=transform, disable_antialias=True) + self._write_metadata_to_json(export_root, task) + ov_model_path = export_root / "model.xml" + ov_args = {} if ov_args is None else ov_args + ov_args.update({"example_input": torch.zeros((1, 3, 1, 1)).to(self.device)}) + if convert_model is not None and serialize is not None: + model = convert_model(inference_model, **ov_args) + serialize(model, ov_model_path) + else: + logger.exception("Could not find OpenVINO methods. Please check OpenVINO installation.") + raise ModuleNotFoundError + return ov_model_path + + def get_metadata( + self, + task: TaskType | None = None, + ) -> dict[str, Any]: + """Get metadata for the exported model. + + Args: + task (TaskType | None): Task type. + Defaults to None. + + Returns: + dict[str, Any]: Metadata for the exported model. + """ + data_metadata = {"task": task} + model_metadata = self._get_model_metadata() + metadata = {**data_metadata, **model_metadata} + + # Convert torch tensors to python lists or values for json serialization. + for key, value in metadata.items(): + if isinstance(value, torch.Tensor): + metadata[key] = value.numpy().tolist() + + return metadata + + def _get_model_metadata(self) -> dict[str, torch.Tensor]: + """Get meta data related to normalization from model. 
+ + Returns: + dict[str, torch.Tensor]: Model metadata + """ + metadata = {} + cached_metadata: dict[str, Number | torch.Tensor] = {} + for threshold_name in ("image_threshold", "pixel_threshold"): + if hasattr(self, threshold_name): + cached_metadata[threshold_name] = getattr(self, threshold_name).cpu().value.item() + if hasattr(self, "normalization_metrics") and self.normalization_metrics.state_dict() is not None: + for key, value in self.normalization_metrics.state_dict().items(): + cached_metadata[key] = value.cpu() + # Remove undefined values by copying in a new dict + for key, val in cached_metadata.items(): + if not np.isinf(val).all(): + metadata[key] = val + del cached_metadata + return metadata + + def _write_metadata_to_json( + self, + export_root: Path, + task: TaskType | None = None, + ) -> None: + """Write metadata to json file. + + Args: + export_root (Path): Path to the exported model. + transform (dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose): Data transforms + (augmentations) used for the model. + task (TaskType | None): Task type. + Defaults to None. + """ + metadata = self.get_metadata(task=task) + with (export_root / "metadata.json").open("w", encoding="utf-8") as metadata_file: + json.dump(metadata, metadata_file, ensure_ascii=False, indent=4) + + +def _create_export_root(export_root: str | Path, export_type: ExportType) -> Path: + """Create export directory. + + Args: + export_root (str | Path): Path to the root folder of the exported model. + export_type (ExportType): Mode to export the model. Torch, ONNX or OpenVINO. + + Returns: + Path: Path to the export directory. 
+ """ + export_root = Path(export_root) / "weights" / export_type.value + export_root.mkdir(parents=True, exist_ok=True) + return export_root diff --git a/src/anomalib/engine/engine.py b/src/anomalib/engine/engine.py index 0edd90fc1b..a443ed680a 100644 --- a/src/anomalib/engine/engine.py +++ b/src/anomalib/engine/engine.py @@ -26,7 +26,8 @@ from anomalib.callbacks.timer import TimerCallback from anomalib.callbacks.visualizer import _VisualizationCallback from anomalib.data import AnomalibDataModule, AnomalibDataset, PredictDataset -from anomalib.models import AnomalyModule, ExportType +from anomalib.deploy import ExportType +from anomalib.models import AnomalyModule from anomalib.utils.normalization import NormalizationMethod from anomalib.utils.path import create_versioned_dir from anomalib.utils.types import NORMALIZATION, THRESHOLD diff --git a/src/anomalib/models/__init__.py b/src/anomalib/models/__init__.py index 57a9757271..722cd1dfe5 100644 --- a/src/anomalib/models/__init__.py +++ b/src/anomalib/models/__init__.py @@ -10,7 +10,7 @@ from jsonargparse import Namespace from omegaconf import DictConfig, OmegaConf -from anomalib.models.components import AnomalyModule, ExportType +from anomalib.models.components import AnomalyModule from anomalib.utils.path import convert_to_snake_case from .image import ( @@ -36,7 +36,7 @@ class UnknownModelError(ModuleNotFoundError): - ... 
+ pass __all__ = [ @@ -58,7 +58,6 @@ class UnknownModelError(ModuleNotFoundError): "Uflow", "AiVad", "WinClip", - "ExportType", ] logger = logging.getLogger(__name__) diff --git a/src/anomalib/models/components/__init__.py b/src/anomalib/models/components/__init__.py index 46ef4ff6b9..b37daafefe 100644 --- a/src/anomalib/models/components/__init__.py +++ b/src/anomalib/models/components/__init__.py @@ -3,7 +3,7 @@ # Copyright (C) 2022-2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -from .base import AnomalyModule, BufferListMixin, DynamicBufferMixin, ExportType, MemoryBankMixin +from .base import AnomalyModule, BufferListMixin, DynamicBufferMixin, MemoryBankMixin from .dimensionality_reduction import PCA, SparseRandomProjection from .feature_extractors import TimmFeatureExtractor, TorchFXFeatureExtractor from .filters import GaussianBlur2d @@ -23,5 +23,4 @@ "SparseRandomProjection", "TimmFeatureExtractor", "TorchFXFeatureExtractor", - "ExportType", ] diff --git a/src/anomalib/models/components/base/__init__.py b/src/anomalib/models/components/base/__init__.py index 59dcba8126..b535c910cb 100644 --- a/src/anomalib/models/components/base/__init__.py +++ b/src/anomalib/models/components/base/__init__.py @@ -3,9 +3,9 @@ # Copyright (C) 2022-2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -from .anomaly_module import AnomalyModule, ExportType +from .anomaly_module import AnomalyModule from .buffer_list import BufferListMixin from .dynamic_buffer import DynamicBufferMixin from .memory_bank_module import MemoryBankMixin -__all__ = ["AnomalyModule", "BufferListMixin", "DynamicBufferMixin", "MemoryBankMixin", "ExportType"] +__all__ = ["AnomalyModule", "BufferListMixin", "DynamicBufferMixin", "MemoryBankMixin"] diff --git a/src/anomalib/models/components/base/anomaly_module.py b/src/anomalib/models/components/base/anomaly_module.py index 89acabb321..eb6eef9791 100644 --- a/src/anomalib/models/components/base/anomaly_module.py +++ 
b/src/anomalib/models/components/base/anomaly_module.py @@ -4,112 +4,32 @@ # SPDX-License-Identifier: Apache-2.0 import importlib -import json import logging from abc import ABC, abstractproperty from collections import OrderedDict -from enum import Enum -from pathlib import Path from typing import TYPE_CHECKING, Any import lightning.pytorch as pl -import numpy as np import torch from lightning.pytorch.trainer.states import TrainerFn from lightning.pytorch.utilities.types import STEP_OUTPUT from torch import nn -from torchvision.transforms.v2 import CenterCrop, Compose, Normalize, Resize, Transform +from torchvision.transforms.v2 import Compose, Normalize, Resize, Transform -from anomalib import LearningType, TaskType -from anomalib.data.transforms import ExportableCenterCrop +from anomalib import LearningType +from anomalib.deploy.export import ExportMixin from anomalib.metrics import AnomalibMetricCollection from anomalib.metrics.threshold import BaseThreshold -from anomalib.utils.exceptions import try_import if TYPE_CHECKING: from lightning.pytorch.callbacks import Callback - from torch.types import Number from torchmetrics import Metric logger = logging.getLogger(__name__) -if try_import("openvino"): - from openvino.runtime import serialize - from openvino.tools.ovc import convert_model - - -class ExportType(str, Enum): - """Model export type. - - Examples: - >>> from anomalib.models import ExportType - >>> ExportType.ONNX - 'onnx' - >>> ExportType.OPENVINO - 'openvino' - >>> ExportType.TORCH - 'torch' - """ - - ONNX = "onnx" - OPENVINO = "openvino" - TORCH = "torch" - - -class InferenceModel(nn.Module): - """Inference model for export. - - The InferenceModel is used to wrap the model and transform for exporting to torch and ONNX/OpenVINO. - - Args: - model (nn.Module): Model to export. - transform (Transform): Input transform for the model. - disable_antialias (bool, optional): Disable antialiasing in the Resize transforms of the given transform. 
This - is needed for ONNX/OpenVINO export, as antialiasing is not supported in the ONNX opset. - """ - - def __init__(self, model: nn.Module, transform: Transform, disable_antialias: bool = False) -> None: - super().__init__() - self.model = model - self.transform = transform - self.convert_center_crop() - if disable_antialias: - self.disable_antialias() - - def forward(self, batch: torch.Tensor) -> torch.Tensor | tuple[torch.Tensor, torch.Tensor]: - """Transform the input batch and pass it through the model.""" - batch = self.transform(batch) - return self.model(batch) - - def disable_antialias(self) -> None: - """Disable antialiasing in the Resize transforms of the given transform. - - This is needed for ONNX/OpenVINO export, as antialiasing is not supported in the ONNX opset. - """ - if isinstance(self.transform, Resize): - self.transform.antialias = False - if isinstance(self.transform, Compose): - for transform in self.transform.transforms: - if isinstance(transform, Resize): - transform.antialias = False - - def convert_center_crop(self) -> None: - """Convert CenterCrop to ExportableCenterCrop for ONNX export. - - The original CenterCrop transform is not supported in ONNX export. This method replaces the CenterCrop to - ExportableCenterCrop, which is supported in ONNX export. For more details, see the implementation of - ExportableCenterCrop. - """ - if isinstance(self.transform, CenterCrop): - self.transform = ExportableCenterCrop(size=self.transform.size) - elif isinstance(self.transform, Compose): - transforms = self.transform.transforms - for index in range(len(transforms)): - if isinstance(transforms[index], CenterCrop): - transforms[index] = ExportableCenterCrop(size=transforms[index].size) -class AnomalyModule(pl.LightningModule, ABC): +class AnomalyModule(ExportMixin, pl.LightningModule, ABC): """AnomalyModule to train, validate, predict and test images. Acts as a base class for all the Anomaly Modules in the library. 
@@ -352,265 +272,3 @@ def on_load_checkpoint(self, checkpoint: dict[str, Any]) -> None: """ self._transform = checkpoint["transform"] self.setup("load_checkpoint") - - def to_torch( - self, - export_root: Path | str, - transform: Transform | None = None, - task: TaskType | None = None, - ) -> Path: - """Export AnomalibModel to torch. - - Args: - export_root (Path): Path to the output folder. - transform (Transform, optional): Input transforms used for the model. If not provided, the transform is - taken from the model. - Defaults to ``None``. - task (TaskType | None): Task type. - Defaults to ``None``. - - Returns: - Path: Path to the exported pytorch model. - - Examples: - Assume that we have a model to train and we want to export it to torch format. - - >>> from anomalib.data import Visa - >>> from anomalib.models import Patchcore - >>> from anomalib.engine import Engine - ... - >>> datamodule = Visa() - >>> model = Patchcore() - >>> engine = Engine() - ... - >>> engine.fit(model, datamodule) - - Now that we have a model trained, we can export it to torch format. - - >>> model.to_torch( - ... export_root="path/to/export", - ... transform=datamodule.test_data.transform, - ... task=datamodule.test_data.task, - ... ) - """ - transform = transform or self.transform or self.configure_transforms() - inference_model = InferenceModel(model=self.model, transform=transform) - export_root = _create_export_root(export_root, ExportType.TORCH) - metadata = self.get_metadata(task=task) - pt_model_path = export_root / "model.pt" - torch.save( - obj={"model": inference_model, "metadata": metadata}, - f=pt_model_path, - ) - return pt_model_path - - def to_onnx( - self, - export_root: Path | str, - transform: Transform | None = None, - task: TaskType | None = None, - export_type: ExportType = ExportType.ONNX, - ) -> Path: - """Export model to onnx. - - Args: - export_root (Path): Path to the root folder of the exported model. 
- transform (Transform, optional): Input transforms used for the model. If not provided, the transform is - taken from the model. - Defaults to ``None``. - task (TaskType | None): Task type. - Defaults to ``None``. - export_type (ExportType): Mode to export the model. Since this method is used by OpenVINO export as well, we - need to pass the export type so that the right export path is created. - Defaults to ``ExportType.ONNX``. - - Returns: - Path: Path to the exported onnx model. - - Examples: - Export the Lightning Model to ONNX: - - >>> from anomalib.models import Patchcore - >>> from anomalib.data import Visa - ... - >>> datamodule = Visa() - >>> model = Patchcore() - ... - >>> model.to_onnx( - ... export_root="path/to/export", - ... transform=datamodule.test_data.transform, - ... task=datamodule.test_data.task - ... ) - - Using Custom Transforms: - This example shows how to use a custom ``Compose`` object for the ``transform`` argument. - - >>> model.to_onnx( - ... export_root="path/to/export", - ... task="segmentation", - ... 
) - """ - # TODO(djdameln): Move export functionality to anomaly module - # https://github.com/openvinotoolkit/anomalib/issues/1752 - transform = transform or self.transform or self.configure_transforms() - inference_model = InferenceModel(model=self.model, transform=transform, disable_antialias=True) - export_root = _create_export_root(export_root, export_type) - self._write_metadata_to_json(export_root, task) - onnx_path = export_root / "model.onnx" - torch.onnx.export( - inference_model, - torch.zeros((1, 3, 1, 1)).to(self.device), - str(onnx_path), - opset_version=14, - dynamic_axes={"input": {0: "batch_size", 2: "height", 3: "weight"}, "output": {0: "batch_size"}}, - input_names=["input"], - output_names=["output"], - ) - - return onnx_path - - def to_openvino( - self, - export_root: Path | str, - transform: Transform | None = None, - ov_args: dict[str, Any] | None = None, - task: TaskType | None = None, - ) -> Path: - """Convert onnx model to OpenVINO IR. - - Args: - export_root (Path): Path to the export folder. - transform (Transform, optional): Input transforms used for the model. If not provided, the transform is - taken from the model. - Defaults to ``None``. - ov_args: Model optimizer arguments for OpenVINO model conversion. - Defaults to ``None``. - task (TaskType | None): Task type. - Defaults to ``None``. - - Returns: - Path: Path to the exported onnx model. - - Raises: - ModuleNotFoundError: If OpenVINO is not installed. - - Returns: - Path: Path to the exported OpenVINO IR. - - Examples: - Export the Lightning Model to OpenVINO IR: - This example demonstrates how to export the Lightning Model to OpenVINO IR. - - >>> from anomalib.models import Patchcore - >>> from anomalib.data import Visa - ... - >>> datamodule = Visa() - >>> model = Patchcore() - ... - >>> model.to_openvino( - ... export_root="path/to/export", - ... transform=datamodule.test_data.transform, - ... task=datamodule.test_data.task - ... 
) - - Using Custom Transforms: - This example shows how to use a custom ``Transform`` object for the ``transform`` argument. - - >>> from torchvision.transforms.v2 import Resize - >>> transform = Resize(224, 224) - ... - >>> model.to_openvino( - ... export_root="path/to/export", - ... transform=transform, - ... task="segmentation", - ... ) - - """ - model_path = self.to_onnx(export_root, transform, task, ExportType.OPENVINO) - ov_model_path = model_path.with_suffix(".xml") - ov_args = {} if ov_args is None else ov_args - if convert_model is not None and serialize is not None: - model = convert_model(model_path, **ov_args) - serialize(model, ov_model_path) - else: - logger.exception("Could not find OpenVINO methods. Please check OpenVINO installation.") - raise ModuleNotFoundError - return ov_model_path - - def get_metadata( - self, - task: TaskType | None = None, - ) -> dict[str, Any]: - """Get metadata for the exported model. - - Args: - task (TaskType | None): Task type. - Defaults to None. - - Returns: - dict[str, Any]: Metadata for the exported model. - """ - data_metadata = {"task": task} - model_metadata = self._get_model_metadata() - metadata = {**data_metadata, **model_metadata} - - # Convert torch tensors to python lists or values for json serialization. - for key, value in metadata.items(): - if isinstance(value, torch.Tensor): - metadata[key] = value.numpy().tolist() - - return metadata - - def _get_model_metadata(self) -> dict[str, torch.Tensor]: - """Get meta data related to normalization from model. 
- - Returns: - dict[str, torch.Tensor]: Model metadata - """ - metadata = {} - cached_metadata: dict[str, Number | torch.Tensor] = {} - for threshold_name in ("image_threshold", "pixel_threshold"): - if hasattr(self, threshold_name): - cached_metadata[threshold_name] = getattr(self, threshold_name).cpu().value.item() - if hasattr(self, "normalization_metrics") and self.normalization_metrics.state_dict() is not None: - for key, value in self.normalization_metrics.state_dict().items(): - cached_metadata[key] = value.cpu() - # Remove undefined values by copying in a new dict - for key, val in cached_metadata.items(): - if not np.isinf(val).all(): - metadata[key] = val - del cached_metadata - return metadata - - def _write_metadata_to_json( - self, - export_root: Path, - task: TaskType | None = None, - ) -> None: - """Write metadata to json file. - - Args: - export_root (Path): Path to the exported model. - transform (dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose): Data transforms - (augmentations) used for the model. - task (TaskType | None): Task type. - Defaults to None. - """ - metadata = self.get_metadata(task=task) - with (export_root / "metadata.json").open("w", encoding="utf-8") as metadata_file: - json.dump(metadata, metadata_file, ensure_ascii=False, indent=4) - - -def _create_export_root(export_root: str | Path, export_type: ExportType) -> Path: - """Create export directory. - - Args: - export_root (str | Path): Path to the root folder of the exported model. - export_type (ExportType): Mode to export the model. Torch, ONNX or OpenVINO. - - Returns: - Path: Path to the export directory. 
- """ - export_root = Path(export_root) / "weights" / export_type.value - export_root.mkdir(parents=True, exist_ok=True) - return export_root diff --git a/tests/integration/cli/test_cli.py b/tests/integration/cli/test_cli.py index 5786831083..689676203c 100644 --- a/tests/integration/cli/test_cli.py +++ b/tests/integration/cli/test_cli.py @@ -13,7 +13,7 @@ import torch from anomalib.cli import AnomalibCLI -from anomalib.models import ExportType +from anomalib.deploy import ExportType class TestCLI: diff --git a/tests/integration/model/test_models.py b/tests/integration/model/test_models.py index c607380575..1004372ece 100644 --- a/tests/integration/model/test_models.py +++ b/tests/integration/model/test_models.py @@ -13,8 +13,9 @@ from anomalib import TaskType from anomalib.data import AnomalibDataModule, MVTec +from anomalib.deploy import ExportType from anomalib.engine import Engine -from anomalib.models import AnomalyModule, ExportType, get_available_models, get_model +from anomalib.models import AnomalyModule, get_available_models, get_model def models() -> set[str]: diff --git a/tests/unit/deploy/test_inferencer.py b/tests/unit/deploy/test_inferencer.py index d9662a32f8..99fd02bae3 100644 --- a/tests/unit/deploy/test_inferencer.py +++ b/tests/unit/deploy/test_inferencer.py @@ -11,9 +11,9 @@ import torch from anomalib import TaskType -from anomalib.deploy import OpenVINOInferencer, TorchInferencer +from anomalib.deploy import ExportType, OpenVINOInferencer, TorchInferencer from anomalib.engine import Engine -from anomalib.models import ExportType, Padim +from anomalib.models import Padim class _MockImageLoader: diff --git a/tox.ini b/tox.ini index 787786966b..a8136f0fe7 100644 --- a/tox.ini +++ b/tox.ini @@ -28,7 +28,7 @@ deps = pytest-cov pytest-mock pytest-order - pytest-rerunfailures + flaky nbmake commands = From a1c283aaae535f5d094bc68833340e525a2662d5 Mon Sep 17 00:00:00 2001 From: Duc Thinh Ngo Date: Fri, 12 Apr 2024 18:43:55 +0200 Subject: [PATCH 7/8] 
minor fixes --- src/anomalib/deploy/export.py | 56 +++++++++++++---------------------- 1 file changed, 20 insertions(+), 36 deletions(-) diff --git a/src/anomalib/deploy/export.py b/src/anomalib/deploy/export.py index 15696b30ed..f9df8368ae 100644 --- a/src/anomalib/deploy/export.py +++ b/src/anomalib/deploy/export.py @@ -208,7 +208,7 @@ def to_onnx( transform = transform or self.transform or self.configure_transforms() inference_model = InferenceModel(model=self.model, transform=transform, disable_antialias=True) export_root = _create_export_root(export_root, ExportType.ONNX) - self._write_metadata_to_json(export_root, task) + _write_metadata_to_json(self.get_metadata(task), export_root) onnx_path = export_root / "model.onnx" torch.onnx.export( inference_model, @@ -282,7 +282,7 @@ def to_openvino( transform = transform or self.transform or self.configure_transforms() export_root = _create_export_root(export_root, ExportType.OPENVINO) inference_model = InferenceModel(model=self.model, transform=transform, disable_antialias=True) - self._write_metadata_to_json(export_root, task) + _write_metadata_to_json(self.get_metadata(task), export_root) ov_model_path = export_root / "model.xml" ov_args = {} if ov_args is None else ov_args ov_args.update({"example_input": torch.zeros((1, 3, 1, 1)).to(self.device)}) @@ -308,23 +308,7 @@ def get_metadata( dict[str, Any]: Metadata for the exported model. """ data_metadata = {"task": task} - model_metadata = self._get_model_metadata() - metadata = {**data_metadata, **model_metadata} - - # Convert torch tensors to python lists or values for json serialization. - for key, value in metadata.items(): - if isinstance(value, torch.Tensor): - metadata[key] = value.numpy().tolist() - - return metadata - - def _get_model_metadata(self) -> dict[str, torch.Tensor]: - """Get meta data related to normalization from model. 
- - Returns: - dict[str, torch.Tensor]: Model metadata - """ - metadata = {} + model_metadata = {} cached_metadata: dict[str, Number | torch.Tensor] = {} for threshold_name in ("image_threshold", "pixel_threshold"): if hasattr(self, threshold_name): @@ -335,27 +319,27 @@ def _get_model_metadata(self) -> dict[str, torch.Tensor]: # Remove undefined values by copying in a new dict for key, val in cached_metadata.items(): if not np.isinf(val).all(): - metadata[key] = val + model_metadata[key] = val del cached_metadata + metadata = {**data_metadata, **model_metadata} + + # Convert torch tensors to python lists or values for json serialization. + for key, value in metadata.items(): + if isinstance(value, torch.Tensor): + metadata[key] = value.numpy().tolist() + return metadata - def _write_metadata_to_json( - self, - export_root: Path, - task: TaskType | None = None, - ) -> None: - """Write metadata to json file. - Args: - export_root (Path): Path to the exported model. - transform (dict[str, Any] | AnomalibDataset | AnomalibDataModule | A.Compose): Data transforms - (augmentations) used for the model. - task (TaskType | None): Task type. - Defaults to None. - """ - metadata = self.get_metadata(task=task) - with (export_root / "metadata.json").open("w", encoding="utf-8") as metadata_file: - json.dump(metadata, metadata_file, ensure_ascii=False, indent=4) +def _write_metadata_to_json(metadata: dict[str, Any], export_root: Path) -> None: + """Write metadata to json file. + + Args: + metadata (dict[str, Any]): Metadata to export. + export_root (Path): Path to the exported model. 
+ """ + with (export_root / "metadata.json").open("w", encoding="utf-8") as metadata_file: + json.dump(metadata, metadata_file, ensure_ascii=False, indent=4) def _create_export_root(export_root: str | Path, export_type: ExportType) -> Path: From 01adb194c9048da49923958ee4fd260056890e82 Mon Sep 17 00:00:00 2001 From: Duc Thinh Ngo Date: Fri, 12 Apr 2024 20:07:37 +0200 Subject: [PATCH 8/8] revert to openvino-export via onnx --- src/anomalib/deploy/export.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/src/anomalib/deploy/export.py b/src/anomalib/deploy/export.py index f9df8368ae..9c74eb8e97 100644 --- a/src/anomalib/deploy/export.py +++ b/src/anomalib/deploy/export.py @@ -175,9 +175,6 @@ def to_onnx( Defaults to ``None``. task (TaskType | None): Task type. Defaults to ``None``. - export_type (ExportType): Mode to export the model. Since this method is used by OpenVINO export as well, we - need to pass the export type so that the right export path is created. - Defaults to ``ExportType.ONNX``. Returns: Path: Path to the exported onnx model. @@ -279,15 +276,13 @@ def to_openvino( ... ) """ - transform = transform or self.transform or self.configure_transforms() + model_path = self.to_onnx(export_root, transform, task) export_root = _create_export_root(export_root, ExportType.OPENVINO) - inference_model = InferenceModel(model=self.model, transform=transform, disable_antialias=True) - _write_metadata_to_json(self.get_metadata(task), export_root) ov_model_path = export_root / "model.xml" ov_args = {} if ov_args is None else ov_args ov_args.update({"example_input": torch.zeros((1, 3, 1, 1)).to(self.device)}) if convert_model is not None and serialize is not None: - model = convert_model(inference_model, **ov_args) + model = convert_model(model_path, **ov_args) serialize(model, ov_model_path) else: logger.exception("Could not find OpenVINO methods. Please check OpenVINO installation.")