diff --git a/CHANGELOG.md b/CHANGELOG.md index 0c6b4e5d96..e7f23ef2ac 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). ### Added +- 🚀 Update OpenVINO and ONNX export to support fixed input shape by @adrianboguszewski in https://github.com/openvinotoolkit/anomalib/pull/2006 - Add data_path argument to predict entrypoint and add properties for retrieving model path by @djdameln in https://github.com/openvinotoolkit/anomalib/pull/2018 ### Changed diff --git a/pyproject.toml b/pyproject.toml index fa5af34ae4..0e3e5316a3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,7 +39,7 @@ core = [ "torchmetrics>=1.3.2", "open-clip-torch>=2.23.0", ] -openvino = ["openvino-dev>=2023.0", "nncf>=2.5.0", "onnx>=1.16.0"] +openvino = ["openvino-dev>=2023.1", "nncf>=2.6.0", "onnx>=1.16.0"] loggers = [ "comet-ml>=3.31.7", "gradio>=4", diff --git a/src/anomalib/cli/cli.py b/src/anomalib/cli/cli.py index 1accf96f57..d13ed039ec 100644 --- a/src/anomalib/cli/cli.py +++ b/src/anomalib/cli/cli.py @@ -232,7 +232,7 @@ def add_export_arguments(self, parser: ArgumentParser) -> None: added = parser.add_method_arguments( Engine, "export", - skip={"mo_args", "model"}, + skip={"ov_args", "model"}, ) self.subcommand_method_arguments["export"] = added add_openvino_export_arguments(parser) diff --git a/src/anomalib/cli/utils/openvino.py b/src/anomalib/cli/utils/openvino.py index 70e329f6b4..65ac7b80db 100644 --- a/src/anomalib/cli/utils/openvino.py +++ b/src/anomalib/cli/utils/openvino.py @@ -22,11 +22,11 @@ def add_openvino_export_arguments(parser: ArgumentParser) -> None: """Add OpenVINO arguments to parser under --mo key.""" if get_common_cli_parser is not None: group = parser.add_argument_group("OpenVINO Model Optimizer arguments (optional)") - mo_parser = get_common_cli_parser() + ov_parser = get_common_cli_parser() # remove redundant keys from mo keys - for arg in mo_parser._actions: # noqa: SLF001 + 
for arg in ov_parser._actions: # noqa: SLF001 if arg.dest in ("help", "input_model", "output_dir"): continue - group.add_argument(f"--mo_args.{arg.dest}", type=arg.type, default=arg.default, help=arg.help) + group.add_argument(f"--ov_args.{arg.dest}", type=arg.type, default=arg.default, help=arg.help) else: logger.info("OpenVINO is possibly not installed in the environment. Skipping adding it to parser.") diff --git a/src/anomalib/deploy/export.py b/src/anomalib/deploy/export.py index 83d63bf0dd..ecaa72a42a 100644 --- a/src/anomalib/deploy/export.py +++ b/src/anomalib/deploy/export.py @@ -25,10 +25,6 @@ logger = logging.getLogger("anomalib") -if try_import("openvino"): - from openvino.runtime import serialize - from openvino.tools.ovc import convert_model - class ExportType(str, Enum): """Model export type. @@ -160,6 +156,7 @@ def export_to_torch( def export_to_onnx( model: AnomalyModule, export_root: Path | str, + input_size: tuple[int, int] | None = None, transform: Transform | None = None, task: TaskType | None = None, export_type: ExportType = ExportType.ONNX, @@ -169,6 +166,8 @@ def export_to_onnx( Args: model (AnomalyModule): Model to export. export_root (Path): Path to the root folder of the exported model. + input_size (tuple[int, int] | None, optional): Image size used as the input for onnx converter. + Defaults to None. transform (Transform, optional): Input transforms used for the model. If not provided, the transform is taken from the model. Defaults to ``None``. 
@@ -212,14 +211,18 @@ def export_to_onnx( transform = transform or model.transform or model.configure_transforms() inference_model = InferenceModel(model=model.model, transform=transform, disable_antialias=True) export_root = _create_export_root(export_root, export_type) + input_shape = torch.zeros((1, 3, *input_size)) if input_size else torch.zeros((1, 3, 1, 1)) + dynamic_axes = ( + None if input_size else {"input": {0: "batch_size", 2: "height", 3: "weight"}, "output": {0: "batch_size"}} + ) _write_metadata_to_json(export_root, model, task) onnx_path = export_root / "model.onnx" torch.onnx.export( inference_model, - torch.zeros((1, 3, 1, 1)).to(model.device), + input_shape.to(model.device), str(onnx_path), opset_version=14, - dynamic_axes={"input": {0: "batch_size", 2: "height", 3: "weight"}, "output": {0: "batch_size"}}, + dynamic_axes=dynamic_axes, input_names=["input"], output_names=["output"], ) @@ -228,8 +231,9 @@ def export_to_onnx( def export_to_openvino( - export_root: Path | str, model: AnomalyModule, + export_root: Path | str, + input_size: tuple[int, int] | None = None, transform: Transform | None = None, ov_args: dict[str, Any] | None = None, task: TaskType | None = None, @@ -237,8 +241,10 @@ def export_to_openvino( """Convert onnx model to OpenVINO IR. Args: - export_root (Path): Path to the export folder. model (AnomalyModule): AnomalyModule to export. + export_root (Path): Path to the export folder. + input_size (tuple[int, int] | None, optional): Input size of the model. Used for adding metadata to the IR. + Defaults to None. transform (Transform, optional): Input transforms used for the model. If not provided, the transform is taken from the model. Defaults to ``None``. @@ -289,15 +295,21 @@ def export_to_openvino( ... ) """ - model_path = export_to_onnx(model, export_root, transform, task, ExportType.OPENVINO) + if not try_import("openvino"): + logger.exception("Could not find OpenVINO. 
Please check OpenVINO installation.") + raise ModuleNotFoundError + + import openvino as ov + + model_path = export_to_onnx(model, export_root, input_size, transform, task, ExportType.OPENVINO) ov_model_path = model_path.with_suffix(".xml") ov_args = {} if ov_args is None else ov_args - if convert_model is not None and serialize is not None: - model = convert_model(model_path, **ov_args) - serialize(model, ov_model_path) - else: - logger.exception("Could not find OpenVINO methods. Please check OpenVINO installation.") - raise ModuleNotFoundError + # fp16 compression is enabled by default + compress_to_fp16 = ov_args.get("compress_to_fp16", True) + + model = ov.convert_model(model_path, **ov_args) + ov.save_model(model, ov_model_path, compress_to_fp16=compress_to_fp16) + return ov_model_path diff --git a/src/anomalib/deploy/inferencers/openvino_inferencer.py b/src/anomalib/deploy/inferencers/openvino_inferencer.py index db0d966fad..3e8f18540e 100644 --- a/src/anomalib/deploy/inferencers/openvino_inferencer.py +++ b/src/anomalib/deploy/inferencers/openvino_inferencer.py @@ -199,6 +199,10 @@ def predict( msg = f"Input image must be a numpy array or a path to an image. 
Got {type(image)}" raise TypeError(msg) + # Resize image to model input size if not dynamic + if self.input_blob.partial_shape[2].is_static and self.input_blob.partial_shape[3].is_static: + image = cv2.resize(image, tuple(self.input_blob.shape[2:][::-1])) + # Normalize numpy array to range [0, 1] if image.dtype != np.float32: image = image.astype(np.float32) diff --git a/src/anomalib/engine/engine.py b/src/anomalib/engine/engine.py index 43e9e2d213..16d9f4f80d 100644 --- a/src/anomalib/engine/engine.py +++ b/src/anomalib/engine/engine.py @@ -867,6 +867,7 @@ def export( model: AnomalyModule, export_type: ExportType, export_root: str | Path | None = None, + input_size: tuple[int, int] | None = None, transform: Transform | None = None, ov_args: dict[str, Any] | None = None, ckpt_path: str | Path | None = None, @@ -878,6 +879,8 @@ def export( export_type (ExportType): Export type. export_root (str | Path | None, optional): Path to the output directory. If it is not set, the model is exported to trainer.default_root_dir. Defaults to None. + input_size (tuple[int, int] | None, optional): A static input shape for the model, which is exported to ONNX + and OpenVINO format. Defaults to None. transform (Transform | None, optional): Input transform to include in the exported model. If not provided, the engine will try to use the transform from the datamodule or dataset. Defaults to None. ov_args (dict[str, Any] | None, optional): This is optional and used only for OpenVINO's model optimizer. @@ -904,10 +907,10 @@ def export( ```python anomalib export --model Padim --export_mode OPENVINO --data Visa --input_size "[256,256]" ``` - 4. You can also overrride OpenVINO model optimizer by adding the ``--mo_args.`` arguments. + 4. You can also override OpenVINO model optimizer by adding the ``--ov_args.`` arguments. 
```python anomalib export --model Padim --export_mode OPENVINO --data Visa --input_size "[256,256]" \ - --mo_args.compress_to_fp16 False + --ov_args.compress_to_fp16 False ``` """ self._setup_trainer(model) @@ -930,6 +933,7 @@ def export( exported_model_path = export_to_onnx( model=model, export_root=export_root, + input_size=input_size, transform=transform, task=self.task, ) @@ -937,6 +941,7 @@ def export( exported_model_path = export_to_openvino( model=model, export_root=export_root, + input_size=input_size, transform=transform, task=self.task, ov_args=ov_args,