Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Migrate anomaly exportable code to modelAPI #2432

Merged
Show file tree
Hide file tree
Changes from 18 commits
Commits
Show all changes
22 commits
Select commit Hold shift + click to select a range
2527ae2
Fix license in PR template
Mar 28, 2023
3c029d3
Merge branch 'develop' of github.com:ashwinvaidya17/training_extensio…
Apr 12, 2023
7d9d9e0
Merge branch 'develop' of github.com:openvinotoolkit/training_extensi…
May 23, 2023
fce6281
Merge branch 'develop' of github.com:openvinotoolkit/training_extensi…
May 31, 2023
4f1dd48
Merge branch 'develop' of github.com:openvinotoolkit/training_extensi…
Jul 5, 2023
185f355
Merge branch 'develop' of github.com:openvinotoolkit/training_extensi…
Jul 25, 2023
bc8fd58
Merge branch 'openvinotoolkit:develop' into develop
ashwinvaidya17 Jul 25, 2023
f89ee97
Merge branch 'develop' of github.com:openvinotoolkit/training_extensi…
Aug 9, 2023
53f5508
Migrate to modelAPI
Aug 10, 2023
e5f59dd
Remove color conversion in streamer
Aug 10, 2023
36c8a5c
Remove reverse_input_channels
Aug 10, 2023
0519ad4
Add float
Aug 10, 2023
648e3a2
Remove test as metadata is no longer used
Aug 10, 2023
cbd5c83
Remove metadata from load method
Aug 11, 2023
f1dbe1d
Merge branch 'develop' into ashwin/migrate_anomaly_model_api
ashwinvaidya17 Aug 11, 2023
9732d5e
remove anomalib openvino inferencer
Aug 21, 2023
5ce814a
Merge branch 'develop' of github.com:openvinotoolkit/training_extensi…
Aug 21, 2023
de02eb7
fix signature
Aug 21, 2023
cf52ab6
Merge branch 'develop' into ashwin/migrate_anomaly_model_api
ashwinvaidya17 Aug 25, 2023
a68fadd
Merge branch 'develop' of github.com:openvinotoolkit/training_extensi…
Sep 6, 2023
3c37a9f
Merge branch 'develop' of github.com:openvinotoolkit/training_extensi…
Oct 5, 2023
f514cef
Support legacy OpenVINO model
Oct 5, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view

This file was deleted.

This file was deleted.

This file was deleted.

This file was deleted.

This file was deleted.

2 changes: 1 addition & 1 deletion src/otx/algorithms/anomaly/tasks/inference.py
Original file line number Diff line number Diff line change
Expand Up @@ -357,7 +357,7 @@ def _add_metadata_to_ir(self, model_file: str, export_type: ExportType) -> None:
if "min" in metadata and "max" in metadata:
extra_model_data[("model_info", "normalization_scale")] = metadata["max"] - metadata["min"]

extra_model_data[("model_info", "reverse_input_channels")] = True
extra_model_data[("model_info", "reverse_input_channels")] = False
extra_model_data[("model_info", "model_type")] = "AnomalyDetection"
extra_model_data[("model_info", "labels")] = "Normal Anomaly"
if export_type == ExportType.OPENVINO:
Expand Down
78 changes: 17 additions & 61 deletions src/otx/algorithms/anomaly/tasks/openvino.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,11 +26,10 @@
import numpy as np
import openvino.runtime as ov
from addict import Dict as ADDict
from anomalib.deploy import OpenVINOInferencer
from nncf.common.quantization.structs import QuantizationPreset
from omegaconf import OmegaConf
from openvino.model_api.models import AnomalyDetection, AnomalyResult

import otx.algorithms.anomaly.adapters.anomalib.exportable_code
from otx.algorithms.anomaly.adapters.anomalib.config import get_anomalib_config
from otx.algorithms.anomaly.adapters.anomalib.logger import get_logger
from otx.algorithms.anomaly.configs.base.configuration import BaseAnomalyConfig
Expand Down Expand Up @@ -79,17 +78,15 @@ class OTXOpenVINOAnomalyDataloader:

Args:
dataset (DatasetEntity): OTX dataset entity
inferencer (OpenVINOInferencer): OpenVINO Inferencer
shuffle (bool, optional): Shuffle dataset. Defaults to True.
"""

def __init__(
self,
dataset: DatasetEntity,
inferencer: OpenVINOInferencer,
shuffle: bool = True,
):
self.dataset = dataset
self.inferencer = inferencer
self.shuffler = None
if shuffle:
self.shuffler = list(range(len(dataset)))
Expand All @@ -109,9 +106,8 @@ def __getitem__(self, index: int):

image = self.dataset[index].numpy
annotation = self.dataset[index].annotation_scene
inputs = self.inferencer.pre_process(image)

return (index, annotation), inputs
return (index, annotation), image

def __len__(self) -> int:
"""Get size of the dataset.
Expand All @@ -134,7 +130,7 @@ def __init__(self, task_environment: TaskEnvironment) -> None:
self.task_environment = task_environment
self.task_type = self.task_environment.model_template.task_type
self.config = self.get_config()
self.inferencer = self.load_inferencer()
self.inference_model = self.get_openvino_model()

labels = self.task_environment.get_labels()
self.normal_label = [label for label in labels if not label.is_anomalous][0]
Expand Down Expand Up @@ -172,15 +168,13 @@ def infer(self, dataset: DatasetEntity, inference_parameters: InferenceParameter
if inference_parameters is not None:
update_progress_callback = inference_parameters.update_progress # type: ignore

# This always assumes that threshold is available in the task environment's model
meta_data = self.get_metadata()
for idx, dataset_item in enumerate(dataset):
image_result = self.inferencer.predict(dataset_item.numpy, metadata=meta_data)
image_result: AnomalyResult = self.inference_model(dataset_item.numpy)

# TODO: inferencer should return predicted label and mask
pred_label = image_result.pred_score >= 0.5
pred_mask = (image_result.anomaly_map >= 0.5).astype(np.uint8)
probability = image_result.pred_score if pred_label else 1 - image_result.pred_score
pred_label = image_result.pred_label
pred_mask = image_result.pred_mask
probability = image_result.pred_score if pred_label == "Anomaly" else 1 - image_result.pred_score
ashwinvaidya17 marked this conversation as resolved.
Show resolved Hide resolved
if self.task_type == TaskType.ANOMALY_CLASSIFICATION:
label = self.anomalous_label if image_result.pred_score >= 0.5 else self.normal_label
elif self.task_type == TaskType.ANOMALY_SEGMENTATION:
Expand Down Expand Up @@ -212,20 +206,6 @@ def infer(self, dataset: DatasetEntity, inference_parameters: InferenceParameter

return dataset

def get_metadata(self) -> Dict:
"""Get Meta Data."""
metadata = {}
if self.task_environment.model is not None:
metadata = json.loads(self.task_environment.model.get_data("metadata").decode())
metadata["image_threshold"] = np.array(metadata["image_threshold"], dtype=np.float32).item()
metadata["pixel_threshold"] = np.array(metadata["pixel_threshold"], dtype=np.float32).item()
metadata["min"] = np.array(metadata["min"], dtype=np.float32).item()
metadata["max"] = np.array(metadata["max"], dtype=np.float32).item()
else:
raise ValueError("Cannot access meta-data. self.task_environment.model is empty.")

return metadata

def evaluate(self, output_resultset: ResultSetEntity, evaluation_metric: Optional[str] = None):
"""Evaluate the performance of the model.

Expand Down Expand Up @@ -287,7 +267,7 @@ def optimize(
)

logger.info("Starting PTQ optimization.")
data_loader = OTXOpenVINOAnomalyDataloader(dataset=dataset, inferencer=self.inferencer)
data_loader = OTXOpenVINOAnomalyDataloader(dataset=dataset)
quantization_dataset = nncf.Dataset(data_loader, lambda data: data[1])

with tempfile.TemporaryDirectory() as tempdir:
Expand Down Expand Up @@ -322,33 +302,29 @@ def optimize(
self.__load_weights(path=os.path.join(tempdir, "model.bin"), output_model=output_model, key="openvino.bin")

output_model.set_data("label_schema.json", label_schema_to_bytes(self.task_environment.label_schema))
output_model.set_data("metadata", self.task_environment.model.get_data("metadata"))
output_model.model_format = ModelFormat.OPENVINO
output_model.optimization_type = ModelOptimizationType.POT
output_model.optimization_methods = [OptimizationMethod.QUANTIZATION]
output_model.precision = [ModelPrecision.INT8]

self.task_environment.model = output_model
self.inferencer = self.load_inferencer()
self.inference_model = self.get_openvino_model()

if optimization_parameters is not None:
optimization_parameters.update_progress(100, None)
logger.info("PTQ optimization completed")

def load_inferencer(self) -> OpenVINOInferencer:
def get_openvino_model(self) -> AnomalyDetection:
"""Create the OpenVINO inferencer object.

Returns:
OpenVINOInferencer object
AnomalyDetection model
"""
if self.task_environment.model is None:
raise Exception("task_environment.model is None. Cannot load weights.")
return OpenVINOInferencer(
path=(
self.task_environment.model.get_data("openvino.xml"),
self.task_environment.model.get_data("openvino.bin"),
),
metadata=self.get_metadata(),
return AnomalyDetection.create_model(
model=self.task_environment.model.get_data("openvino.xml"),
weights_path=self.task_environment.model.get_data("openvino.bin"),
)

@staticmethod
Expand Down Expand Up @@ -379,19 +355,10 @@ def _get_openvino_configuration(self) -> Dict[str, Any]:
if self.task_environment.model is None:
raise Exception("task_environment.model is None. Cannot get configuration.")

configuration = {
"metadata": self.get_metadata(),
configuration: Dict[str, Any] = {
"labels": LabelSchemaMapper.forward(self.task_environment.label_schema),
"threshold": 0.5,
}

if "transforms" not in self.config.keys():
configuration["mean_values"] = list(np.array([0.485, 0.456, 0.406]) * 255)
configuration["scale_values"] = list(np.array([0.229, 0.224, 0.225]) * 255)
else:
configuration["mean_values"] = self.config.transforms.mean
configuration["scale_values"] = self.config.transforms.std

return configuration

def deploy(self, output_model: ModelEntity) -> None:
Expand All @@ -413,7 +380,7 @@ def deploy(self, output_model: ModelEntity) -> None:

task_type = str(self.task_type).lower()

parameters["type_of_model"] = task_type
parameters["type_of_model"] = "AnomalyDetection"
sungmanc marked this conversation as resolved.
Show resolved Hide resolved
parameters["converter_type"] = task_type.upper()
parameters["model_parameters"] = self._get_openvino_configuration()
zip_buffer = io.BytesIO()
Expand All @@ -422,17 +389,6 @@ def deploy(self, output_model: ModelEntity) -> None:
arch.writestr(os.path.join("model", "model.xml"), self.task_environment.model.get_data("openvino.xml"))
arch.writestr(os.path.join("model", "model.bin"), self.task_environment.model.get_data("openvino.bin"))
arch.writestr(os.path.join("model", "config.json"), json.dumps(parameters, ensure_ascii=False, indent=4))
# model_wrappers files
for root, _, files in os.walk(
os.path.dirname(otx.algorithms.anomaly.adapters.anomalib.exportable_code.__file__)
):
if "__pycache__" in root:
continue
for file in files:
file_path = os.path.join(root, file)
arch.write(
file_path, os.path.join("python", "model_wrappers", file_path.split("exportable_code/")[1])
)
# other python files
arch.write(os.path.join(work_dir, "requirements.txt"), os.path.join("python", "requirements.txt"))
arch.write(os.path.join(work_dir, "LICENSE"), os.path.join("python", "LICENSE"))
Expand Down
Loading