Fix >100% confidence issue for OpenVINO inference (#667)
* fix openvino inference by normalizing predicted scores

* typing

* add test case

* value error for unknown task type
djdameln authored Nov 7, 2022
1 parent 3de3eef commit aeb191f
Showing 4 changed files with 29 additions and 20 deletions.
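
The root cause of the >100% confidence readings was that the OpenVINO classification path returned the raw image-level score without passing it through the min-max normalization the base inferencer already applies to anomaly maps. The sketch below illustrates the idea behind that normalization: the score is rescaled with the min/max statistics and image threshold stored in the exported metadata, so the threshold lands at 0.5 and the result is clipped to [0, 1]. The formula and the metadata values here are illustrative assumptions, not a verbatim copy of anomalib's normalize_min_max.

import numpy as np

def min_max_normalize(score: np.ndarray, threshold: float, min_val: float, max_val: float) -> np.ndarray:
    # Rescale so that `threshold` maps to 0.5, then clip into [0, 1].
    normalized = (score - threshold) / (max_val - min_val) + 0.5
    return np.clip(normalized, 0.0, 1.0)

# Hypothetical raw OpenVINO output and exported metadata values.
raw_score = np.float32(17.3)
meta_data = {"image_threshold": 12.0, "min": 4.0, "max": 20.0}
pred_score = min_max_normalize(raw_score, meta_data["image_threshold"], meta_data["min"], meta_data["max"])
print(pred_score)  # ~0.83 -- always within [0, 1], so it can safely be reported as a percentage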
28 changes: 14 additions & 14 deletions anomalib/deploy/inferencers/base_inferencer.py
@@ -125,40 +125,40 @@ def __call__(self, image: np.ndarray) -> ImageResult:
 
     def _normalize(
         self,
-        anomaly_maps: Union[Tensor, np.ndarray],
         pred_scores: Union[Tensor, np.float32],
         meta_data: Union[Dict, DictConfig],
-    ) -> Tuple[Union[np.ndarray, Tensor], float]:
+        anomaly_maps: Optional[Union[Tensor, np.ndarray]] = None,
+    ) -> Tuple[Optional[Union[np.ndarray, Tensor]], float]:
         """Applies normalization and resizes the image.
         Args:
-            anomaly_maps (Union[Tensor, np.ndarray]): Predicted raw anomaly map.
             pred_scores (Union[Tensor, np.float32]): Predicted anomaly score
             meta_data (Dict): Meta data. Post-processing step sometimes requires
                 additional meta data such as image shape. This variable comprises such info.
+            anomaly_maps (Optional[Union[Tensor, np.ndarray]]): Predicted raw anomaly map.
         Returns:
-            Tuple[Union[np.ndarray, Tensor], float]: Post processed predictions that are ready to be visualized and
-                predicted scores.
+            Tuple[Optional[Union[np.ndarray, Tensor], float]]: Post processed predictions that are ready to be
+                visualized and predicted scores.
         """
 
         # min max normalization
         if "min" in meta_data and "max" in meta_data:
-            anomaly_maps = normalize_min_max(
-                anomaly_maps, meta_data["pixel_threshold"], meta_data["min"], meta_data["max"]
-            )
+            if anomaly_maps is not None:
+                anomaly_maps = normalize_min_max(
+                    anomaly_maps, meta_data["pixel_threshold"], meta_data["min"], meta_data["max"]
+                )
             pred_scores = normalize_min_max(
                 pred_scores, meta_data["image_threshold"], meta_data["min"], meta_data["max"]
             )
 
         # standardize pixel scores
         if "pixel_mean" in meta_data.keys() and "pixel_std" in meta_data.keys():
-            anomaly_maps = standardize(
-                anomaly_maps, meta_data["pixel_mean"], meta_data["pixel_std"], center_at=meta_data["image_mean"]
-            )
-            anomaly_maps = normalize_cdf(anomaly_maps, meta_data["pixel_threshold"])
+            if anomaly_maps is not None:
+                anomaly_maps = standardize(
+                    anomaly_maps, meta_data["pixel_mean"], meta_data["pixel_std"], center_at=meta_data["image_mean"]
+                )
+                anomaly_maps = normalize_cdf(anomaly_maps, meta_data["pixel_threshold"])
 
         # standardize image scores
         if "image_mean" in meta_data.keys() and "image_std" in meta_data.keys():
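With anomaly_maps now optional and moved to the end of the `_normalize` signature, a classification-only path can normalize the image-level score without supplying a map. The two call patterns below are taken from the updated inferencers; `self` is an Inferencer subclass and `meta_data` is the exported metadata dictionary.

# Classification: only the image-level score is normalized; no anomaly map is passed.
_, pred_score = self._normalize(pred_scores=pred_score, meta_data=meta_data)

# Segmentation: both the pixel-level anomaly map and the image-level score are normalized.
anomaly_map, pred_score = self._normalize(
    pred_scores=pred_score, anomaly_maps=anomaly_map, meta_data=meta_data
)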
13 changes: 10 additions & 3 deletions anomalib/deploy/inferencers/openvino_inferencer.py
@@ -147,7 +147,7 @@ def post_process(
         # classification, and the value is the classification prediction score.
         if len(predictions.shape) == 1:
             task = "classification"
-            pred_score = predictions.item()
+            pred_score = predictions
         else:
             task = "segmentation"
             anomaly_map = predictions.squeeze()
@@ -159,11 +159,16 @@
         if "image_threshold" in meta_data:
             pred_label = pred_score >= meta_data["image_threshold"]
 
-        if task == "segmentation":
+        if task == "classification":
+            _, pred_score = self._normalize(pred_scores=pred_score, meta_data=meta_data)
+        elif task == "segmentation":
             if "pixel_threshold" in meta_data:
                 pred_mask = (anomaly_map >= meta_data["pixel_threshold"]).astype(np.uint8)
 
-            anomaly_map, pred_score = self._normalize(anomaly_map, pred_score, meta_data)
+            anomaly_map, pred_score = self._normalize(
+                pred_scores=pred_score, anomaly_maps=anomaly_map, meta_data=meta_data
+            )
+            assert anomaly_map is not None
 
             if "image_shape" in meta_data and anomaly_map.shape != meta_data["image_shape"]:
                 image_height = meta_data["image_shape"][0]
@@ -172,6 +177,8 @@
 
             if pred_mask is not None:
                 pred_mask = cv2.resize(pred_mask, (image_width, image_height))
+        else:
+            raise ValueError(f"Unknown task type: {task}")
 
         return {
             "anomaly_map": anomaly_map,
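Condensed, the OpenVINO post-processing now infers the task from the shape of the network output, normalizes per task, and rejects anything it does not recognize. A simplified sketch of the control flow shown in the diff above (thresholding and resizing omitted):

if len(predictions.shape) == 1:   # a single image-level score
    task = "classification"
    pred_score = predictions
else:                             # a 2D anomaly map
    task = "segmentation"
    anomaly_map = predictions.squeeze()

if task == "classification":
    _, pred_score = self._normalize(pred_scores=pred_score, meta_data=meta_data)
elif task == "segmentation":
    anomaly_map, pred_score = self._normalize(
        pred_scores=pred_score, anomaly_maps=anomaly_map, meta_data=meta_data
    )
else:
    raise ValueError(f"Unknown task type: {task}")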
2 changes: 1 addition & 1 deletion anomalib/deploy/inferencers/torch_inferencer.py
@@ -162,7 +162,7 @@ def post_process(self, predictions: Tensor, meta_data: Optional[Union[Dict, Dict
             pred_mask = (anomaly_map >= meta_data["pixel_threshold"]).squeeze().astype(np.uint8)
 
         anomaly_map = anomaly_map.squeeze()
-        anomaly_map, pred_score = self._normalize(anomaly_map, pred_score, meta_data)
+        anomaly_map, pred_score = self._normalize(anomaly_maps=anomaly_map, pred_scores=pred_score, meta_data=meta_data)
 
         if isinstance(anomaly_map, Tensor):
             anomaly_map = anomaly_map.detach().cpu().numpy()
6 changes: 4 additions & 2 deletions tests/pre_merge/deploy/test_inferencer.py
@@ -77,7 +77,8 @@ def test_torch_inference(self, model_name: str, category: str = "shapes", path:
         torch_dataloader = MockImageLoader(model_config.dataset.image_size, total_count=1)
         with torch.no_grad():
             for image in torch_dataloader():
-                torch_inferencer.predict(image)
+                prediction = torch_inferencer.predict(image)
+                assert 0.0 <= prediction.pred_score <= 1.0  # confirm if predicted scores are normalized
 
     @pytest.mark.parametrize(
         "model_name",
@@ -116,4 +117,5 @@ def test_openvino_inference(self, model_name: str, category: str = "shapes", pat
         )
         openvino_dataloader = MockImageLoader(model_config.dataset.image_size, total_count=1)
         for image in openvino_dataloader():
-            openvino_inferencer.predict(image)
+            prediction = openvino_inferencer.predict(image)
+            assert 0.0 <= prediction.pred_score <= 1.0  # confirm if predicted scores are normalized
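
The same bound can be checked outside the test suite. A rough sketch, assuming a model already exported to OpenVINO; the constructor arguments and file paths below are hypothetical and may differ between anomalib versions:

import numpy as np
from anomalib.deploy import OpenVINOInferencer

# Hypothetical paths to the training config, the exported model, and its metadata.
inferencer = OpenVINOInferencer(config="config.yaml", path="model.xml", meta_data_path="meta_data.json")
prediction = inferencer.predict(image=np.zeros((256, 256, 3), dtype=np.uint8))
assert 0.0 <= prediction.pred_score <= 1.0  # normalized score, never above 100%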
