
✨ [ANOMALY] Add DRAEM task implementation (#1203)
* Add initial support for anomalib 0.3.3

* Refactor directory structure

* revert path

* 🚚 Move python configs

* Replace LabelNames with is_anomalous

* Rename model_config_path to config_path

* 🚚 Move tasks from tools to root

* Refactor configs

* 🚚 refactor templates + configs structure

* Upgrade torch version

* fix url

* Update template paths + add nncf to requirements

* Update label mapper

* Fix nncf task + address tests

* Revert to previous torch version + add torchtext dependency

* Upgrade anomalib version

* Revert torchtext removal

* Fix openvino inference

* Upgrade to 0.3.5

* Rename task

* Limit pytorch lightning version

* add config files and templates for Draem model

* formatting

* properly load transform config

* remove backbone parameter from base config

* set transform in config adapter instead of data adapter

* fix transform config

* formatting

* add compression config

* change default parameter values

* use short license header

* rename draem templates

Co-authored-by: Ashwin Vaidya <ashwinitinvaidya@gmail.com>
Co-authored-by: Ashwin Vaidya <ashwin.vaidya@intel.com>
3 people authored Aug 11, 2022
1 parent 0c4a04c commit 1d54006
Showing 15 changed files with 548 additions and 12 deletions.
7 changes: 7 additions & 0 deletions external/anomaly/adapters/anomalib/config/anomalib_config.py
@@ -39,6 +39,13 @@ def get_anomalib_config(task_name: str, ote_config: ConfigurableParameters) -> U
"""
config_path = Path(anomalib.__file__).parent / "models" / task_name.lower() / "config.yaml"
anomalib_config = get_configurable_parameters(model_name=task_name.lower(), config_path=config_path)
# TODO: remove this hard coding of the config location
if anomalib_config.model.name == "draem":
anomalib_config.dataset.transform_config.train = "external/anomaly/configs/draem/transform_config.yaml"
anomalib_config.dataset.transform_config.val = "external/anomaly/configs/draem/transform_config.yaml"
else:
anomalib_config.dataset.transform_config.train = None
anomalib_config.dataset.transform_config.val = None
update_anomalib_config(anomalib_config, ote_config)
return anomalib_config

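Note on the hard-coded transform paths above: anomalib's PreProcessor is expected to read an albumentations pipeline serialized to YAML, so a transform_config.yaml of the kind referenced here could be produced as sketched below. This is a minimal illustration, not the file shipped in this PR; the Resize transform and size are assumptions.

import albumentations as A

# Illustrative pipeline only; the transforms actually used by DRAEM may differ.
transforms = A.Compose([A.Resize(256, 256, always_apply=True)])

# Serialize to the path that get_anomalib_config() points the DRAEM config at.
A.save(transforms, "external/anomaly/configs/draem/transform_config.yaml", data_format="yaml")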
3 changes: 2 additions & 1 deletion external/anomaly/adapters/anomalib/data/data.py
@@ -62,8 +62,9 @@ def __init__(self, config: Union[DictConfig, ListConfig], dataset: DatasetEntity
self.dataset = dataset
self.task_type = task_type

# TODO: distinguish between train and val config here
self.pre_processor = PreProcessor(
config=config.transform if "transform" in config.keys() else None,
config=config.dataset.transform_config.train,
image_size=tuple(config.dataset.image_size),
to_tensor=True,
)
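The TODO above notes that the train and val transform configs are not yet distinguished; a possible follow-up (not part of this commit, import path assumed from anomalib 0.3.x) could build one PreProcessor per subset:

from anomalib.pre_processing import PreProcessor  # assumed import path for anomalib 0.3.x


def build_pre_processors(config):
    """Hypothetical helper: one PreProcessor per subset, resolving the TODO above."""
    image_size = tuple(config.dataset.image_size)
    train = PreProcessor(config=config.dataset.transform_config.train, image_size=image_size, to_tensor=True)
    val = PreProcessor(config=config.dataset.transform_config.val, image_size=image_size, to_tensor=True)
    return train, val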
8 changes: 1 addition & 7 deletions external/anomaly/configs/base/configuration.py
@@ -17,7 +17,7 @@
from sys import maxsize

from attr import attrs
from configs.base.configuration_enums import ModelBackbone, POTQuantizationPreset
from configs.base.configuration_enums import POTQuantizationPreset
from ote_sdk.configuration import ConfigurableParameters
from ote_sdk.configuration.elements import (
ParameterGroup,
@@ -58,12 +58,6 @@ class LearningParameters(ParameterGroup):
affects_outcome_of=ModelLifecycle.TRAINING,
)

backbone = selectable(
default_value=ModelBackbone.RESNET18,
header="Model Backbone",
description="Pre-trained backbone used for feature extraction",
)

@attrs
class DatasetParameters(ParameterGroup):
"""Parameters related to dataloader."""
11 changes: 11 additions & 0 deletions external/anomaly/configs/draem/__init__.py
@@ -0,0 +1,11 @@
"""
Base configuration parameters for Draem
"""

# Copyright (C) 2021-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0


from .configuration import DraemAnomalyBaseConfig

__all__ = ["DraemAnomalyBaseConfig"]
36 changes: 36 additions & 0 deletions external/anomaly/configs/draem/compression_config.json
@@ -0,0 +1,36 @@
{
"base": {
"find_unused_parameters": true,
"target_metric_name": "image_F1Score",
"nncf_config": {
"input_info": {
"sample_size": [1, 3, 256, 256]
},
"compression": [],
"log_dir": "/tmp"
}
},
"nncf_quantization": {
"model": {
"lr": 0.004
},
"nncf_config": {
"compression": [
{
"algorithm": "quantization",
"preset": "mixed",
"initializer": {
"range": {
"num_init_samples": 250
},
"batchnorm_adaptation": {
"num_bn_adaptation_samples": 250
}
},
"ignored_scopes": []
}
]
}
},
"order_of_parts": ["nncf_quantization"]
}
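For context, the compression config is laid out as a "base" part plus named parts applied in "order_of_parts". A minimal sketch of composing it is shown below; the shallow merge is purely illustrative, and the real merge performed by the NNCF task is likely deeper.

import json

with open("external/anomaly/configs/draem/compression_config.json", encoding="utf-8") as file:
    cfg = json.load(file)

# Start from the base part, then apply each named part in order (shallow merge for illustration only).
composed = dict(cfg["base"])
for part in cfg.get("order_of_parts", []):
    composed.update(cfg[part])

print(composed["nncf_config"]["compression"][0]["algorithm"])  # "quantization"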
101 changes: 101 additions & 0 deletions external/anomaly/configs/draem/configuration.py
@@ -0,0 +1,101 @@
"""
Configurable parameters for Draem anomaly task
"""

# Copyright (C) 2021-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0


from attr import attrs
from configs.base import BaseAnomalyConfig
from configs.base.configuration_enums import EarlyStoppingMetrics
from ote_sdk.configuration.elements import (
ParameterGroup,
add_parameter_group,
configurable_float,
configurable_integer,
selectable,
string_attribute,
)
from ote_sdk.configuration.model_lifecycle import ModelLifecycle


@attrs
class DraemAnomalyBaseConfig(BaseAnomalyConfig):
"""
Configurable parameters for DRAEM anomaly classification task.
"""

header = string_attribute("Configuration for Draem")
description = header

@attrs
class LearningParameters(BaseAnomalyConfig.LearningParameters):
"""Parameters that can be tuned using HPO."""

header = string_attribute("Learning Parameters")
description = header

train_batch_size = configurable_integer(
default_value=8,
min_value=1,
max_value=512,
header="Batch size",
description="The number of training samples seen in each iteration of training. Increasing this value "
"improves training time and may make the training more stable. A larger batch size has higher "
"memory requirements.",
warning="Increasing this value may cause the system to use more memory than available, "
"potentially causing out of memory errors, please update with caution.",
affects_outcome_of=ModelLifecycle.TRAINING,
)

lr = configurable_float(
default_value=0.0001,
header="Learning Rate",
min_value=1e-4,
max_value=1,
description="Learning rate used for optimizing the network.",
)

@attrs
class EarlyStoppingParameters(ParameterGroup):
"""
Early stopping parameters
"""

header = string_attribute("Early Stopping Parameters")
description = header

metric = selectable(
default_value=EarlyStoppingMetrics.IMAGE_ROC_AUC,
header="Early Stopping Metric",
description="The metric used to determine if the model should stop training",
)

patience = configurable_integer(
default_value=20,
min_value=1,
max_value=100,
header="Early Stopping Patience",
description="Number of epochs to wait for an improvement in the monitored metric. If the metric has "
"not improved for this many epochs, the training will stop and the best model will be "
"returned.",
warning="Setting this value too low might lead to underfitting. Setting the value too high will "
"increase the training time and might lead to overfitting.",
affects_outcome_of=ModelLifecycle.TRAINING,
)

early_stopping = add_parameter_group(EarlyStoppingParameters)

max_epochs = configurable_integer(
default_value=700,
header="Max Epochs",
min_value=1,
max_value=700,
description="Maximum number of epochs to train the model for.",
warning="Training for very few epochs might lead to poor performance. If Early Stopping is enabled then "
"increasing the value of max epochs might not lead to desired result.",
affects_outcome_of=ModelLifecycle.TRAINING,
)

learning_parameters = add_parameter_group(LearningParameters)
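As a quick usage sketch, the new config should expose the defaults defined above once instantiated. This assumes the attrs-based class builds with its defaults, as the other OTE anomaly configs do; it is not code from this commit.

from configs.draem import DraemAnomalyBaseConfig

config = DraemAnomalyBaseConfig()
print(config.learning_parameters.train_batch_size)        # 8
print(config.learning_parameters.lr)                       # 0.0001
print(config.learning_parameters.early_stopping.patience)  # 20
print(config.learning_parameters.max_epochs)               # 700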