✨ [ANOMALY] Add DRAEM task implementation (#1203)
* Add initial support for anomalib 0.3.3
* Refactor directory structure
* revert path
* 🚚 Move python configs
* Replace LabelNames with is_anomalous
* Rename model_config_path to config_path
* 🚚 Move tasks from tools to root
* Refactor configs
* 🚚 refactor templates + configs structure
* Upgrade torch version
* fix url
* Update template paths + add nncf to requirements
* Update label mapper
* Fix nncf task + address tests
* Revert to previous torch version + add torchtext dependency
* Upgrade anomalib version
* Revert torchtext removal
* Fix openvino inference
* Upgrade to 0.3.5
* Rename task
* Limit pytorch lightning version
* add config files and templates for Draem model
* formatting
* properly load transform config
* remove backbone parameter from base config
* set transform in config adapter instead of data adapter
* fix transform config
* formatting
* add compression config
* change default parameter values
* use short license header
* rename draem templates

Co-authored-by: Ashwin Vaidya <ashwinitinvaidya@gmail.com>
Co-authored-by: Ashwin Vaidya <ashwin.vaidya@intel.com>
1 parent: 0c4a04c
Commit: 1d54006
Showing 15 changed files with 548 additions and 12 deletions.
@@ -0,0 +1,11 @@
"""
Base configuration parameters for Draem
"""

# Copyright (C) 2021-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0


from .configuration import DraemAnomalyBaseConfig

__all__ = ["DraemAnomalyBaseConfig"]
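The package init simply re-exports the configuration class. As an illustrative usage note only (the diff does not show the package path, so the import location below is an assumption):

# Illustrative only: "configs.draem" is an assumed package location for this __init__.py.
from configs.draem import DraemAnomalyBaseConfig  # re-exported via __all__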
@@ -0,0 +1,36 @@ | ||
{ | ||
"base": { | ||
"find_unused_parameters": true, | ||
"target_metric_name": "image_F1Score", | ||
"nncf_config": { | ||
"input_info": { | ||
"sample_size": [1, 3, 256, 256] | ||
}, | ||
"compression": [], | ||
"log_dir": "/tmp" | ||
} | ||
}, | ||
"nncf_quantization": { | ||
"model": { | ||
"lr": 0.004 | ||
}, | ||
"nncf_config": { | ||
"compression": [ | ||
{ | ||
"algorithm": "quantization", | ||
"preset": "mixed", | ||
"initializer": { | ||
"range": { | ||
"num_init_samples": 250 | ||
}, | ||
"batchnorm_adaptation": { | ||
"num_bn_adaptation_samples": 250 | ||
} | ||
}, | ||
"ignored_scopes": [] | ||
} | ||
] | ||
} | ||
}, | ||
"order_of_parts": ["nncf_quantization"] | ||
} |
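The "order_of_parts" key lists which optimization parts are overlaid onto the "base" section when a final NNCF configuration is assembled. The snippet below is a minimal sketch of that composition under stated assumptions: it is not the actual OTE/NNCF task code, and both the file name "compression_config.json" and the deep_merge helper are hypothetical.

import json


def deep_merge(base: dict, override: dict) -> dict:
    """Recursively overlay `override` onto `base`; lists and scalars are replaced."""
    merged = dict(base)
    for key, value in override.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = deep_merge(merged[key], value)
        else:
            merged[key] = value
    return merged


# "compression_config.json" is an assumed file name for the JSON shown above.
with open("compression_config.json", encoding="utf-8") as f:
    parts = json.load(f)

composed = parts["base"]
for part_name in parts["order_of_parts"]:  # here: ["nncf_quantization"]
    composed = deep_merge(composed, parts[part_name])

# The composed dict now carries the quantization entry in nncf_config["compression"].
print(json.dumps(composed["nncf_config"], indent=2))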
@@ -0,0 +1,101 @@ | ||
""" | ||
Configurable parameters for Draem anomaly task | ||
""" | ||
|
||
# Copyright (C) 2021-2022 Intel Corporation | ||
# SPDX-License-Identifier: Apache-2.0 | ||
|
||
|
||
from attr import attrs | ||
from configs.base import BaseAnomalyConfig | ||
from configs.base.configuration_enums import EarlyStoppingMetrics | ||
from ote_sdk.configuration.elements import ( | ||
ParameterGroup, | ||
add_parameter_group, | ||
configurable_float, | ||
configurable_integer, | ||
selectable, | ||
string_attribute, | ||
) | ||
from ote_sdk.configuration.model_lifecycle import ModelLifecycle | ||
|
||
|
||
@attrs | ||
class DraemAnomalyBaseConfig(BaseAnomalyConfig): | ||
""" | ||
Configurable parameters for DRAEM anomaly classification task. | ||
""" | ||
|
||
header = string_attribute("Configuration for Draem") | ||
description = header | ||
|
||
@attrs | ||
class LearningParameters(BaseAnomalyConfig.LearningParameters): | ||
"""Parameters that can be tuned using HPO.""" | ||
|
||
header = string_attribute("Learning Parameters") | ||
description = header | ||
|
||
train_batch_size = configurable_integer( | ||
default_value=8, | ||
min_value=1, | ||
max_value=512, | ||
header="Batch size", | ||
description="The number of training samples seen in each iteration of training. Increasing this value " | ||
"improves training time and may make the training more stable. A larger batch size has higher " | ||
"memory requirements.", | ||
warning="Increasing this value may cause the system to use more memory than available, " | ||
"potentially causing out of memory errors, please update with caution.", | ||
affects_outcome_of=ModelLifecycle.TRAINING, | ||
) | ||
|
||
lr = configurable_float( | ||
default_value=0.0001, | ||
header="Learning Rate", | ||
min_value=1e-4, | ||
max_value=1, | ||
description="Learning rate used for optimizing the network.", | ||
) | ||
|
||
@attrs | ||
class EarlyStoppingParameters(ParameterGroup): | ||
""" | ||
Early stopping parameters | ||
""" | ||
|
||
header = string_attribute("Early Stopping Parameters") | ||
description = header | ||
|
||
metric = selectable( | ||
default_value=EarlyStoppingMetrics.IMAGE_ROC_AUC, | ||
header="Early Stopping Metric", | ||
description="The metric used to determine if the model should stop training", | ||
) | ||
|
||
patience = configurable_integer( | ||
default_value=20, | ||
min_value=1, | ||
max_value=100, | ||
header="Early Stopping Patience", | ||
description="Number of epochs to wait for an improvement in the monitored metric. If the metric has " | ||
"not improved for this many epochs, the training will stop and the best model will be " | ||
"returned.", | ||
warning="Setting this value too low might lead to underfitting. Setting the value too high will " | ||
"increase the training time and might lead to overfitting.", | ||
affects_outcome_of=ModelLifecycle.TRAINING, | ||
) | ||
|
||
early_stopping = add_parameter_group(EarlyStoppingParameters) | ||
|
||
max_epochs = configurable_integer( | ||
default_value=700, | ||
header="Max Epochs", | ||
min_value=1, | ||
max_value=700, | ||
description="Maximum number of epochs to train the model for.", | ||
warning="Training for very few epochs might lead to poor performance. If Early Stopping is enabled then " | ||
"increasing the value of max epochs might not lead to desired result.", | ||
affects_outcome_of=ModelLifecycle.TRAINING, | ||
) | ||
|
||
learning_parameters = add_parameter_group(LearningParameters) |
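For a rough sense of how these parameter groups are consumed, the sketch below instantiates the config and reads a few defaults. It assumes ote_sdk and the anomaly configs package are importable, that the module lives at the assumed path "configs.draem.configuration", and that, as is usual for OTE SDK parameter elements, attribute access on an instantiated group returns the configured (default) value.

# Illustrative only: the import path is an assumption based on the imports in the file above.
from configs.draem.configuration import DraemAnomalyBaseConfig

hyper_parameters = DraemAnomalyBaseConfig()

# Parameter groups are exposed as attributes; configurable_* elements carry defaults.
print(hyper_parameters.learning_parameters.train_batch_size)         # expected: 8
print(hyper_parameters.learning_parameters.lr)                        # expected: 0.0001
print(hyper_parameters.learning_parameters.early_stopping.patience)  # expected: 20
print(hyper_parameters.learning_parameters.max_epochs)               # expected: 700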