[ANOMALY] Add DRAEM task implementation #1203

Merged
merged 36 commits into from Aug 11, 2022
Changes from 34 commits
Commits
36 commits
41e245e
Add initial support for anomalib 0.3.3
Jul 14, 2022
2dd51ea
Refactor directory structure
Jul 15, 2022
ee1451b
revert path
Jul 15, 2022
fa206b7
🚚 Move python configs
Jul 18, 2022
71ea064
Replace LabelNames with is_anomalous
Jul 18, 2022
ba44ef6
Rename model_config_path to config_path
Jul 18, 2022
e79587d
🚚 Move tasks from tools to root
Jul 18, 2022
cad5502
Refactor configs
Jul 18, 2022
ba4ee94
🚚 refactor templates + configs structure
ashwinvaidya17 Jul 20, 2022
aa33c1f
Upgrade torch version
ashwinvaidya17 Jul 20, 2022
e0e34f8
Merge branch 'develop' into ashwin/upgrade_anomalib
ashwinvaidya17 Jul 20, 2022
a861026
fix url
ashwinvaidya17 Jul 20, 2022
237a2ee
Update template paths + add nncf to requirements
ashwinvaidya17 Jul 20, 2022
d16716a
Update label mapper
ashwinvaidya17 Jul 21, 2022
d71f91e
Fix nncf task + address tests
ashwinvaidya17 Jul 21, 2022
c8adf78
Revert to previous torch version + add torchtext dependency
ashwinvaidya17 Jul 25, 2022
5f29f13
Upgrade anomalib version
ashwinvaidya17 Aug 1, 2022
15b3fd9
Merge branch 'develop' into ashwin/upgrade_anomalib
ashwinvaidya17 Aug 1, 2022
317aa81
Revert torchtext removal
ashwinvaidya17 Aug 1, 2022
9388114
Fix openvino inference
ashwinvaidya17 Aug 2, 2022
f1e9711
Upgrade to 0.3.5
ashwinvaidya17 Aug 2, 2022
8c4adb1
Rename task
ashwinvaidya17 Aug 3, 2022
abbc16d
Limit pytorch lightning version
ashwinvaidya17 Aug 3, 2022
314a10e
add config files and templates for Draem model
djdameln Aug 3, 2022
25ec05c
formatting
djdameln Aug 3, 2022
6e2cb9d
properly load transform config
djdameln Aug 3, 2022
d900f3e
remove backbone parameter from base config
djdameln Aug 3, 2022
fdb7843
rebase
djdameln Aug 3, 2022
4fd3866
set transform in config adapter instead of data adapter
djdameln Aug 8, 2022
c6fd106
fix transform config
djdameln Aug 8, 2022
8d43474
formatting
djdameln Aug 8, 2022
0af9907
add compression config
djdameln Aug 8, 2022
7d1572c
merge develop
djdameln Aug 9, 2022
9e43b72
change default parameter values
djdameln Aug 9, 2022
12b5dba
use short license header
djdameln Aug 10, 2022
9e7d4b1
rename draem templates
djdameln Aug 10, 2022
@@ -36,6 +36,13 @@ def get_anomalib_config(task_name: str, ote_config: ConfigurableParameters) -> U
"""
config_path = Path(anomalib.__file__).parent / "models" / task_name.lower() / "config.yaml"
anomalib_config = get_configurable_parameters(model_name=task_name.lower(), config_path=config_path)
# TODO: remove this hard coding of the config location
if anomalib_config.model.name == "draem":
anomalib_config.dataset.transform_config.train = "external/anomaly/configs/draem/transform_config.yaml"
anomalib_config.dataset.transform_config.val = "external/anomaly/configs/draem/transform_config.yaml"
else:
anomalib_config.dataset.transform_config.train = None
anomalib_config.dataset.transform_config.val = None
update_anomalib_config(anomalib_config, ote_config)
return anomalib_config

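For context, a minimal standalone sketch of the override logic added above, using a plain OmegaConf object in place of the real anomalib config (the helper name set_draem_transform_config and the stand-in config are illustrative, not part of this PR):

from omegaconf import OmegaConf

DRAEM_TRANSFORM_PATH = "external/anomaly/configs/draem/transform_config.yaml"


def set_draem_transform_config(anomalib_config):
    # Mirror the branch added in get_anomalib_config: only DRAEM gets a custom transform config.
    if anomalib_config.model.name == "draem":
        anomalib_config.dataset.transform_config.train = DRAEM_TRANSFORM_PATH
        anomalib_config.dataset.transform_config.val = DRAEM_TRANSFORM_PATH
    else:
        anomalib_config.dataset.transform_config.train = None
        anomalib_config.dataset.transform_config.val = None
    return anomalib_config


# Stand-in config for illustration only; the real object comes from get_configurable_parameters().
config = OmegaConf.create(
    {"model": {"name": "draem"}, "dataset": {"transform_config": {"train": None, "val": None}}}
)
assert set_draem_transform_config(config).dataset.transform_config.train == DRAEM_TRANSFORM_PATH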
3 changes: 2 additions & 1 deletion external/anomaly/adapters/anomalib/data/data.py
@@ -64,8 +64,9 @@ def __init__(self, config: Union[DictConfig, ListConfig], dataset: DatasetEntity
self.dataset = dataset
self.task_type = task_type

# TODO: distinguish between train and val config here
self.pre_processor = PreProcessor(
config=config.transform if "transform" in config.keys() else None,
config=config.dataset.transform_config.train,
image_size=tuple(config.dataset.image_size),
to_tensor=True,
)
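The TODO in the hunk above notes that train and val transform configs are not yet distinguished. A possible shape for that follow-up, sketched only for discussion (the helper name build_pre_processors is hypothetical; the import path is assumed from anomalib 0.3.x):

from anomalib.pre_processing import PreProcessor


def build_pre_processors(config):
    # Hypothetical helper: build separate train and val pre-processors from the config.
    image_size = tuple(config.dataset.image_size)
    train = PreProcessor(config=config.dataset.transform_config.train, image_size=image_size, to_tensor=True)
    val = PreProcessor(config=config.dataset.transform_config.val, image_size=image_size, to_tensor=True)
    return train, val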
8 changes: 1 addition & 7 deletions external/anomaly/configs/base/configuration.py
@@ -19,7 +19,7 @@
from sys import maxsize

from attr import attrs
from configs.base.configuration_enums import ModelBackbone, POTQuantizationPreset
from configs.base.configuration_enums import POTQuantizationPreset
from ote_sdk.configuration import ConfigurableParameters
from ote_sdk.configuration.elements import (
ParameterGroup,
@@ -62,12 +62,6 @@ class LearningParameters(ParameterGroup):
affects_outcome_of=ModelLifecycle.TRAINING,
)

backbone = selectable(
default_value=ModelBackbone.RESNET18,
header="Model Backbone",
description="Pre-trained backbone used for feature extraction",
)

@attrs
class DatasetParameters(ParameterGroup):
"""
21 changes: 21 additions & 0 deletions external/anomaly/configs/draem/__init__.py
@@ -0,0 +1,21 @@
"""
Base configuration parameters for Draem
"""

# Copyright (C) 2022 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions
# and limitations under the License.

from .configuration import DraemAnomalyBaseConfig

__all__ = ["DraemAnomalyBaseConfig"]
36 changes: 36 additions & 0 deletions external/anomaly/configs/draem/compression_config.json
@@ -0,0 +1,36 @@
{
"base": {
"find_unused_parameters": true,
"target_metric_name": "image_F1Score",
"nncf_config": {
"input_info": {
"sample_size": [1, 3, 256, 256]
},
"compression": [],
"log_dir": "/tmp"
}
},
"nncf_quantization": {
"model": {
"lr": 0.004
},
"nncf_config": {
"compression": [
{
"algorithm": "quantization",
"preset": "mixed",
"initializer": {
"range": {
"num_init_samples": 250
},
"batchnorm_adaptation": {
"num_bn_adaptation_samples": 250
}
},
"ignored_scopes": []
}
]
}
},
"order_of_parts": ["nncf_quantization"]
}
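For reviewers, a minimal sketch of how a compression config with this layout can be read and its parts applied in the order given by order_of_parts; the real merge is handled by the NNCF/OTE tooling, and the shallow merge below is only illustrative:

import json


def load_nncf_config(path: str) -> dict:
    # Read the compression config and layer the enabled parts on top of "base".
    with open(path, encoding="utf-8") as json_file:
        cfg = json.load(json_file)
    merged = dict(cfg["base"])
    for part in cfg.get("order_of_parts", []):
        # A shallow merge is enough for illustration; nested keys such as
        # "nncf_config" would need a recursive merge in real code.
        merged.update(cfg[part])
    return merged


# Example usage:
# nncf_cfg = load_nncf_config("external/anomaly/configs/draem/compression_config.json")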
111 changes: 111 additions & 0 deletions external/anomaly/configs/draem/configuration.py
@@ -0,0 +1,111 @@
"""
Configurable parameters for Draem anomaly task
"""

# Copyright (C) 2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions
# and limitations under the License.

from attr import attrs
from configs.base import BaseAnomalyConfig
from configs.base.configuration_enums import EarlyStoppingMetrics
from ote_sdk.configuration.elements import (
ParameterGroup,
add_parameter_group,
configurable_float,
configurable_integer,
selectable,
string_attribute,
)
from ote_sdk.configuration.model_lifecycle import ModelLifecycle


@attrs
class DraemAnomalyBaseConfig(BaseAnomalyConfig):
"""
Configurable parameters for DRAEM anomaly classification task.
"""

header = string_attribute("Configuration for Draem")
description = header

@attrs
class LearningParameters(BaseAnomalyConfig.LearningParameters):
"""Parameters that can be tuned using HPO."""

header = string_attribute("Learning Parameters")
description = header

train_batch_size = configurable_integer(
default_value=8,
min_value=1,
max_value=512,
header="Batch size",
description="The number of training samples seen in each iteration of training. Increasing this value "
"improves training time and may make the training more stable. A larger batch size has higher "
"memory requirements.",
warning="Increasing this value may cause the system to use more memory than available, "
"potentially causing out of memory errors, please update with caution.",
affects_outcome_of=ModelLifecycle.TRAINING,
)

lr = configurable_float(
default_value=0.0001,
header="Learning Rate",
min_value=1e-4,
max_value=1,
description="Learning rate used for optimizing the network.",
)

@attrs
class EarlyStoppingParameters(ParameterGroup):
"""
Early stopping parameters
"""

header = string_attribute("Early Stopping Parameters")
description = header

metric = selectable(
default_value=EarlyStoppingMetrics.IMAGE_ROC_AUC,
header="Early Stopping Metric",
description="The metric used to determine if the model should stop training",
)

patience = configurable_integer(
default_value=20,
min_value=1,
max_value=100,
header="Early Stopping Patience",
description="Number of epochs to wait for an improvement in the monitored metric. If the metric has "
"not improved for this many epochs, the training will stop and the best model will be "
"returned.",
warning="Setting this value too low might lead to underfitting. Setting the value too high will "
"increase the training time and might lead to overfitting.",
affects_outcome_of=ModelLifecycle.TRAINING,
)

early_stopping = add_parameter_group(EarlyStoppingParameters)

max_epochs = configurable_integer(
default_value=700,
header="Max Epochs",
min_value=1,
max_value=700,
description="Maximum number of epochs to train the model for.",
warning="Training for very few epochs might lead to poor performance. If Early Stopping is enabled then "
"increasing the value of max epochs might not lead to desired result.",
affects_outcome_of=ModelLifecycle.TRAINING,
)

learning_parameters = add_parameter_group(LearningParameters)
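A rough usage sketch of the new configuration class, assuming the attrs-based parameter groups instantiate with the defaults declared above, as other OTE anomaly configs do (not part of this PR):

from configs.draem import DraemAnomalyBaseConfig

hyper_parameters = DraemAnomalyBaseConfig()
# Default values as declared above (attribute paths follow the nesting of the parameter groups).
print(hyper_parameters.learning_parameters.train_batch_size)        # 8
print(hyper_parameters.learning_parameters.lr)                       # 0.0001
print(hyper_parameters.learning_parameters.early_stopping.patience)  # 20
print(hyper_parameters.learning_parameters.max_epochs)               # 700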