[OTE Enhancements] - Add pydocstyle to pre-commit configurations. #1206

Merged 1 commit on Aug 10, 2022
12 changes: 12 additions & 0 deletions .pre-commit-config.yaml
@@ -85,3 +85,15 @@ repos:
types: [python]
args: ["--score=no"]
exclude: "tests"

- repo: https://github.com/PyCQA/pydocstyle
rev: 6.1.1
hooks:
- id: pydocstyle
additional_dependencies: [toml]
name: pydocstyle
entry: pydocstyle
language: python
types: [python]
files: '^external/anomaly/.*\.py'
exclude: "external/anomaly/tests"
4 changes: 1 addition & 3 deletions external/anomaly/adapters/anomalib/callbacks/__init__.py
@@ -1,6 +1,4 @@
"""
Callbacks for OTE inference
"""
"""Callbacks for OTE inference."""

# Copyright (C) 2021 Intel Corporation
#
4 changes: 1 addition & 3 deletions external/anomaly/adapters/anomalib/callbacks/inference.py
@@ -1,6 +1,4 @@
"""
Inference Callbacks for OTE inference
"""
"""Inference Callbacks for OTE inference."""

# Copyright (C) 2021 Intel Corporation
#
37 changes: 11 additions & 26 deletions external/anomaly/adapters/anomalib/callbacks/progress.py
@@ -1,6 +1,4 @@
"""
Progressbar Callback for OTE task
"""
"""Progressbar Callback for OTE task."""

# Copyright (C) 2021 Intel Corporation
#
@@ -24,8 +22,9 @@


class ProgressCallback(TQDMProgressBar):
"""
Modifies progress callback to show completion of the entire training step
"""Progress Callback.

Modify progress callback to show completion of the entire training step.
"""

def __init__(self, parameters: Optional[Union[TrainParameters, InferenceParameters]] = None) -> None:
@@ -40,61 +39,47 @@ def __init__(self, parameters: Optional[Union[TrainParameters, InferenceParamete
self.update_progress_callback = default_progress_callback

def on_train_start(self, trainer, pl_module):
"""
Store max epochs and current epoch from trainer
"""
"""Store max epochs and current epoch from trainer."""
super().on_train_start(trainer, pl_module)
self.current_epoch = trainer.current_epoch
self.max_epochs = trainer.max_epochs
self._reset_progress()

def on_predict_start(self, trainer, pl_module):
"""
Reset progress bar when prediction starts.
"""
"""Reset progress bar when prediction starts."""
super().on_predict_start(trainer, pl_module)
self._reset_progress()

def on_test_start(self, trainer, pl_module):
"""
Reset progress bar when testing starts.
"""
"""Reset progress bar when testing starts."""
super().on_test_start(trainer, pl_module)
self._reset_progress()

def on_train_batch_end(self, trainer, pl_module, outputs, batch, batch_idx):
"""
Adds training completion percentage to the progress bar
"""
"""Adds training completion percentage to the progress bar."""
super().on_train_batch_end(trainer, pl_module, outputs, batch, batch_idx)
self.current_epoch = trainer.current_epoch
self._update_progress(stage="train")

def on_predict_batch_end(self, trainer, pl_module, outputs, batch, batch_idx, dataloader_idx):
"""
Adds prediction completion percentage to the progress bar
"""
"""Adds prediction completion percentage to the progress bar."""
super().on_predict_batch_end(trainer, pl_module, outputs, batch, batch_idx, dataloader_idx)
self._update_progress(stage="predict")

def on_test_batch_end(self, trainer, pl_module, outputs, batch, batch_idx, dataloader_idx):
"""
Adds testing completion percentage to the progress bar
"""
"""Adds testing completion percentage to the progress bar."""
super().on_test_batch_end(trainer, pl_module, outputs, batch, batch_idx, dataloader_idx)
self._update_progress(stage="test")

def _reset_progress(self):
self._progress = 0.0

def _get_progress(self, stage: str = "train") -> float:
"""
Get progress for train and test stages.
"""Get progress for train and test stages.

Args:
stage (str, optional): Train or Test stages. Defaults to "train".
"""

if stage == "train":
# Progress is calculated on the upper bound (max epoch).
# Early stopping might stop the training before the progress reaches 100%
10 changes: 3 additions & 7 deletions external/anomaly/adapters/anomalib/callbacks/score_report.py
@@ -1,4 +1,4 @@
"""Score reporting callback"""
"""Score reporting callback."""

# Copyright (C) 2020 Intel Corporation
#
@@ -21,9 +21,7 @@


class ScoreReportingCallback(Callback):
"""
Callback for reporting score.
"""
"""Callback for reporting score."""

def __init__(self, parameters: Optional[TrainParameters] = None) -> None:
if parameters is not None:
@@ -32,9 +30,7 @@ def __init__(self, parameters: Optional[TrainParameters] = None) -> None:
self.score_reporting_callback = None

def on_validation_epoch_end(self, trainer, pl_module): # pylint: disable=unused-argument
"""
If score exists in trainer.logged_metrics, report the score.
"""
"""If score exists in trainer.logged_metrics, report the score."""
if self.score_reporting_callback is not None:
score = None
metric = getattr(self.score_reporting_callback, "metric", None)
4 changes: 1 addition & 3 deletions external/anomaly/adapters/anomalib/config/__init__.py
@@ -1,6 +1,4 @@
"""
Initialization of Configurable parameter conversion between OTE and Anomalib.
"""
"""Initialization of Configurable parameter conversion between OTE and Anomalib."""

# Copyright (C) 2021 Intel Corporation
#
30 changes: 18 additions & 12 deletions external/anomaly/adapters/anomalib/config/anomalib_config.py
@@ -1,6 +1,4 @@
"""
Configurable parameter conversion between OTE and Anomalib.
"""
"""Configurable parameter conversion between OTE and Anomalib."""

# Copyright (C) 2021 Intel Corporation
#
@@ -26,13 +24,18 @@


def get_anomalib_config(task_name: str, ote_config: ConfigurableParameters) -> Union[DictConfig, ListConfig]:
"""
Create an anomalib config object that matches the values specified in the OTE config.
"""Get anomalib configuration.

Create an anomalib config object that matches the values specified in the
OTE config.

Args:
ote_config: ConfigurableParameters: OTE config object parsed from configuration.yaml file
ote_config: ConfigurableParameters: OTE config object parsed from
configuration.yaml file

Returns:
Anomalib config object for the specified model type with overwritten default values.
Anomalib config object for the specified model type with overwritten
default values.
"""
config_path = Path(anomalib.__file__).parent / "models" / task_name.lower() / "config.yaml"
anomalib_config = get_configurable_parameters(model_name=task_name.lower(), config_path=config_path)
@@ -41,7 +44,7 @@ def get_anomalib_config(task_name: str, ote_config: ConfigurableParameters) -> U


def _anomalib_config_mapper(anomalib_config: Union[DictConfig, ListConfig], ote_config: ConfigurableParameters):
"""Returns mapping from learning parameters to anomalib parameters
"""Return mapping from learning parameters to anomalib parameters.

Args:
anomalib_config: DictConfig: Anomalib config object
@@ -64,13 +67,16 @@


def update_anomalib_config(anomalib_config: Union[DictConfig, ListConfig], ote_config: ConfigurableParameters):
"""
Overwrite the default parameter values in the anomalib config with the values specified in the OTE config. The
function is recursively called for each parameter group present in the OTE config.
"""Update anomalib configuration.

Overwrite the default parameter values in the anomalib config with the
values specified in the OTE config. The function is recursively called for
each parameter group present in the OTE config.

Args:
anomalib_config: DictConfig: Anomalib config object
ote_config: ConfigurableParameters: OTE config object parsed from configuration.yaml file
ote_config: ConfigurableParameters: OTE config object parsed from
configuration.yaml file
"""
for param in ote_config.parameters:
assert param in anomalib_config.keys(), f"Parameter {param} not present in anomalib config."
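
For reference, a hedged sketch of calling the converter above; the import path follows the file location, while the "padim" model name and the hyper_parameters variable are illustrative assumptions rather than part of this diff:

from adapters.anomalib.config.anomalib_config import get_anomalib_config

# hyper_parameters is assumed to be a ConfigurableParameters instance parsed
# from the task's configuration.yaml; "padim" is an assumed anomalib model name,
# lower-cased internally to locate the matching config.yaml.
anomalib_config = get_anomalib_config(task_name="padim", ote_config=hyper_parameters)
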
4 changes: 1 addition & 3 deletions external/anomaly/adapters/anomalib/data/__init__.py
@@ -1,6 +1,4 @@
"""
Initialization of Anomaly Dataset Utils
"""
"""Initialization of Anomaly Dataset Utils."""

# Copyright (C) 2021 Intel Corporation
#
55 changes: 38 additions & 17 deletions external/anomaly/adapters/anomalib/data/data.py
@@ -1,6 +1,4 @@
"""
Anomaly Dataset Utils
"""
"""Anomaly Dataset Utils."""

# Copyright (C) 2021 Intel Corporation
#
@@ -39,8 +37,8 @@


class OTEAnomalyDataset(Dataset):
"""
Anomaly Dataset Adaptor
"""Anomaly Dataset Adaptor.

This class converts OTE Dataset into Anomalib dataset that
is a sub-class of Vision Dataset.

@@ -71,9 +69,25 @@ def __init__(self, config: Union[DictConfig, ListConfig], dataset: DatasetEntity
)

def __len__(self) -> int:
"""Get size of the dataset.

Returns:
int: Size of the dataset.
"""
return len(self.dataset)

def __getitem__(self, index: int) -> Dict[str, Union[int, Tensor]]:
"""Get dataset item.

Args:
index (int): Index of the dataset sample.

Raises:
ValueError: When the task type is not supported.

Returns:
Dict[str, Union[int, Tensor]]: Dataset item.
"""
dataset_item = self.dataset[index]
item: Dict[str, Union[int, Tensor]] = {}
item = {"index": index}
@@ -98,8 +112,8 @@ def __getitem__(self, index: int) -> Dict[str, Union[int, Tensor]]:


class OTEAnomalyDataModule(LightningDataModule):
"""
Anomaly DataModule
"""Anomaly DataModule.

This class converts OTE Dataset into Anomalib dataset and stores
train/val/test dataloaders.

@@ -131,8 +145,7 @@ def __init__(self, config: Union[DictConfig, ListConfig], dataset: DatasetEntity
self.predict_ote_dataset: DatasetEntity

def setup(self, stage: Optional[str] = None) -> None:
"""
Setup Anomaly Data Module
"""Setup Anomaly Data Module.

Args:
stage (Optional[str], optional): train/val/test stages.
@@ -172,8 +185,10 @@ def summary(self):
def train_dataloader(
self,
) -> Union[DataLoader, List[DataLoader], Dict[str, DataLoader]]:
"""
Train Dataloader
"""Train Dataloader.

Returns:
Union[DataLoader, List[DataLoader], Dict[str, DataLoader]]: Train dataloader.
"""
dataset = OTEAnomalyDataset(self.config, self.train_ote_dataset, self.task_type)
return DataLoader(
@@ -184,8 +199,10 @@ def train_dataloader(
)

def val_dataloader(self) -> Union[DataLoader, List[DataLoader]]:
"""
Validation Dataloader
"""Validation Dataloader.

Returns:
Union[DataLoader, List[DataLoader]]: Validation Dataloader.
"""
global_dataset, local_dataset = split_local_global_dataset(self.val_ote_dataset)
logger.info(f"Global annotations: {len(global_dataset)}")
@@ -204,8 +221,10 @@ def val_dataloader(self) -> Union[DataLoader, List[DataLoader]]:
)

def test_dataloader(self) -> Union[DataLoader, List[DataLoader]]:
"""
Test Dataloader
"""Test Dataloader.

Returns:
Union[DataLoader, List[DataLoader]]: Test Dataloader.
"""
dataset = OTEAnomalyDataset(self.config, self.test_ote_dataset, self.task_type)
return DataLoader(
@@ -216,8 +235,10 @@ def test_dataloader(self) -> Union[DataLoader, List[DataLoader]]:
)

def predict_dataloader(self) -> Union[DataLoader, List[DataLoader]]:
"""
Predict Dataloader
"""Predict Dataloader.

Returns:
Union[DataLoader, List[DataLoader]]: Predict Dataloader.
"""
dataset = OTEAnomalyDataset(self.config, self.predict_ote_dataset, self.task_type)
return DataLoader(
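
A hedged sketch of wiring the data module above into a PyTorch Lightning run; the task_type keyword and the trainer/model objects are illustrative assumptions based on the truncated __init__ signature, not code from this PR:

from pytorch_lightning import Trainer

# config, dataset, task_type and anomaly_model are assumed to come from the task.
datamodule = OTEAnomalyDataModule(config=anomalib_config, dataset=dataset, task_type=task_type)
trainer = Trainer(**anomalib_config.trainer)  # assumes a "trainer" section in the anomalib config
trainer.fit(model=anomaly_model, datamodule=datamodule)
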
3 changes: 2 additions & 1 deletion external/anomaly/adapters/anomalib/data/dataset.py
@@ -45,7 +45,8 @@ def __init__(
val_subset: Optional[Dict[str, str]] = None,
test_subset: Optional[Dict[str, str]] = None,
):
"""
"""Base Anomaly Dataset.

Args:
train_subset (Optional[Dict[str, str]], optional): Path to annotation
and dataset used for training. Defaults to None.
3 changes: 3 additions & 0 deletions external/anomaly/adapters/anomalib/data/mvtec.py
@@ -108,10 +108,12 @@

def get_samples(self) -> DataFrame:
"""Get MVTec samples.

Get MVTec samples in a pandas DataFrame. Update the certain columns
to match the OTE naming terminology. For example, column `split` is
renamed to `subset`. Labels are also renamed by creating their
corresponding OTE LabelEntities

Returns:
DataFrame: Final list of samples comprising all the required
information to create the OTE Dataset.
@@ -139,6 +141,7 @@ def get_samples(self) -> DataFrame:

def generate(self) -> DatasetEntity:
"""Generate OTE Anomaly Dataset.

Returns:
DatasetEntity: Output OTE Anomaly Dataset from an MVTec
"""
2 changes: 2 additions & 0 deletions external/anomaly/adapters/anomalib/logger/logger.py
@@ -20,6 +20,7 @@ def get_logger(
be directly returned. During initialization, a StreamHandler will always be
added. If `log_file` is specified and the process rank is 0, a FileHandler
will also be added.

Args:
name (str): Logger name.
log_file (str | None): The log filename. If specified, a FileHandler
@@ -29,6 +30,7 @@
"Error" thus be silent most of the time.
file_mode (str): The file mode used in opening log file.
Defaults to 'w'.

Returns:
logging.Logger: The expected logger.
"""
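
A minimal usage sketch for get_logger, using only the arguments documented above; the log file name is illustrative:

# Writes to stdout via the StreamHandler and, for rank-0 processes, to the given file.
logger = get_logger(name=__name__, log_file="ote_anomaly.log")
logger.info("Logger ready.")
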
4 changes: 1 addition & 3 deletions external/anomaly/configs/base/__init__.py
@@ -1,6 +1,4 @@
"""
Base configurable parameter for anomaly tasks
"""
"""Base configurable parameter for anomaly tasks."""

# Copyright (C) 2022 Intel Corporation
#