From af44e8f7b7fd035d2d8793dec0636ed08d9200e1 Mon Sep 17 00:00:00 2001
From: Samet
Date: Wed, 10 Aug 2022 06:33:03 -0700
Subject: [PATCH] Update docstring to match pydocstyle criterion

---
 .pre-commit-config.yaml                       | 12 ++++
 .../adapters/anomalib/callbacks/__init__.py   |  4 +-
 .../adapters/anomalib/callbacks/inference.py  |  4 +-
 .../adapters/anomalib/callbacks/progress.py   | 37 ++++---------
 .../anomalib/callbacks/score_report.py        | 10 +---
 .../adapters/anomalib/config/__init__.py      |  4 +-
 .../anomalib/config/anomalib_config.py        | 30 ++++++----
 .../adapters/anomalib/data/__init__.py        |  4 +-
 .../anomaly/adapters/anomalib/data/data.py    | 55 +++++++++++++------
 .../anomaly/adapters/anomalib/data/dataset.py |  3 +-
 .../anomaly/adapters/anomalib/data/mvtec.py   |  3 +
 .../adapters/anomalib/logger/logger.py        |  2 +
 external/anomaly/configs/base/__init__.py     |  4 +-
 .../anomaly/configs/base/configuration.py     | 20 ++-----
 .../configs/base/configuration_enums.py       | 29 ++++++----
 external/anomaly/configs/padim/__init__.py    |  4 +-
 .../anomaly/configs/padim/configuration.py    |  8 +--
 external/anomaly/configs/stfpm/__init__.py    |  4 +-
 .../anomaly/configs/stfpm/configuration.py    | 12 +---
 external/anomaly/setup.py                     |  4 +-
 external/anomaly/tasks/__init__.py            |  4 +-
 external/anomaly/tasks/inference.py           |  3 +-
 external/anomaly/tasks/nncf.py                |  3 +-
 external/anomaly/tasks/openvino.py            | 42 +++++++-------
 external/anomaly/tools/__init__.py            |  4 +-
 external/anomaly/tools/sample.py              |  1 -
 pyproject.toml                                | 17 ++++++
 27 files changed, 170 insertions(+), 157 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index f182025e395..e5ac0af47ad 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -85,3 +85,15 @@ repos:
         types: [python]
         args: ["--score=no"]
         exclude: "tests"
+
+  - repo: https://github.com/PyCQA/pydocstyle
+    rev: 6.1.1
+    hooks:
+      - id: pydocstyle
+        additional_dependencies: [toml]
+        name: pydocstyle
+        entry: pydocstyle
+        language: python
+        types: [python]
+        files: '^external/anomaly/.*\.py'
+        exclude: "external/anomaly/tests"
diff --git a/external/anomaly/adapters/anomalib/callbacks/__init__.py b/external/anomaly/adapters/anomalib/callbacks/__init__.py
index c6dbaa7c72d..89856fdc762 100644
--- a/external/anomaly/adapters/anomalib/callbacks/__init__.py
+++ b/external/anomaly/adapters/anomalib/callbacks/__init__.py
@@ -1,6 +1,4 @@
-"""
-Callbacks for OTE inference
-"""
+"""Callbacks for OTE inference."""
 
 # Copyright (C) 2021 Intel Corporation
 #
diff --git a/external/anomaly/adapters/anomalib/callbacks/inference.py b/external/anomaly/adapters/anomalib/callbacks/inference.py
index 50b437d3d5e..b0d70cf2b15 100644
--- a/external/anomaly/adapters/anomalib/callbacks/inference.py
+++ b/external/anomaly/adapters/anomalib/callbacks/inference.py
@@ -1,6 +1,4 @@
-"""
-Inference Callbacks for OTE inference
-"""
+"""Inference Callbacks for OTE inference."""
 
 # Copyright (C) 2021 Intel Corporation
 #
diff --git a/external/anomaly/adapters/anomalib/callbacks/progress.py b/external/anomaly/adapters/anomalib/callbacks/progress.py
index b746fac02c7..552c999803d 100644
--- a/external/anomaly/adapters/anomalib/callbacks/progress.py
+++ b/external/anomaly/adapters/anomalib/callbacks/progress.py
@@ -1,6 +1,4 @@
-"""
-Progressbar Callback for OTE task
-"""
+"""Progressbar Callback for OTE task."""
 
 # Copyright (C) 2021 Intel Corporation
 #
@@ -24,8 +22,9 @@
 
 
 class ProgressCallback(TQDMProgressBar):
-    """
-    Modifies progress callback to show completion of the entire training step
+    """Progress Callback.
+
+    Modify progress callback to show completion of the entire training step.
     """
 
     def __init__(self, parameters: Optional[Union[TrainParameters, InferenceParameters]] = None) -> None:
@@ -40,47 +39,35 @@ def __init__(self, parameters: Optional[Union[TrainParameters, InferenceParamete
             self.update_progress_callback = default_progress_callback
 
     def on_train_start(self, trainer, pl_module):
-        """
-        Store max epochs and current epoch from trainer
-        """
+        """Store max epochs and current epoch from trainer."""
         super().on_train_start(trainer, pl_module)
         self.current_epoch = trainer.current_epoch
         self.max_epochs = trainer.max_epochs
         self._reset_progress()
 
     def on_predict_start(self, trainer, pl_module):
-        """
-        Reset progress bar when prediction starts.
-        """
+        """Reset progress bar when prediction starts."""
         super().on_predict_start(trainer, pl_module)
         self._reset_progress()
 
     def on_test_start(self, trainer, pl_module):
-        """
-        Reset progress bar when testing starts.
-        """
+        """Reset progress bar when testing starts."""
         super().on_test_start(trainer, pl_module)
         self._reset_progress()
 
     def on_train_batch_end(self, trainer, pl_module, outputs, batch, batch_idx):
-        """
-        Adds training completion percentage to the progress bar
-        """
+        """Adds training completion percentage to the progress bar."""
         super().on_train_batch_end(trainer, pl_module, outputs, batch, batch_idx)
         self.current_epoch = trainer.current_epoch
         self._update_progress(stage="train")
 
     def on_predict_batch_end(self, trainer, pl_module, outputs, batch, batch_idx, dataloader_idx):
-        """
-        Adds prediction completion percentage to the progress bar
-        """
+        """Adds prediction completion percentage to the progress bar."""
         super().on_predict_batch_end(trainer, pl_module, outputs, batch, batch_idx, dataloader_idx)
         self._update_progress(stage="predict")
 
     def on_test_batch_end(self, trainer, pl_module, outputs, batch, batch_idx, dataloader_idx):
-        """
-        Adds testing completion percentage to the progress bar
-        """
+        """Adds testing completion percentage to the progress bar."""
         super().on_test_batch_end(trainer, pl_module, outputs, batch, batch_idx, dataloader_idx)
         self._update_progress(stage="test")
 
diff --git a/external/anomaly/adapters/anomalib/callbacks/score_report.py b/external/anomaly/adapters/anomalib/callbacks/score_report.py
index f8b25abad8a..a5151bb12ec 100644
--- a/external/anomaly/adapters/anomalib/callbacks/score_report.py
+++ b/external/anomaly/adapters/anomalib/callbacks/score_report.py
@@ -1,4 +1,4 @@
-"""Score reporting callback"""
+"""Score reporting callback."""
 
 # Copyright (C) 2020 Intel Corporation
 #
@@ -21,9 +21,7 @@
 
 
 class ScoreReportingCallback(Callback):
-    """
-    Callback for reporting score.
- """ + """Callback for reporting score.""" def __init__(self, parameters: Optional[TrainParameters] = None) -> None: if parameters is not None: @@ -32,9 +30,7 @@ def __init__(self, parameters: Optional[TrainParameters] = None) -> None: self.score_reporting_callback = None def on_validation_epoch_end(self, trainer, pl_module): # pylint: disable=unused-argument - """ - If score exists in trainer.logged_metrics, report the score. - """ + """If score exists in trainer.logged_metrics, report the score.""" if self.score_reporting_callback is not None: score = None metric = getattr(self.score_reporting_callback, "metric", None) diff --git a/external/anomaly/adapters/anomalib/config/__init__.py b/external/anomaly/adapters/anomalib/config/__init__.py index 565b4d9df4d..2b79552d681 100644 --- a/external/anomaly/adapters/anomalib/config/__init__.py +++ b/external/anomaly/adapters/anomalib/config/__init__.py @@ -1,6 +1,4 @@ -""" -Initialization of Configurable parameter conversion between OTE and Anomalib. -""" +"""Initialization of Configurable parameter conversion between OTE and Anomalib.""" # Copyright (C) 2021 Intel Corporation # diff --git a/external/anomaly/adapters/anomalib/config/anomalib_config.py b/external/anomaly/adapters/anomalib/config/anomalib_config.py index 18832932d60..d0a998fb2ad 100644 --- a/external/anomaly/adapters/anomalib/config/anomalib_config.py +++ b/external/anomaly/adapters/anomalib/config/anomalib_config.py @@ -1,6 +1,4 @@ -""" -Configurable parameter conversion between OTE and Anomalib. -""" +"""Configurable parameter conversion between OTE and Anomalib.""" # Copyright (C) 2021 Intel Corporation # @@ -26,13 +24,18 @@ def get_anomalib_config(task_name: str, ote_config: ConfigurableParameters) -> Union[DictConfig, ListConfig]: - """ - Create an anomalib config object that matches the values specified in the OTE config. + """Get anomalib configuration. + + Create an anomalib config object that matches the values specified in the + OTE config. Args: - ote_config: ConfigurableParameters: OTE config object parsed from configuration.yaml file + ote_config: ConfigurableParameters: OTE config object parsed from + configuration.yaml file + Returns: - Anomalib config object for the specified model type with overwritten default values. + Anomalib config object for the specified model type with overwritten + default values. """ config_path = Path(anomalib.__file__).parent / "models" / task_name.lower() / "config.yaml" anomalib_config = get_configurable_parameters(model_name=task_name.lower(), config_path=config_path) @@ -41,7 +44,7 @@ def get_anomalib_config(task_name: str, ote_config: ConfigurableParameters) -> U def _anomalib_config_mapper(anomalib_config: Union[DictConfig, ListConfig], ote_config: ConfigurableParameters): - """Returns mapping from learning parameters to anomalib parameters + """Return mapping from learning parameters to anomalib parameters. Args: anomalib_config: DictConfig: Anomalib config object @@ -64,13 +67,16 @@ def _anomalib_config_mapper(anomalib_config: Union[DictConfig, ListConfig], ote_ def update_anomalib_config(anomalib_config: Union[DictConfig, ListConfig], ote_config: ConfigurableParameters): - """ - Overwrite the default parameter values in the anomalib config with the values specified in the OTE config. The - function is recursively called for each parameter group present in the OTE config. + """Update anomalib configuration. + + Overwrite the default parameter values in the anomalib config with the + values specified in the OTE config. 
The function is recursively called for + each parameter group present in the OTE config. Args: anomalib_config: DictConfig: Anomalib config object - ote_config: ConfigurableParameters: OTE config object parsed from configuration.yaml file + ote_config: ConfigurableParameters: OTE config object parsed from + configuration.yaml file """ for param in ote_config.parameters: assert param in anomalib_config.keys(), f"Parameter {param} not present in anomalib config." diff --git a/external/anomaly/adapters/anomalib/data/__init__.py b/external/anomaly/adapters/anomalib/data/__init__.py index 2fa3e5eb57a..f15526fee20 100644 --- a/external/anomaly/adapters/anomalib/data/__init__.py +++ b/external/anomaly/adapters/anomalib/data/__init__.py @@ -1,6 +1,4 @@ -""" -Initialization of Anomaly Dataset Utils -""" +"""Initialization of Anomaly Dataset Utils.""" # Copyright (C) 2021 Intel Corporation # diff --git a/external/anomaly/adapters/anomalib/data/data.py b/external/anomaly/adapters/anomalib/data/data.py index 754e042881c..4342bfead5b 100644 --- a/external/anomaly/adapters/anomalib/data/data.py +++ b/external/anomaly/adapters/anomalib/data/data.py @@ -1,6 +1,4 @@ -""" -Anomaly Dataset Utils -""" +"""Anomaly Dataset Utils.""" # Copyright (C) 2021 Intel Corporation # @@ -39,8 +37,8 @@ class OTEAnomalyDataset(Dataset): - """ - Anomaly Dataset Adaptor + """Anomaly Dataset Adaptor. + This class converts OTE Dataset into Anomalib dataset that is a sub-class of Vision Dataset. @@ -71,9 +69,25 @@ def __init__(self, config: Union[DictConfig, ListConfig], dataset: DatasetEntity ) def __len__(self) -> int: + """Get size of the dataset. + + Returns: + int: Size of the dataset. + """ return len(self.dataset) def __getitem__(self, index: int) -> Dict[str, Union[int, Tensor]]: + """Get dataset item. + + Args: + index (int): Index of the dataset sample. + + Raises: + ValueError: When the task type is not supported. + + Returns: + Dict[str, Union[int, Tensor]]: Dataset item. + """ dataset_item = self.dataset[index] item: Dict[str, Union[int, Tensor]] = {} item = {"index": index} @@ -98,8 +112,8 @@ def __getitem__(self, index: int) -> Dict[str, Union[int, Tensor]]: class OTEAnomalyDataModule(LightningDataModule): - """ - Anomaly DataModule + """Anomaly DataModule. + This class converts OTE Dataset into Anomalib dataset and stores train/val/test dataloaders. @@ -131,8 +145,7 @@ def __init__(self, config: Union[DictConfig, ListConfig], dataset: DatasetEntity self.predict_ote_dataset: DatasetEntity def setup(self, stage: Optional[str] = None) -> None: - """ - Setup Anomaly Data Module + """Setup Anomaly Data Module. Args: stage (Optional[str], optional): train/val/test stages. @@ -172,8 +185,10 @@ def summary(self): def train_dataloader( self, ) -> Union[DataLoader, List[DataLoader], Dict[str, DataLoader]]: - """ - Train Dataloader + """Train Dataloader. + + Returns: + Union[DataLoader, List[DataLoader], Dict[str, DataLoader]]: Train dataloader. """ dataset = OTEAnomalyDataset(self.config, self.train_ote_dataset, self.task_type) return DataLoader( @@ -184,8 +199,10 @@ def train_dataloader( ) def val_dataloader(self) -> Union[DataLoader, List[DataLoader]]: - """ - Validation Dataloader + """Validation Dataloader. + + Returns: + Union[DataLoader, List[DataLoader]]: Validation Dataloader. 
""" global_dataset, local_dataset = split_local_global_dataset(self.val_ote_dataset) logger.info(f"Global annotations: {len(global_dataset)}") @@ -204,8 +221,10 @@ def val_dataloader(self) -> Union[DataLoader, List[DataLoader]]: ) def test_dataloader(self) -> Union[DataLoader, List[DataLoader]]: - """ - Test Dataloader + """Test Dataloader. + + Returns: + Union[DataLoader, List[DataLoader]]: Test Dataloader. """ dataset = OTEAnomalyDataset(self.config, self.test_ote_dataset, self.task_type) return DataLoader( @@ -216,8 +235,10 @@ def test_dataloader(self) -> Union[DataLoader, List[DataLoader]]: ) def predict_dataloader(self) -> Union[DataLoader, List[DataLoader]]: - """ - Predict Dataloader + """Predict Dataloader. + + Returns: + Union[DataLoader, List[DataLoader]]: Predict Dataloader. """ dataset = OTEAnomalyDataset(self.config, self.predict_ote_dataset, self.task_type) return DataLoader( diff --git a/external/anomaly/adapters/anomalib/data/dataset.py b/external/anomaly/adapters/anomalib/data/dataset.py index 757c9401a12..d38477c922f 100644 --- a/external/anomaly/adapters/anomalib/data/dataset.py +++ b/external/anomaly/adapters/anomalib/data/dataset.py @@ -45,7 +45,8 @@ def __init__( val_subset: Optional[Dict[str, str]] = None, test_subset: Optional[Dict[str, str]] = None, ): - """ + """Base Anomaly Dataset. + Args: train_subset (Optional[Dict[str, str]], optional): Path to annotation and dataset used for training. Defaults to None. diff --git a/external/anomaly/adapters/anomalib/data/mvtec.py b/external/anomaly/adapters/anomalib/data/mvtec.py index 6b99027e4b4..7cf9803bfc7 100644 --- a/external/anomaly/adapters/anomalib/data/mvtec.py +++ b/external/anomaly/adapters/anomalib/data/mvtec.py @@ -108,10 +108,12 @@ def __init__( def get_samples(self) -> DataFrame: """Get MVTec samples. + Get MVTec samples in a pandas DataFrame. Update the certain columns to match the OTE naming terminology. For example, column `split` is renamed to `subset`. Labels are also renamed by creating their corresponding OTE LabelEntities + Returns: DataFrame: Final list of samples comprising all the required information to create the OTE Dataset. @@ -139,6 +141,7 @@ def get_samples(self) -> DataFrame: def generate(self) -> DatasetEntity: """Generate OTE Anomaly Dataset. + Returns: DatasetEntity: Output OTE Anomaly Dataset from an MVTec """ diff --git a/external/anomaly/adapters/anomalib/logger/logger.py b/external/anomaly/adapters/anomalib/logger/logger.py index d1708fba872..9ec06cbc1c6 100644 --- a/external/anomaly/adapters/anomalib/logger/logger.py +++ b/external/anomaly/adapters/anomalib/logger/logger.py @@ -20,6 +20,7 @@ def get_logger( be directly returned. During initialization, a StreamHandler will always be added. If `log_file` is specified and the process rank is 0, a FileHandler will also be added. + Args: name (str): Logger name. log_file (str | None): The log filename. If specified, a FileHandler @@ -29,6 +30,7 @@ def get_logger( "Error" thus be silent most of the time. file_mode (str): The file mode used in opening log file. Defaults to 'w'. + Returns: logging.Logger: The expected logger. 
""" diff --git a/external/anomaly/configs/base/__init__.py b/external/anomaly/configs/base/__init__.py index 398c125beb1..6d3bacfd0ee 100644 --- a/external/anomaly/configs/base/__init__.py +++ b/external/anomaly/configs/base/__init__.py @@ -1,6 +1,4 @@ -""" -Base configurable parameter for anomaly tasks -""" +"""Base configurable parameter for anomaly tasks.""" # Copyright (C) 2022 Intel Corporation # diff --git a/external/anomaly/configs/base/configuration.py b/external/anomaly/configs/base/configuration.py index 55f2879c9fa..19e5d627539 100644 --- a/external/anomaly/configs/base/configuration.py +++ b/external/anomaly/configs/base/configuration.py @@ -1,6 +1,4 @@ -""" -Configurable parameters for anomaly classification task -""" +"""Configurable parameters for anomaly classification task.""" # Copyright (C) 2021 Intel Corporation # @@ -35,9 +33,7 @@ @attrs class BaseAnomalyConfig(ConfigurableParameters): - """ - Base OTE configurable parameters for anomaly classification task. - """ + """Base OTE configurable parameters for anomaly classification task.""" header = string_attribute("Configuration for an anomaly classification task") description = header @@ -70,9 +66,7 @@ class LearningParameters(ParameterGroup): @attrs class DatasetParameters(ParameterGroup): - """ - Parameters related to dataloader - """ + """Parameters related to dataloader.""" header = string_attribute("Dataset Parameters") description = header @@ -89,9 +83,7 @@ class DatasetParameters(ParameterGroup): @attrs class POTParameters(ParameterGroup): - """ - Training parameters for post-training optimization - """ + """Training parameters for post-training optimization.""" header = string_attribute("POT Parameters") description = header @@ -113,9 +105,7 @@ class POTParameters(ParameterGroup): @attrs class NNCFOptimization(ParameterGroup): - """ - Parameters for NNCF optimization - """ + """Parameters for NNCF optimization.""" header = string_attribute("Optimization by NNCF") description = header diff --git a/external/anomaly/configs/base/configuration_enums.py b/external/anomaly/configs/base/configuration_enums.py index 1690e74a00b..ed5fda9a662 100644 --- a/external/anomaly/configs/base/configuration_enums.py +++ b/external/anomaly/configs/base/configuration_enums.py @@ -1,5 +1,7 @@ -""" -Enums needed to define the options of selectable parameters in the configurable parameter classes +"""Configuration Enums. + +Enums needed to define the options of selectable parameters in the configurable +parameter classes. """ # Copyright (C) 2021 Intel Corporation @@ -20,8 +22,9 @@ class POTQuantizationPreset(ConfigurableEnum): - """ - This Enum represents the quantization preset for post training optimization + """POT Quantization Preset Enum. + + This Enum represents the quantization preset for post training optimization. """ PERFORMANCE = "Performance" @@ -29,8 +32,10 @@ class POTQuantizationPreset(ConfigurableEnum): class EarlyStoppingMetrics(ConfigurableEnum): - """ - This enum represents the different metrics that can be used for early stopping + """Early Stopping Metric Enum. + + This enum represents the different metrics that can be used for early + stopping. """ IMAGE_ROC_AUC = "image_AUROC" @@ -38,8 +43,10 @@ class EarlyStoppingMetrics(ConfigurableEnum): class ModelName(ConfigurableEnum): - """ - This enum represents the different model architectures for anomaly classification + """Model Name Enum. + + This enum represents the different model architectures for anomaly + classification. 
""" STFPM = "stfpm" @@ -47,8 +54,10 @@ class ModelName(ConfigurableEnum): class ModelBackbone(ConfigurableEnum): - """ - This enum represents the common backbones that can be used with Padim and STFPM + """Model Backbone Enum. + + This enum represents the common backbones that can be used with Padim and + STFPM. """ RESNET18 = "resnet18" diff --git a/external/anomaly/configs/padim/__init__.py b/external/anomaly/configs/padim/__init__.py index b764a82efe0..9091f9b00e5 100644 --- a/external/anomaly/configs/padim/__init__.py +++ b/external/anomaly/configs/padim/__init__.py @@ -1,6 +1,4 @@ -""" -Base configuration parameters for Padim -""" +"""Base configuration parameters for Padim.""" # Copyright (C) 2022 Intel Corporation # diff --git a/external/anomaly/configs/padim/configuration.py b/external/anomaly/configs/padim/configuration.py index 85079c58843..6977644cd88 100644 --- a/external/anomaly/configs/padim/configuration.py +++ b/external/anomaly/configs/padim/configuration.py @@ -1,6 +1,4 @@ -""" -Configurable parameters for Padim anomaly task -""" +"""Configurable parameters for Padim anomaly task.""" # Copyright (C) 2021 Intel Corporation # @@ -23,9 +21,7 @@ @attrs class PadimAnomalyBaseConfig(BaseAnomalyConfig): - """ - Configurable parameters for PADIM anomaly classification task. - """ + """Configurable parameters for PADIM anomaly classification task.""" header = string_attribute("Configuration for Padim") description = header diff --git a/external/anomaly/configs/stfpm/__init__.py b/external/anomaly/configs/stfpm/__init__.py index fb2e6736121..1366d84f447 100644 --- a/external/anomaly/configs/stfpm/__init__.py +++ b/external/anomaly/configs/stfpm/__init__.py @@ -1,6 +1,4 @@ -""" -Base configuration parameters for STFPM -""" +"""Base configuration parameters for STFPM.""" # Copyright (C) 2022 Intel Corporation # diff --git a/external/anomaly/configs/stfpm/configuration.py b/external/anomaly/configs/stfpm/configuration.py index ae9357c1a9d..6af8685cb5e 100644 --- a/external/anomaly/configs/stfpm/configuration.py +++ b/external/anomaly/configs/stfpm/configuration.py @@ -1,6 +1,4 @@ -""" -Configurable parameters for STFPM anomaly base task -""" +"""Configurable parameters for STFPM anomaly base task.""" # Copyright (C) 2022 Intel Corporation # @@ -32,9 +30,7 @@ @attrs class STFPMAnomalyBaseConfig(BaseAnomalyConfig): - """ - Configurable parameters for STFPM anomaly base task. 
- """ + """Configurable parameters for STFPM anomaly base task.""" header = string_attribute("Configuration for STFPM") description = header @@ -69,9 +65,7 @@ class LearningParameters(BaseAnomalyConfig.LearningParameters): @attrs class EarlyStoppingParameters(ParameterGroup): - """ - Early stopping parameters - """ + """Early stopping parameters.""" header = string_attribute("Early Stopping Parameters") description = header diff --git a/external/anomaly/setup.py b/external/anomaly/setup.py index da37152318a..832ae3fb86d 100644 --- a/external/anomaly/setup.py +++ b/external/anomaly/setup.py @@ -1,6 +1,4 @@ -""" -Install anomalib wrapper for OTE -""" +"""Install anomalib wrapper for OTE.""" # Copyright (C) 2021 Intel Corporation # diff --git a/external/anomaly/tasks/__init__.py b/external/anomaly/tasks/__init__.py index 4627f1ba708..1172c7ea554 100644 --- a/external/anomaly/tasks/__init__.py +++ b/external/anomaly/tasks/__init__.py @@ -1,6 +1,4 @@ -""" -Initialization of OTE Anomalib -""" +"""Initialization of OTE Anomalib.""" # Copyright (C) 2022 Intel Corporation # diff --git a/external/anomaly/tasks/inference.py b/external/anomaly/tasks/inference.py index 59e1da132df..76b4ff730dc 100644 --- a/external/anomaly/tasks/inference.py +++ b/external/anomaly/tasks/inference.py @@ -208,7 +208,7 @@ def evaluate(self, output_resultset: ResultSetEntity, evaluation_metric: Optiona output_resultset.performance.dashboard_metrics.extend(accuracy.dashboard_metrics) def _export_to_onnx(self, onnx_path: str): - """Export model to ONNX + """Export model to ONNX. Args: onnx_path (str): path to save ONNX file @@ -263,7 +263,6 @@ def _model_info(self) -> Dict: Returns: Dict: Model info. """ - return { "model": self.model.state_dict(), "config": self.get_config(), diff --git a/external/anomaly/tasks/nncf.py b/external/anomaly/tasks/nncf.py index a727b281f47..23bfd6f7098 100644 --- a/external/anomaly/tasks/nncf.py +++ b/external/anomaly/tasks/nncf.py @@ -210,7 +210,6 @@ def _model_info(self) -> Dict: Returns: Dict: Model info. """ - return { "compression_state": self.compression_ctrl.get_compression_state(), # type: ignore "meta": { @@ -223,7 +222,7 @@ def _model_info(self) -> Dict: } def _export_to_onnx(self, onnx_path: str): - """Export model to ONNX + """Export model to ONNX. Args: onnx_path (str): path to save ONNX file diff --git a/external/anomaly/tasks/openvino.py b/external/anomaly/tasks/openvino.py index f2184200ed2..f2ab3e589c3 100644 --- a/external/anomaly/tasks/openvino.py +++ b/external/anomaly/tasks/openvino.py @@ -1,6 +1,4 @@ -""" -OpenVINO Anomaly Task -""" +"""OpenVINO Anomaly Task.""" # Copyright (C) 2021 Intel Corporation # @@ -71,8 +69,7 @@ class OTEOpenVINOAnomalyDataloader(DataLoader): - """ - Dataloader for loading OTE dataset into OTE OpenVINO Inferencer + """Dataloader for loading OTE dataset into OTE OpenVINO Inferencer. Args: dataset (DatasetEntity): OTE dataset entity @@ -89,20 +86,32 @@ def __init__( self.dataset = dataset self.inferencer = inferencer - def __getitem__(self, index): + def __getitem__(self, index: int): + """Get dataset item. + + Args: + index (int): Index of the dataset sample. + + Returns: + Dataset item. + """ image = self.dataset[index].numpy annotation = self.dataset[index].annotation_scene inputs = self.inferencer.pre_process(image) return (index, annotation), inputs - def __len__(self): + def __len__(self) -> int: + """Get size of the dataset. + + Returns: + int: Size of the dataset. 
+ """ return len(self.dataset) class OpenVINOTask(IInferenceTask, IEvaluationTask, IOptimizationTask, IDeploymentTask): - """ - OpenVINO inference task + """OpenVINO inference task. Args: task_environment (TaskEnvironment): task environment of the trained anomaly model @@ -123,8 +132,7 @@ def __init__(self, task_environment: TaskEnvironment) -> None: self._base_dir = os.path.abspath(os.path.dirname(template_file_path)) def get_config(self) -> ADDict: - """ - Get Anomalib Config from task environment + """Get Anomalib Config from task environment. Returns: ADDict: Anomalib config @@ -193,7 +201,6 @@ def infer(self, dataset: DatasetEntity, inference_parameters: InferenceParameter def get_meta_data(self) -> Dict: """Get Meta Data.""" - image_threshold = np.frombuffer(self.task_environment.model.get_data("image_threshold"), dtype=np.float32) pixel_threshold = np.frombuffer(self.task_environment.model.get_data("pixel_threshold"), dtype=np.float32) min_value = np.frombuffer(self.task_environment.model.get_data("min"), dtype=np.float32) @@ -224,8 +231,7 @@ def evaluate(self, output_resultset: ResultSetEntity, evaluation_metric: Optiona output_resultset.performance = metric.get_performance() def _get_optimization_algorithms_configs(self) -> List[ADDict]: - """Returns list of optimization algorithms configurations""" - + """Returns list of optimization algorithms configurations.""" hparams = self.task_environment.get_hyper_parameters() optimization_config_path = os.path.join(self._base_dir, "pot_optimization_config.json") @@ -319,8 +325,7 @@ def optimize( logger.info("POT optimization completed") def load_inferencer(self) -> OpenVINOInferencer: - """ - Create the OpenVINO inferencer object + """Create the OpenVINO inferencer object. Returns: OpenVINOInferencer object @@ -337,7 +342,7 @@ def load_inferencer(self) -> OpenVINOInferencer: @staticmethod def __save_weights(path: str, data: bytes) -> None: - """Write data to file + """Write data to file. Args: path (str): Path of output file @@ -348,8 +353,7 @@ def __save_weights(path: str, data: bytes) -> None: @staticmethod def __load_weights(path: str, output_model: ModelEntity, key: str) -> None: - """ - Load weights into output model + """Load weights into output model. Args: path (str): Path to weights diff --git a/external/anomaly/tools/__init__.py b/external/anomaly/tools/__init__.py index 7f8d4fec77f..893bfe23484 100644 --- a/external/anomaly/tools/__init__.py +++ b/external/anomaly/tools/__init__.py @@ -1,6 +1,4 @@ -""" -Collection of tools to run anomaly training extension. 
-""" +"""Collection of tools to run anomaly training extension.""" # Copyright (C) 2022 Intel Corporation # diff --git a/external/anomaly/tools/sample.py b/external/anomaly/tools/sample.py index b7bf572928c..4b381503986 100644 --- a/external/anomaly/tools/sample.py +++ b/external/anomaly/tools/sample.py @@ -97,7 +97,6 @@ def __init__( >>> task.export() Performance(score: 0.9756097560975608, dashboard: (1 metric groups)) """ - logger.info("Loading the model template.") self.model_template = parse_model_template(model_template_path) diff --git a/pyproject.toml b/pyproject.toml index 54df23eb8c5..87366c79f8c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -95,3 +95,20 @@ known-third-party = ["enchant", "ote_sdk"] [tool.pylint.design] max-args = 7 + + +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# PYDOCSTYLE CONFIGURATION # +[tool.pydocstyle] +inherit = false +ignore = [ + "D107", # Missing docstring in __init__ + "D202", # No blank lines allowed after function docstring + "D203", # 1 blank line required before class docstring + "D213", # Multi-line docstring summary should start at the second line + "D401", # First line should be in imperative mood; try rephrasing + "D404", # First word of the docstring should not be This + "D406", # Section name should end with a newline + "D407", # Missing dashed underline after section + "D413", # Missing blank line after last section +]