diff --git a/otx/algorithms/classification/adapters/mmcls/task.py b/otx/algorithms/classification/adapters/mmcls/task.py
index ea6ae730afa..a31be2193bf 100644
--- a/otx/algorithms/classification/adapters/mmcls/task.py
+++ b/otx/algorithms/classification/adapters/mmcls/task.py
@@ -408,9 +408,8 @@ def _train_model(
         )
 
         if self._hyperparams.learning_parameters.auto_decrease_batch_size:
-            validate = isinstance(self, NNCFBaseTask)  # nncf needs eval hooks
             train_func = partial(train_model, meta=deepcopy(meta), model=deepcopy(model), distributed=False)
-            adapt_batch_size(train_func, cfg, datasets, validate)
+            adapt_batch_size(train_func, cfg, datasets, isinstance(self, NNCFBaseTask))  # nncf needs eval hooks
 
         train_model(
             model,
diff --git a/otx/algorithms/detection/adapters/mmdet/task.py b/otx/algorithms/detection/adapters/mmdet/task.py
index 1f6a5b52946..c7bf809e422 100644
--- a/otx/algorithms/detection/adapters/mmdet/task.py
+++ b/otx/algorithms/detection/adapters/mmdet/task.py
@@ -279,9 +279,8 @@ def _train_model(
         validate = bool(cfg.data.get("val", None))
 
         if self._hyperparams.learning_parameters.auto_decrease_batch_size:
-            validate = isinstance(self, NNCFBaseTask)  # nncf needs eval hooks
             train_func = partial(train_detector, meta=deepcopy(meta), model=deepcopy(model), distributed=False)
-            adapt_batch_size(train_func, cfg, datasets, validate)
+            adapt_batch_size(train_func, cfg, datasets, isinstance(self, NNCFBaseTask))  # nncf needs eval hooks
 
         train_detector(
             model,
diff --git a/otx/algorithms/segmentation/adapters/mmseg/task.py b/otx/algorithms/segmentation/adapters/mmseg/task.py
index a5f2cf6293b..8c9f2aca3e7 100644
--- a/otx/algorithms/segmentation/adapters/mmseg/task.py
+++ b/otx/algorithms/segmentation/adapters/mmseg/task.py
@@ -372,9 +372,8 @@ def _train_model(
         validate = bool(cfg.data.get("val", None))
 
         if self._hyperparams.learning_parameters.auto_decrease_batch_size:
-            validate = isinstance(self, NNCFBaseTask)  # nncf needs eval hooks
             train_func = partial(train_segmentor, meta=deepcopy(meta), model=deepcopy(model), distributed=False)
-            adapt_batch_size(train_func, cfg, datasets, validate)
+            adapt_batch_size(train_func, cfg, datasets, isinstance(self, NNCFBaseTask))  # nncf needs eval hooks
 
         train_segmentor(
             model,
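
Context for the change, as a standalone sketch: in the detection and segmentation hunks the outer `validate` flag is computed from the dataset config and, judging by the surrounding code, appears to feed the subsequent `train_detector(...)` / `train_segmentor(...)` call. Reassigning it inside the auto-batch-size branch would therefore leak the NNCF check into the real training run; the diff instead passes `isinstance(self, NNCFBaseTask)` straight to `adapt_batch_size`. The snippet below is a minimal illustration of that flag-clobbering pattern only; `adapt_batch_size` and `train` here are stand-ins, not the OTX helpers or their real signatures.

# Hypothetical stand-ins, not the OTX API.
def adapt_batch_size(train_func, validate):
    """Pretend helper: probes a working batch size, running eval hooks only if asked."""
    print(f"adapt_batch_size(validate={validate})")

def train(validate):
    """Pretend training entry point; `validate` decides whether eval hooks are built."""
    print(f"train(validate={validate})")

def train_model_before(is_nncf_task, auto_decrease_batch_size, has_val_data=True):
    validate = has_val_data
    if auto_decrease_batch_size:
        # Pattern removed by the diff: the outer flag is clobbered, so the
        # real training run below loses validation for non-NNCF tasks.
        validate = is_nncf_task
        adapt_batch_size(train, validate)
    train(validate)

def train_model_after(is_nncf_task, auto_decrease_batch_size, has_val_data=True):
    validate = has_val_data
    if auto_decrease_batch_size:
        # Pattern introduced by the diff: pass the NNCF check directly;
        # `validate` for the real run stays untouched.
        adapt_batch_size(train, validate=is_nncf_task)
    train(validate)

if __name__ == "__main__":
    train_model_before(is_nncf_task=False, auto_decrease_batch_size=True)  # train(validate=False)
    train_model_after(is_nncf_task=False, auto_decrease_batch_size=True)   # train(validate=True)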