Optionally Avoid recomputing features #722

Merged · 17 commits · Apr 11, 2022

Changes from 9 commits
10 changes: 7 additions & 3 deletions torchmetrics/collections.py
@@ -221,10 +221,14 @@ def compute(self) -> Dict[str, Any]:

return {k: m.compute() for k, m in self.items()}

def reset(self) -> None:
"""Iteratively call reset for each metric."""
def reset(self, exclude_states: Optional[Sequence[str]] = None) -> None:
"""Iteratively call reset for each metric.

Args:
exclude_states: sequence of strings indicating metric states that should not be reset.
"""
for _, m in self.items(keep_base=True):
m.reset()
m.reset(exclude_states=exclude_states)

def clone(self, prefix: Optional[str] = None, postfix: Optional[str] = None) -> "MetricCollection":
"""Make a copy of the metric collection
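For context, a minimal usage sketch of the new ``MetricCollection.reset`` signature; the ``CountingMetric`` below is a toy metric written for this example, not part of torchmetrics:

import torch
from torchmetrics import Metric, MetricCollection

class CountingMetric(Metric):
    """Toy metric with two states, used only to illustrate ``exclude_states``."""

    def __init__(self) -> None:
        super().__init__()
        self.add_state("total", default=torch.tensor(0), dist_reduce_fx="sum")
        self.add_state("cached", default=torch.tensor(0), dist_reduce_fx="sum")

    def update(self, x: torch.Tensor) -> None:  # type: ignore
        self.total += x.numel()
        self.cached += x.numel()

    def compute(self) -> torch.Tensor:
        return self.total + self.cached

collection = MetricCollection({"count": CountingMetric()})
collection["count"].update(torch.ones(5))

# ``total`` returns to its default, while ``cached`` keeps its value.
collection.reset(exclude_states=("cached",))
assert collection["count"].total == 0 and collection["count"].cached == 5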
11 changes: 11 additions & 0 deletions torchmetrics/image/fid.py
@@ -161,6 +161,9 @@ class FrechetInceptionDistance(Metric):
- an ``nn.Module`` for using a custom feature extractor. Expects that its forward method returns
an ``[N,d]`` matrix where ``N`` is the batch size and ``d`` is the feature size.

reset_real_features: Whether to also reset the real features. Since in many cases the real dataset does not
change, the features can be cached to avoid recomputing them, which is costly. Set this to ``False`` if
your dataset does not change.
compute_on_step:
Forward only calls ``update()`` and returns None if this is set to False.

@@ -215,6 +218,7 @@ class FrechetInceptionDistance(Metric):
def __init__(
self,
feature: Union[int, torch.nn.Module] = 2048,
reset_real_features: bool = True,
compute_on_step: bool = False,
dist_sync_on_step: bool = False,
process_group: Optional[Any] = None,
@@ -254,6 +258,10 @@ def __init__(
self.add_state("real_features", [], dist_reduce_fx=None)
self.add_state("fake_features", [], dist_reduce_fx=None)

exclude_states = () if reset_real_features else ("real_features",)

self._reset_excluded_states = exclude_states

def update(self, imgs: Tensor, real: bool) -> None: # type: ignore
"""Update the state with extracted features.

@@ -288,3 +296,6 @@ def compute(self) -> Tensor:

# compute fid
return _compute_fid(mean1, cov1, mean2, cov2).to(orig_dtype)

def reset(self, exclude_states: Optional[Sequence[str]] = None) -> None:
super().reset({*self._reset_excluded_states, *(exclude_states or ())})
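A usage sketch of the new flag (assumes torch-fidelity is installed; random uint8 tensors stand in for real images here): with ``reset_real_features=False`` the real features survive every ``reset()``, so only the fake features are recomputed each epoch:

import torch
from torchmetrics.image.fid import FrechetInceptionDistance

fid = FrechetInceptionDistance(feature=64, reset_real_features=False)

# Featurize the real set once; FID expects uint8 images in [0, 255].
real_imgs = torch.randint(0, 200, (10, 3, 299, 299), dtype=torch.uint8)
fid.update(real_imgs, real=True)

for epoch in range(3):
    fake_imgs = torch.randint(100, 255, (10, 3, 299, 299), dtype=torch.uint8)
    fid.update(fake_imgs, real=False)
    print(f"epoch {epoch}: fid={fid.compute():.4f}")
    # Clears ``fake_features`` only; ``real_features`` stay cached.
    fid.reset()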
11 changes: 11 additions & 0 deletions torchmetrics/image/kid.py
@@ -112,6 +112,9 @@ class KernelInceptionDistance(Metric):
Scale-length of polynomial kernel. If set to ``None``, it will be automatically set to the feature size.
coef:
Bias term in the polynomial kernel.
reset_real_features: Whether to also reset the real features. Since in many cases the real dataset does not
change, the features can be cached to avoid recomputing them, which is costly. Set this to ``False`` if
your dataset does not change.
compute_on_step:
Forward only calls ``update()`` and returns None if this is set to False.

@@ -180,6 +183,7 @@ def __init__(
degree: int = 3,
gamma: Optional[float] = None, # type: ignore
coef: float = 1.0,
reset_real_features: bool = True,
compute_on_step: bool = False,
dist_sync_on_step: bool = False,
process_group: Optional[Any] = None,
@@ -240,6 +244,10 @@ def __init__(
self.add_state("real_features", [], dist_reduce_fx=None)
self.add_state("fake_features", [], dist_reduce_fx=None)

exclude_states = () if reset_real_features else ("real_features",)

self._reset_excluded_states = exclude_states

def update(self, imgs: Tensor, real: bool) -> None: # type: ignore
"""Update the state with extracted features.

@@ -281,3 +289,6 @@ def compute(self) -> Tuple[Tensor, Tensor]:
kid_scores_.append(o)
kid_scores = torch.stack(kid_scores_)
return kid_scores.mean(), kid_scores.std(unbiased=False)

def reset(self, exclude_states: Optional[Sequence[str]] = None) -> None:
super().reset({*self._reset_excluded_states, *(exclude_states or ())})
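The same pattern works for KID; the small ``subset_size`` below is only so this sketch runs on a handful of images, and ``compute`` returns a ``(mean, std)`` tuple over the sampled subsets:

import torch
from torchmetrics.image.kid import KernelInceptionDistance

kid = KernelInceptionDistance(
    feature=64, subsets=3, subset_size=8, reset_real_features=False
)

real_imgs = torch.randint(0, 200, (16, 3, 299, 299), dtype=torch.uint8)
kid.update(real_imgs, real=True)

fake_imgs = torch.randint(100, 255, (16, 3, 299, 299), dtype=torch.uint8)
kid.update(fake_imgs, real=False)

kid_mean, kid_std = kid.compute()
kid.reset()  # fake features are cleared, real features stay for the next round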
10 changes: 8 additions & 2 deletions torchmetrics/metric.py
@@ -396,13 +396,19 @@ def compute(self) -> Any:
"""Override this method to compute the final metric value from state variables synchronized across the
distributed backend."""

def reset(self) -> None:
"""This method automatically resets the metric state variables to their default value."""
def reset(self, exclude_states: Optional[Sequence[str]] = None) -> None:
"""This method automatically resets the metric state variables to their default value.

Args:
exclude_states: sequence of strings indicating metric states that should not be reset.
"""
self._update_called = False
self._forward_cache = None
self._computed = None

for attr, default in self._defaults.items():
if exclude_states is not None and attr in exclude_states:
continue
current_val = getattr(self, attr)
if isinstance(default, Tensor):
setattr(self, attr, default.detach().clone().to(current_val.device))
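The skip logic in isolation, as a standalone sketch; ``reset_states`` and its arguments are illustrative names, not torchmetrics API:

from typing import Dict, Optional, Sequence

def reset_states(
    states: Dict[str, int],
    defaults: Dict[str, int],
    exclude_states: Optional[Sequence[str]] = None,
) -> None:
    """Restore each state to its default unless it is listed in exclude_states."""
    for attr, default in defaults.items():
        if exclude_states is not None and attr in exclude_states:
            continue  # same guard as in ``Metric.reset`` above
        states[attr] = default

states = {"real_features": 42, "fake_features": 7}
reset_states(states, {"real_features": 0, "fake_features": 0}, ("real_features",))
assert states == {"real_features": 42, "fake_features": 0}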
12 changes: 8 additions & 4 deletions torchmetrics/wrappers/minmax.py
@@ -108,10 +108,14 @@ def compute(self) -> Dict[str, Tensor]: # type: ignore
self.min_val = val if self.min_val > val else self.min_val
return {"raw": val, "max": self.max_val, "min": self.min_val}

def reset(self) -> None:
"""Sets ``max_val`` and ``min_val`` to the initialization bounds and resets the base metric."""
super().reset()
self._base_metric.reset()
def reset(self, exclude_states: Optional[Sequence[str]] = None) -> None:
"""Sets ``max_val`` and ``min_val`` to the initialization bounds and resets the base metric.

Args:
exclude_states: sequence of strings indicating metric states that should not be reset.
"""
super().reset(exclude_states)
self._base_metric.reset(exclude_states)

@staticmethod
def _is_suitable_val(val: Union[int, float, Tensor]) -> bool:
10 changes: 7 additions & 3 deletions torchmetrics/wrappers/multioutput.py
@@ -164,7 +164,11 @@ def forward(self, *args: Any, **kwargs: Any) -> Any:
return None
return results

def reset(self) -> None:
"""Reset all underlying metrics."""
def reset(self, exclude_states: Optional[Sequence[str]] = None) -> None:
"""Reset all underlying metrics.

Args:
exclude_states: sequence of strings indicating metric states that should not be reset.
"""
for metric in self.metrics:
metric.reset()
metric.reset(exclude_states)
10 changes: 7 additions & 3 deletions torchmetrics/wrappers/tracker.py
@@ -141,9 +141,13 @@ def compute_all(self) -> Tensor:
return {k: torch.stack([r[k] for r in res], dim=0) for k in keys}
return torch.stack(res, dim=0)

def reset(self) -> None:
"""Resets the current metric being tracked."""
self[-1].reset()
def reset(self, exclude_states: Optional[Sequence[str]] = None) -> None:
"""Resets the current metric being tracked.

Args:
exclude_states: sequence of strings indicating metric states that should not be reset.
"""
self[-1].reset(exclude_states)

def reset_all(self) -> None:
"""Resets all metrics being tracked."""
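Finally, a sketch of how the tracker forwards ``exclude_states`` to the metric currently being tracked; the state name ``correct`` is an internal detail of ``Accuracy`` in this torchmetrics version, named here purely for illustration (an unknown name is simply never matched and everything is reset):

import torch
from torchmetrics import Accuracy
from torchmetrics.wrappers import MetricTracker

tracker = MetricTracker(Accuracy())
tracker.increment()  # start tracking a fresh copy of the base metric
tracker.update(torch.tensor([1, 0, 1]), torch.tensor([1, 1, 1]))

# Only the currently tracked copy is reset; any state listed in
# ``exclude_states`` is forwarded down and skipped during the reset.
tracker.reset(exclude_states=("correct",))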