Commit d3689af: Fix tests
ravi-mosaicml committed May 11, 2022
1 parent c26d6bf commit d3689af
Showing 1 changed file with 7 additions and 6 deletions.
13 changes: 7 additions & 6 deletions composer/trainer/trainer_hparams.py
@@ -55,6 +55,8 @@
 # They exist purely for pyright and should never need
 __all__ = ["TrainerHparams", "FitHparams", "EvalHparams", "ExperimentHparams"]
 
+Scheduler = Union[ComposerScheduler, PyTorchScheduler]
+
 optimizer_registry = {
     "adam": AdamHparams,
     "adamw": AdamWHparams,
@@ -350,9 +352,9 @@ class TrainerHparams(hp.Hparams):
         doc="Ratio by which to scale the training duration and learning rate schedules.",
         default=1.0,
     )
-    step_schedulers_every_batch: bool = hp.optional(
+    step_schedulers_every_batch: Optional[bool] = hp.optional(
         doc="Whether schedulers will update after every optimizer step (True), or every epoch (False).",
-        default=True,
+        default=None,
     )
 
     # Evaluation
@@ -707,8 +709,7 @@ class FitKwargs(TypedDict):
     duration: Optional[Union[int, str, Time[int]]]
 
     # Schedulers
-    schedulers: Optional[Union[ComposerScheduler, PyTorchScheduler, Sequence[Union[ComposerScheduler,
-                                                                                   PyTorchScheduler]]]]
+    schedulers: Optional[Union[Scheduler, Sequence[Scheduler]]]
     scale_schedule_ratio: float
     step_schedulers_every_batch: Optional[bool]

@@ -777,9 +778,9 @@ class FitHparams(hp.Hparams):
         doc="Ratio by which to scale the training duration and learning rate schedules.",
         default=1.0,
     )
-    step_schedulers_every_batch: bool = hp.optional(
+    step_schedulers_every_batch: Optional[bool] = hp.optional(
         doc="Whether schedulers will update after every optimizer step (True), or every epoch (False).",
-        default=True,
+        default=None,
     )
 
     # Evaluation
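
Net effect of the hunks above: a Scheduler = Union[ComposerScheduler, PyTorchScheduler] alias shortens the scheduler annotations, and step_schedulers_every_batch is relaxed from bool (default True) to Optional[bool] (default None), so the flag can be left unset in the hparams and resolved later. Below is a minimal, self-contained sketch of that pattern; it is not Composer code, and the resolve_step_every_batch helper with its "functional schedulers step per batch" heuristic is an illustrative assumption, not something this commit defines.

# Sketch only: an Optional[bool] hyperparameter whose None default means
# "decide later", plus a Union alias covering both scheduler flavors.
from typing import Optional, Sequence, Union


class ComposerScheduler:  # stand-in for composer's functional scheduler type
    pass


class PyTorchScheduler:  # stand-in for torch.optim.lr_scheduler._LRScheduler
    pass


Scheduler = Union[ComposerScheduler, PyTorchScheduler]


def resolve_step_every_batch(flag: Optional[bool], schedulers: Sequence[Scheduler]) -> bool:
    """Return the explicit flag if set; otherwise infer a default from the scheduler types."""
    if flag is not None:
        return flag
    # Assumed heuristic: step every batch only when all schedulers are functional
    # (Composer-style); stateful PyTorch schedulers fall back to per-epoch stepping.
    return bool(schedulers) and all(isinstance(s, ComposerScheduler) for s in schedulers)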
