A yapf update broke some formatting...re-running the linter (mosaicml…
ravi-mosaicml authored and coryMosaicML committed Feb 23, 2022
1 parent 65f35be commit 84bb846
Showing 3 changed files with 5 additions and 4 deletions.
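The changes below only reflow implicitly concatenated string literals (merging them or re-wrapping them across lines). Python joins adjacent literals at parse time, so none of the resulting strings change; a small check along those lines, using the message from the pytorch_future.py hunk further down (not code from the repo itself):

    # Adjacent string literals are concatenated by the parser, so splitting or
    # merging them does not change the resulting string.
    split = "Only 'constant' or 'linear' warmup_method accepted, but " "got {}"
    merged = "Only 'constant' or 'linear' warmup_method accepted, but got {}"
    assert split == merged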
2 changes: 1 addition & 1 deletion composer/algorithms/alibi/alibi.py
@@ -90,7 +90,7 @@ def apply_alibi(model: torch.nn.Module, heads_per_layer: int, max_sequence_length
zero_and_freeze_expand_position_embeddings(model=model,
attribute=position_embedding_attribute,
new_embedding_length=max_sequence_length)
log.info(f" Position embedding expanded to sequence " f"length {max_sequence_length}, zeroed, and frozen")
log.info(f" Position embedding expanded to sequence length {max_sequence_length}, zeroed, and frozen")

def convert_attention(module: torch.nn.Module, module_index: int = None):
module = register_alibi(module=module, n_heads=heads_per_layer, max_token_length=max_sequence_length)
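The same equivalence holds for adjacent f-strings, so the merged log message above is byte-for-byte identical to the old two-literal form; a quick illustrative check (the value of max_sequence_length is a stand-in, not taken from the repo):

    # Adjacent f-strings concatenate exactly like plain literals.
    max_sequence_length = 1024  # stand-in value for illustration
    old = f" Position embedding expanded to sequence " f"length {max_sequence_length}, zeroed, and frozen"
    new = f" Position embedding expanded to sequence length {max_sequence_length}, zeroed, and frozen"
    assert old == new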
2 changes: 1 addition & 1 deletion composer/datasets/hparams.py
@@ -71,7 +71,7 @@ class SyntheticHparamsMixin(hp.Hparams, abc.ABC):
Ignored if :attr:`use_synthetic` is False. (Default: ``CONTIGUOUS_FORMAT``)
"""

- use_synthetic: bool = hp.optional("Whether to use synthetic data. Defaults to False." "", default=False)
+ use_synthetic: bool = hp.optional("Whether to use synthetic data. Defaults to False.", default=False)
synthetic_num_unique_samples: int = hp.optional("The number of unique samples to allocate memory for.", default=100)
synthetic_device: str = hp.optional("Device to store the sample pool. Should be `cuda` or `cpu`. Defauls to `cpu`.",
default="cpu")
5 changes: 3 additions & 2 deletions composer/optim/pytorch_future.py
@@ -69,7 +69,7 @@ def __init__(self,
verbose=False,
interval='step'):
if warmup_method not in ("constant", "linear"):
- raise ValueError("Only 'constant' or 'linear' warmup_method accepted, but " "got {}".format(warmup_method))
+ raise ValueError("Only 'constant' or 'linear' warmup_method accepted, but got {}".format(warmup_method))
self.warmup_factor = warmup_factor
self.warmup_iters = warmup_iters
self.warmup_method = warmup_method
@@ -84,7 +84,8 @@ def get_lr(self):
"""

if not self._get_lr_called_within_step:
warnings.warn("To get the last learning rate computed by the scheduler, " "please use `get_last_lr()`.")
warnings.warn("To get the last learning rate computed by the scheduler, "
"please use `get_last_lr()`.")

if self.last_epoch == 0:
return [group['lr'] * self.warmup_factor for group in self.optimizer.param_groups]
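Since both hunks in this file touch the WarmUpLR scheduler, a hedged usage sketch may help; it assumes WarmUpLR is importable from composer.optim.pytorch_future with the keyword arguments visible in the diff (warmup_factor, warmup_iters, warmup_method), and that the learning rate is read back through get_last_lr() as the warning advises:

    import torch

    from composer.optim.pytorch_future import WarmUpLR

    model = torch.nn.Linear(4, 4)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

    # warmup_method must be "constant" or "linear"; anything else raises the
    # ValueError whose message was reformatted in the first hunk above.
    scheduler = WarmUpLR(optimizer, warmup_factor=0.1, warmup_iters=5, warmup_method="linear")

    for _ in range(5):
        optimizer.step()
        scheduler.step()

    # Read the LR via get_last_lr() rather than get_lr(), per the warning in get_lr().
    print(scheduler.get_last_lr())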
