From 077db2d701bf2dbb8608d94ab34640dfbc2ed583 Mon Sep 17 00:00:00 2001
From: rasbt
Date: Wed, 29 May 2024 22:51:46 +0000
Subject: [PATCH] resolve redundancies

---
 litgpt/__main__.py                      | 10 ----------
 litgpt/finetune/adapter.py              |  3 +--
 litgpt/finetune/adapter_v2.py           |  3 +--
 litgpt/finetune/full.py                 |  3 +--
 litgpt/finetune/lora.py                 |  3 +--
 litgpt/generate/adapter.py              |  3 +--
 litgpt/generate/adapter_v2.py           |  3 +--
 litgpt/generate/base.py                 |  3 +--
 litgpt/generate/full.py                 |  3 +--
 litgpt/generate/sequentially.py         |  3 +--
 litgpt/generate/tp.py                   |  2 +-
 litgpt/scripts/convert_hf_checkpoint.py |  3 +--
 litgpt/scripts/merge_lora.py            |  3 +--
 13 files changed, 12 insertions(+), 33 deletions(-)

diff --git a/litgpt/__main__.py b/litgpt/__main__.py
index c6db4ff9e8..9dffc2210f 100644
--- a/litgpt/__main__.py
+++ b/litgpt/__main__.py
@@ -31,16 +31,6 @@
     from jsonargparse import ArgumentParser
 
 
-def _new_parser(**kwargs: Any) -> "ArgumentParser":
-    from jsonargparse import ActionConfigFile, ArgumentParser
-
-    parser = ArgumentParser(**kwargs)
-    parser.add_argument(
-        "-c", "--config", action=ActionConfigFile, help="Path to a configuration file in json or yaml format."
-    )
-    return parser
-
-
 def main() -> None:
     parser_data = {
         "download": {"fn": download_fn, "_help": "Download weights or tokenizer data from the Hugging Face Hub."},
diff --git a/litgpt/finetune/adapter.py b/litgpt/finetune/adapter.py
index cf03cbb94b..f0b9606364 100644
--- a/litgpt/finetune/adapter.py
+++ b/litgpt/finetune/adapter.py
@@ -39,7 +39,7 @@
 
 
 def setup(
-    checkpoint_dir: str,
+    checkpoint_dir: Path,
     out_dir: Path = Path("out/finetune/adapter"),
     precision: Optional[str] = None,
     quantize: Optional[Literal["bnb.nf4", "bnb.nf4-dq", "bnb.fp4", "bnb.fp4-dq", "bnb.int8-training"]] = None,
@@ -80,7 +80,6 @@ def setup(
     devices = parse_devices(devices)
     out_dir = init_out_dir(out_dir)
 
-    checkpoint_dir = Path(checkpoint_dir)
     check_valid_checkpoint_dir(checkpoint_dir)
     config = Config.from_file(checkpoint_dir / "model_config.yaml")
 
diff --git a/litgpt/finetune/adapter_v2.py b/litgpt/finetune/adapter_v2.py
index c7bf59bed2..924e5e520b 100644
--- a/litgpt/finetune/adapter_v2.py
+++ b/litgpt/finetune/adapter_v2.py
@@ -39,7 +39,7 @@
 
 
 def setup(
-    checkpoint_dir: str,
+    checkpoint_dir: Path,
     out_dir: Path = Path("out/finetune/adapter-v2"),
     precision: Optional[str] = None,
     quantize: Optional[Literal["bnb.nf4", "bnb.nf4-dq", "bnb.fp4", "bnb.fp4-dq", "bnb.int8-training"]] = None,
@@ -81,7 +81,6 @@ def setup(
     devices = parse_devices(devices)
     out_dir = init_out_dir(out_dir)
 
-    checkpoint_dir = Path(checkpoint_dir)
     check_valid_checkpoint_dir(checkpoint_dir)
     config = Config.from_file(checkpoint_dir / "model_config.yaml")
 
diff --git a/litgpt/finetune/full.py b/litgpt/finetune/full.py
index 939a7ea105..f6d3be0f49 100644
--- a/litgpt/finetune/full.py
+++ b/litgpt/finetune/full.py
@@ -36,7 +36,7 @@
 
 
 def setup(
-    checkpoint_dir: str,
+    checkpoint_dir: Path,
     out_dir: Path = Path("out/finetune/full"),
     precision: Optional[str] = None,
     devices: Union[int, str] = 1,
@@ -78,7 +78,6 @@ def setup(
     devices = parse_devices(devices)
     out_dir = init_out_dir(out_dir)
 
-    checkpoint_dir = Path(checkpoint_dir)
     check_valid_checkpoint_dir(checkpoint_dir)
     config = Config.from_file(checkpoint_dir / "model_config.yaml")
 
diff --git a/litgpt/finetune/lora.py b/litgpt/finetune/lora.py
index d5114cba98..6b33ac98cc 100644
--- a/litgpt/finetune/lora.py
+++ b/litgpt/finetune/lora.py
@@ -40,7 +40,7 @@
 
 
 def setup(
-    checkpoint_dir: str,
+    checkpoint_dir: Path,
     out_dir: Path = Path("out/finetune/lora"),
     precision: Optional[str] = None,
     quantize: Optional[Literal["bnb.nf4", "bnb.nf4-dq", "bnb.fp4", "bnb.fp4-dq", "bnb.int8-training"]] = None,
@@ -99,7 +99,6 @@ def setup(
     devices = parse_devices(devices)
     out_dir = init_out_dir(out_dir)
 
-    checkpoint_dir = Path(checkpoint_dir)
     check_valid_checkpoint_dir(checkpoint_dir)
     config = Config.from_file(
         checkpoint_dir / "model_config.yaml",
diff --git a/litgpt/generate/adapter.py b/litgpt/generate/adapter.py
index 9afdb90966..6cd99c0746 100644
--- a/litgpt/generate/adapter.py
+++ b/litgpt/generate/adapter.py
@@ -17,7 +17,7 @@
 
 
 def main(
-    checkpoint_dir: str,
+    checkpoint_dir: Path,
     prompt: str = "What food do llamas eat?",
     input: str = "",
     adapter_path: Path = Path("out/finetune/adapter/final/lit_model.pth.adapter"),
@@ -63,7 +63,6 @@ def main(
             samples.
         precision: Indicates the Fabric precision setting to use.
     """
-    checkpoint_dir = Path(checkpoint_dir)
     precision = precision or get_default_supported_precision(training=False)
 
     plugins = None
diff --git a/litgpt/generate/adapter_v2.py b/litgpt/generate/adapter_v2.py
index 440c7e68b0..11e4420023 100644
--- a/litgpt/generate/adapter_v2.py
+++ b/litgpt/generate/adapter_v2.py
@@ -17,7 +17,7 @@
 
 
 def main(
-    checkpoint_dir: str,
+    checkpoint_dir: Path,
     prompt: str = "What food do llamas eat?",
     input: str = "",
     adapter_path: Path = Path("out/finetune/adapter-v2/final/lit_model.pth.adapter_v2"),
@@ -63,7 +63,6 @@ def main(
             samples.
         precision: Indicates the Fabric precision setting to use.
     """
-    checkpoint_dir = Path(checkpoint_dir)
     precision = precision or get_default_supported_precision(training=False)
 
     plugins = None
diff --git a/litgpt/generate/base.py b/litgpt/generate/base.py
index 25be4e9af0..bc67c4fdf5 100644
--- a/litgpt/generate/base.py
+++ b/litgpt/generate/base.py
@@ -133,7 +133,7 @@ def generate(
 
 @torch.inference_mode()
 def main(
-    checkpoint_dir: str,
+    checkpoint_dir: Path,
     prompt: str = "What food do llamas eat?",
     *,
     num_samples: int = 1,
@@ -178,7 +178,6 @@ def main(
         precision: Indicates the Fabric precision setting to use.
        compile: Whether to compile the model.
     """
-    checkpoint_dir = Path(checkpoint_dir)
     precision = precision or get_default_supported_precision(training=False)
 
     plugins = None
diff --git a/litgpt/generate/full.py b/litgpt/generate/full.py
index f0eda349de..ca607e2784 100644
--- a/litgpt/generate/full.py
+++ b/litgpt/generate/full.py
@@ -16,7 +16,7 @@
 
 
 def main(
-    checkpoint_dir: str,
+    checkpoint_dir: Path,
     prompt: str = "What food do llamas eat?",
     input: str = "",
     finetuned_path: Path = Path("out/full/alpaca/lit_model_finetuned.pth"),
@@ -62,7 +62,6 @@ def main(
             samples.
         precision: Indicates the Fabric precision setting to use.
     """
-    checkpoint_dir = Path(checkpoint_dir)
     precision = precision or get_default_supported_precision(training=False)
 
     plugins = None
diff --git a/litgpt/generate/sequentially.py b/litgpt/generate/sequentially.py
index d8cdb49eae..25c6a0bc00 100644
--- a/litgpt/generate/sequentially.py
+++ b/litgpt/generate/sequentially.py
@@ -112,7 +112,7 @@ def replace_device(module: torch.nn.Module, replace: torch.device, by: torch.dev
 
 @torch.inference_mode()
 def main(
-    checkpoint_dir: str,
+    checkpoint_dir: Path,
     prompt: str = "What food do llamas eat?",
     *,
     num_samples: int = 1,
@@ -156,7 +156,6 @@ def main(
         precision: Indicates the Fabric precision setting to use.
        compile: Whether to compile the model.
     """
-    checkpoint_dir = Path(checkpoint_dir)
     precision = precision or get_default_supported_precision(training=False)
 
     plugins = None
diff --git a/litgpt/generate/tp.py b/litgpt/generate/tp.py
index 0703bb9e89..efe42226db 100644
--- a/litgpt/generate/tp.py
+++ b/litgpt/generate/tp.py
@@ -90,7 +90,7 @@ def tensor_parallel(fabric: L.Fabric, model: GPT) -> GPT:
 
 @torch.inference_mode()
 def main(
-    checkpoint_dir: str,
+    checkpoint_dir: Path,
     prompt: str = "What food do llamas eat?",
     *,
     num_samples: int = 1,
diff --git a/litgpt/scripts/convert_hf_checkpoint.py b/litgpt/scripts/convert_hf_checkpoint.py
index 879c85df75..dae97ad40a 100644
--- a/litgpt/scripts/convert_hf_checkpoint.py
+++ b/litgpt/scripts/convert_hf_checkpoint.py
@@ -287,7 +287,7 @@ def load_param(param: Union[torch.Tensor, NotYetLoadedTensor], name: str, dtype:
 
 @torch.inference_mode()
 def convert_hf_checkpoint(
-    checkpoint_dir: str,
+    checkpoint_dir: Path,
     *,
     model_name: Optional[str] = None,
     dtype: Optional[str] = None,
@@ -302,7 +302,6 @@ def convert_hf_checkpoint(
         dtype: The data type to convert the checkpoint files to. If not specified, the
             weights will remain in the dtype they are downloaded in.
     """
-    checkpoint_dir = Path(checkpoint_dir)
     if model_name is None:
         model_name = checkpoint_dir.name
     if dtype is not None:
diff --git a/litgpt/scripts/merge_lora.py b/litgpt/scripts/merge_lora.py
index 249b8d6fb0..8debdcaef0 100644
--- a/litgpt/scripts/merge_lora.py
+++ b/litgpt/scripts/merge_lora.py
@@ -13,7 +13,7 @@
 
 
 def merge_lora(
-    checkpoint_dir: str,
+    checkpoint_dir: Path,
     pretrained_checkpoint_dir: Optional[Path] = None,
     precision: Optional[str] = None
 ) -> None:
@@ -34,7 +34,6 @@ def merge_lora(
         precision: Optional precision setting to instantiate the model weights in. By default, this will
             automatically be inferred from the metadata in the given ``checkpoint_dir`` directory.
     """
-    checkpoint_dir = Path(checkpoint_dir)
     check_valid_checkpoint_dir(checkpoint_dir, model_filename="lit_model.pth.lora")
     if pretrained_checkpoint_dir is not None:
         check_valid_checkpoint_dir(pretrained_checkpoint_dir)