From 099a36da8c6a746696da8208a07d237a7ae0e87e Mon Sep 17 00:00:00 2001
From: Jimin Ha
Date: Fri, 13 Dec 2024 10:37:15 -0800
Subject: [PATCH] Check rope_scaling attr

---
 examples/text-generation/utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/text-generation/utils.py b/examples/text-generation/utils.py
index 7cc09e9905..41ceede6c6 100644
--- a/examples/text-generation/utils.py
+++ b/examples/text-generation/utils.py
@@ -433,7 +433,7 @@ def setup_distributed_model(args, model_dtype, model_kwargs, logger):
     # Construct model with fake meta tensors, later will be replaced on devices during ds-inference ckpt load
     with deepspeed.OnDevice(dtype=model_dtype, device="meta"):
         if (
-            config.rope_scaling
+            hasattr(config, 'rope_scaling') and config.rope_scaling
             and config.rope_scaling["rope_type"] == "llama3"
             and config.max_position_embeddings > 8192
         ):
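
Note: the guard added above avoids an AttributeError on model configs that do not define a rope_scaling attribute at all. Below is a minimal standalone sketch of the same pattern; DummyConfig is a hypothetical stand-in for a model config object, not the actual transformers config class.

    # Minimal sketch of the defensive check used in the patch.
    # DummyConfig is a hypothetical placeholder; some real configs
    # simply do not define rope_scaling, or set it to None.
    class DummyConfig:
        max_position_embeddings = 131072
        rope_scaling = {"rope_type": "llama3", "factor": 8.0}

    config = DummyConfig()

    # Accessing config.rope_scaling directly would raise AttributeError
    # on configs without the attribute; hasattr() short-circuits that
    # case, and the truthiness check handles rope_scaling set to None.
    if (
        hasattr(config, 'rope_scaling')
        and config.rope_scaling
        and config.rope_scaling["rope_type"] == "llama3"
        and config.max_position_embeddings > 8192
    ):
        print("would apply llama3 long-context RoPE handling")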