From a5dcb8f4596fb9378e74fcbab062594bf1d635da Mon Sep 17 00:00:00 2001 From: duzhanwei Date: Mon, 5 Aug 2024 12:21:09 +0800 Subject: [PATCH] fix non-contiguous tensor ValueError in save_pretrained Signed-off-by: duzhanwei --- src/transformers/modeling_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/transformers/modeling_utils.py b/src/transformers/modeling_utils.py index 651d20728254..f4ec0874c212 100755 --- a/src/transformers/modeling_utils.py +++ b/src/transformers/modeling_utils.py @@ -2742,7 +2742,7 @@ def save_pretrained( if module_map: filename_to_tensors = logging.tqdm(filename_to_tensors, desc="Saving checkpoint shards") for shard_file, tensors in filename_to_tensors: - shard = {tensor: state_dict[tensor] for tensor in tensors} + shard = {tensor: state_dict[tensor].contiguous() for tensor in tensors} # remake shard with onloaded parameters if necessary if module_map: if accelerate_version < version.parse("0.31"):