Fix save_lora_weights in pipeline_utils.py (huggingface#1643)
regisss authored and huijuanzh committed Jan 7, 2025
1 parent a619a55 commit dcce890
Showing 1 changed file with 3 additions and 2 deletions.
5 changes: 3 additions & 2 deletions optimum/habana/diffusers/pipelines/pipeline_utils.py
@@ -28,10 +28,10 @@
 from diffusers.utils.torch_utils import is_compiled_module
 from huggingface_hub import create_repo
 
-from optimum.habana.utils import to_device_dtype
 from optimum.utils import logging
 
 from ...transformers.gaudi_configuration import GaudiConfig
+from ...utils import to_device_dtype
 
 
 logger = logging.get_logger(__name__)
@@ -396,7 +396,8 @@ def save_lora_weights(
             text_encoder_2_lora_layers = to_device_dtype(text_encoder_2_lora_layers, target_device=torch.device("cpu"))
 
         # text_encoder_2_lora_layers is only supported by some diffuser pipelines
-        if text_encoder_2_lora_layers:
+        signature = inspect.signature(super().save_lora_weights)
+        if "text_encoder_2_lora_layers" in signature.parameters:
             return super().save_lora_weights(
                 save_directory,
                 unet_lora_layers,
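The fix replaces the earlier truthiness check on text_encoder_2_lora_layers with a check of the parent method's signature, so the extra argument is only forwarded when the underlying diffusers pipeline's save_lora_weights actually accepts it. Below is a minimal, self-contained sketch of that pattern; BaseLoraPipeline and GaudiLoraPipeline are hypothetical stand-ins, not the real diffusers or optimum-habana classes.

import inspect


class BaseLoraPipeline:
    # Hypothetical parent whose save_lora_weights does NOT accept
    # text_encoder_2_lora_layers (mirrors some diffusers pipelines).
    def save_lora_weights(self, save_directory, unet_lora_layers=None, text_encoder_lora_layers=None):
        print(f"Saving LoRA layers to {save_directory}")


class GaudiLoraPipeline(BaseLoraPipeline):
    # Sketch of the pattern used by this commit: inspect the parent's
    # signature instead of testing whether text_encoder_2_lora_layers is truthy.
    def save_lora_weights(
        self,
        save_directory,
        unet_lora_layers=None,
        text_encoder_lora_layers=None,
        text_encoder_2_lora_layers=None,
    ):
        signature = inspect.signature(super().save_lora_weights)
        if "text_encoder_2_lora_layers" in signature.parameters:
            # Parent supports the extra argument: forward it.
            return super().save_lora_weights(
                save_directory,
                unet_lora_layers,
                text_encoder_lora_layers,
                text_encoder_2_lora_layers,
            )
        # Parent does not support it: call without the extra argument.
        return super().save_lora_weights(save_directory, unet_lora_layers, text_encoder_lora_layers)


GaudiLoraPipeline().save_lora_weights("./lora_out")

Checking the signature rather than the value means the parent is still called correctly when text_encoder_2_lora_layers is None but the parent does accept it, which is what the original truthiness test got wrong.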
