Parallel flux on diffusers version 0.32 (#413)
xibosun authored Dec 26, 2024
1 parent 4d6a038 · commit 70b156d
Showing 1 changed file with 5 additions and 1 deletion.
xfuser/core/distributed/runtime_state.py (6 changes: 5 additions & 1 deletion)
@@ -4,6 +4,7 @@
 
 import numpy as np
 import torch
+import diffusers
 from diffusers import DiffusionPipeline
 import torch.distributed
 
@@ -121,8 +122,11 @@ def __init__(self, pipeline: DiffusionPipeline, config: EngineConfig):
                 * pipeline.transformer.config.attention_head_dim,
             )
         else:
+            vae_scale_factor = pipeline.vae_scale_factor
+            if pipeline.__class__.__name__.startswith("Flux") and diffusers.__version__ >= '0.32':
+                vae_scale_factor *= 2
             self._set_model_parameters(
-                vae_scale_factor=pipeline.vae_scale_factor,
+                vae_scale_factor=vae_scale_factor,
                 backbone_patch_size=pipeline.transformer.config.patch_size,
                 backbone_in_channel=pipeline.transformer.config.in_channels,
                 backbone_inner_dim=pipeline.transformer.config.num_attention_heads
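Note that the added check compares `diffusers.__version__` against `'0.32'` as plain strings, which is a lexicographic comparison; a hypothetical future release such as 0.100.0 would sort before '0.32' and skip the doubling. The sketch below is an illustration rather than part of this commit: it expresses the same adjustment with a numeric comparison via packaging.version, and the helper name flux_vae_scale_factor is invented for the example.

# Illustrative sketch only (not part of this commit): the same Flux
# vae_scale_factor adjustment, with a numeric version comparison.
import diffusers
from packaging.version import Version  # assumes the `packaging` package is installed


def flux_vae_scale_factor(pipeline) -> int:
    """Hypothetical helper mirroring the logic added in runtime_state.py."""
    vae_scale_factor = pipeline.vae_scale_factor
    # The commit doubles the factor for Flux pipelines on diffusers >= 0.32,
    # where the pipeline's reported vae_scale_factor changed.
    if (
        pipeline.__class__.__name__.startswith("Flux")
        and Version(diffusers.__version__) >= Version("0.32")
    ):
        vae_scale_factor *= 2
    return vae_scale_factor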
