
Commit 34ec4e3
Add Dora, fix NTC key names
AI-Casanova committed Jul 2, 2024
1 parent 1f7c23b commit 34ec4e3
Showing 2 changed files with 27 additions and 0 deletions.
extensions-builtin/Lora/lora_convert.py (2 additions, 0 deletions)
@@ -164,6 +164,8 @@ def original(self, key):

    def diffusers(self, key):
        if self.is_sdxl:
            if "diffusion_model" in key: # Fix NTC Slider naming error
                key = key.replace("diffusion_model", "lora_unet")
            map_keys = list(self.UNET_CONVERSION_MAP.keys()) # prefix of U-Net modules
            map_keys.sort()
            search_key = key.replace(self.LORA_PREFIX_UNET, "").replace(self.OFT_PREFIX_UNET, "").replace(self.LORA_PREFIX_TEXT_ENCODER1, "").replace(self.LORA_PREFIX_TEXT_ENCODER2, "")
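For reference, the NTC Slider fix above only normalizes the key prefix before the usual kohya-style lookup proceeds. A minimal sketch of the effect, using a made-up key name purely for illustration:

key = "diffusion_model_output_blocks_1_1_transformer_blocks_0_attn1_to_q.lora_down.weight"  # hypothetical NTC-style key
if "diffusion_model" in key:
    key = key.replace("diffusion_model", "lora_unet")
print(key)  # lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_q.lora_down.weight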
extensions-builtin/Lora/network.py (25 additions, 0 deletions)
@@ -88,6 +88,8 @@ def __init__(self, net: Network, weights: NetworkWeights):
        self.bias = weights.w.get("bias")
        self.alpha = weights.w["alpha"].item() if "alpha" in weights.w else None
        self.scale = weights.w["scale"].item() if "scale" in weights.w else None
        self.dora_scale = weights.w.get("dora_scale", None)  # DoRA magnitude tensor; None for non-DoRA networks
        self.dora_norm_dims = len(self.shape) - 1  # singleton dims needed to broadcast the per-column norm over the weight

    def multiplier(self):
        unet_multiplier = 3 * [self.network.unet_multiplier] if not isinstance(self.network.unet_multiplier, list) else self.network.unet_multiplier
@@ -109,6 +111,27 @@ def calc_scale(self):
            return self.alpha / self.dim
        return 1.0

    def apply_weight_decompose(self, updown, orig_weight):
        # Match the device/dtype
        orig_weight = orig_weight.to(updown.dtype)
        dora_scale = self.dora_scale.to(device=orig_weight.device, dtype=updown.dtype)
        updown = updown.to(orig_weight.device)

        # Norm of the merged weight (orig_weight + updown) over every dim except dim 1, one value per column
        merged_scale1 = updown + orig_weight
        merged_scale1_norm = (
            merged_scale1.transpose(0, 1)
            .reshape(merged_scale1.shape[1], -1)
            .norm(dim=1, keepdim=True)
            .reshape(merged_scale1.shape[1], *[1] * self.dora_norm_dims)
            .transpose(0, 1)
        )

        # DoRA reparameterization: dora_scale * (W + updown) / ||W + updown||, returned as a delta on top of W
        dora_merged = (
            merged_scale1 * (dora_scale / merged_scale1_norm)
        )
        final_updown = dora_merged - orig_weight
        return final_updown

    def finalize_updown(self, updown, orig_weight, output_shape, ex_bias=None):
        if self.bias is not None:
            updown = updown.reshape(self.bias.shape)
@@ -120,6 +143,8 @@ def finalize_updown(self, updown, orig_weight, output_shape, ex_bias=None):
            updown = updown.reshape(orig_weight.shape)
        if ex_bias is not None:
            ex_bias = ex_bias * self.multiplier()
        if self.dora_scale is not None:
            updown = self.apply_weight_decompose(updown, orig_weight)
        return updown * self.calc_scale() * self.multiplier(), ex_bias

    def calc_updown(self, target):
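The decomposition added above follows the DoRA idea of splitting a weight into magnitude and direction: the merged weight W + updown is normalized column-wise, rescaled by the learned dora_scale, and handed back to finalize_updown as a plain delta so the existing calc_scale()/multiplier() path still applies to it. A self-contained sketch of the same computation on a toy Linear-shaped weight (tensor shapes and values here are made up for illustration, not taken from the commit):

import torch

orig_weight = torch.randn(8, 4)            # frozen base weight W, shape [out_features, in_features]
updown = 0.01 * torch.randn(8, 4)          # LoRA delta (lora_up @ lora_down)
dora_scale = torch.rand(1, 4) + 0.5        # learned per-column magnitude, shape [1, in_features]
dora_norm_dims = orig_weight.dim() - 1     # 1 for a 2-D Linear weight

merged = updown + orig_weight              # W + delta
# Per-column norm of the merged weight, reshaped so it broadcasts over the weight
norm = (
    merged.transpose(0, 1)
    .reshape(merged.shape[1], -1)
    .norm(dim=1, keepdim=True)
    .reshape(merged.shape[1], *[1] * dora_norm_dims)
    .transpose(0, 1)
)
# DoRA reparameterization: dora_scale * (W + delta) / ||W + delta||, returned as a delta on W
final_updown = merged * (dora_scale / norm) - orig_weight

# Each column of the decomposed weight now carries exactly the learned magnitude
decomposed = orig_weight + final_updown
assert torch.allclose(decomposed.norm(dim=0, keepdim=True), dora_scale, atol=1e-6)

Note that finalize_updown applies the decomposition before multiplying by calc_scale() and multiplier(), so the network multiplier scales the DoRA-adjusted delta linearly.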
