|
|
|
@ -31,27 +31,6 @@ import comfy.lora
|
|
|
|
|
from comfy.types import UnetWrapperFunction
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def weight_decompose(dora_scale, weight, lora_diff, alpha, strength, intermediate_dtype):
    """Merge a scaled LoRA diff into ``weight`` and renormalize per output column.

    Looks like a DoRA-style (weight-decomposed) merge: the candidate weight is
    rescaled so each column's magnitude matches ``dora_scale`` — TODO confirm
    against the patch format this is fed from.

    Args:
        dora_scale: per-column magnitude tensor; moved to ``weight``'s device
            in ``intermediate_dtype`` before use.
        weight: target weight tensor, updated IN PLACE and also returned.
        lora_diff: low-rank delta; NOTE(review): mutated in place by the
            ``*= alpha`` below, so the caller's tensor is scaled as a side effect.
        alpha: scalar multiplier applied to ``lora_diff``.
        strength: blend factor; 1.0 overwrites ``weight`` with the merged
            result, any other value interpolates ``weight`` toward it.
        intermediate_dtype: dtype used for the ``dora_scale`` cast.

    Returns:
        The (mutated) ``weight`` tensor.
    """
    dora_scale = comfy.model_management.cast_to_device(dora_scale, weight.device, intermediate_dtype)

    lora_diff *= alpha
    merged = weight + lora_diff.type(weight.dtype)

    # Norm over every axis except dim 1: flatten with dim 1 leading, take row
    # norms, then reshape back so it broadcasts against `merged`.
    cols = merged.shape[1]
    flat = merged.transpose(0, 1).reshape(cols, -1)
    col_norms = flat.norm(dim=1, keepdim=True)
    trailing = [1] * (merged.dim() - 1)
    weight_norm = col_norms.reshape(cols, *trailing).transpose(0, 1)

    # Rescale each column to the magnitude prescribed by dora_scale.
    merged *= (dora_scale / weight_norm).type(weight.dtype)

    if strength != 1.0:
        # Interpolate: weight + strength * (merged - weight), done in place.
        merged -= weight
        weight += strength * merged
    else:
        # Full-strength path: overwrite contents without rebinding the tensor.
        weight[:] = merged
    return weight
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def set_model_options_patch_replace(model_options, patch, name, block_name, number, transformer_index=None):
|
|
|
|
|
to = model_options["transformer_options"].copy()
|
|
|
|
|
|
|
|
|
|