Fix issue loading LoRAs
Browse files- lora_loading.py +1 -4
lora_loading.py
CHANGED
@@ -430,10 +430,7 @@ def apply_lora_to_model(model: Flux, lora_path: str, lora_scale: float = 1.0):
         weight_f16 = module.weight.clone().detach().float()
         lora_sd = get_lora_for_key(key, lora_weights)
         weight_f16 = apply_lora_weight_to_module(
-            weight_f16,
-            lora_sd,
-            lora_scale=lora_scale,
-            from_original_flux=from_original_flux,
+            weight_f16, lora_sd, lora_scale=lora_scale
         )
         if weight_is_f8:
             module.set_weight_tensor(weight_f16.type(dtype))