John6666 committed
Commit b2351e2
1 parent: 8b56b7e

Upload 2 files

Files changed (2)
  1. app.py +14 -9
  2. mod.py +3 -3
app.py CHANGED

@@ -55,6 +55,16 @@ last_cn_on = False
 
 MAX_SEED = 2**32-1
 
+def unload_lora():
+    global pipe, pipe_i2i
+    try:
+        #pipe.unfuse_lora()
+        pipe.unload_lora_weights()
+        #pipe_i2i.unfuse_lora()
+        pipe_i2i.unload_lora_weights()
+    except Exception as e:
+        print(e)
+
 # https://huggingface.co/InstantX/FLUX.1-dev-Controlnet-Union
 # https://huggingface.co/spaces/jiuface/FLUX.1-dev-Controlnet-Union
 # https://huggingface.co/docs/diffusers/main/en/api/pipelines/flux
@@ -63,6 +73,7 @@ def change_base_model(repo_id: str, cn_on: bool, disable_model_cache: bool, prog
     global pipe, pipe_i2i, taef1, good_vae, controlnet_union, controlnet, last_model, last_cn_on, dtype
     try:
         if not disable_model_cache and (repo_id == last_model and cn_on is last_cn_on) or not is_repo_name(repo_id) or not is_repo_exists(repo_id): return gr.update(visible=True)
+        unload_lora()
         pipe.to("cpu")
         pipe_i2i.to("cpu")
         good_vae.to("cpu")
@@ -463,13 +474,7 @@ def run_lora(prompt, image_input, image_strength, cfg_scale, steps, selected_ind
 
     # Unload previous LoRA weights
     with calculateDuration("Unloading LoRA"):
-        try:
-            #pipe.unfuse_lora()
-            pipe.unload_lora_weights()
-            #pipe_i2i.unfuse_lora()
-            pipe_i2i.unload_lora_weights()
-        except Exception as e:
-            print(e)
+        unload_lora()
 
     print(pipe.get_active_adapters()) #
     print(pipe_i2i.get_active_adapters()) #
@@ -482,8 +487,8 @@ def run_lora(prompt, image_input, image_strength, cfg_scale, steps, selected_ind
     lora_weights = []
     if is_valid_lora(lora_json): # Load External LoRA weights
         with calculateDuration("Loading External LoRA weights"):
-            if image_input is not None: lora_names, lora_weights = fuse_loras(pipe_i2i, lora_json)
-            else: lora_names, lora_weights = fuse_loras(pipe, lora_json)
+            if image_input is not None: pipe_i2i, lora_names, lora_weights = fuse_loras(pipe_i2i, lora_json)
+            else: pipe, lora_names, lora_weights = fuse_loras(pipe, lora_json)
         trigger_word = get_trigger_word(lora_json)
         prompt_mash = f"{prompt_mash} {trigger_word}"
         print("Prompt Mash: ", prompt_mash) #
mod.py CHANGED

@@ -269,7 +269,7 @@ def get_model_trigger(model_name: str):
 # https://github.com/huggingface/diffusers/issues/4919
 def fuse_loras(pipe, lorajson: list[dict]):
     try:
-        if not lorajson or not isinstance(lorajson, list): return [], []
+        if not lorajson or not isinstance(lorajson, list): return pipe, [], []
         a_list = []
         w_list = []
         for d in lorajson:
@@ -287,11 +287,11 @@ def fuse_loras(pipe, lorajson: list[dict]):
             pipe.load_lora_weights(k, weight_name = w_name, adapter_name = a_name, low_cpu_mem_usage=True)
             a_list.append(a_name)
             w_list.append(d["scale"])
-        if not a_list: return [], []
+        if not a_list: return pipe, [], []
         #pipe.set_adapters(a_list, adapter_weights=w_list)
         #pipe.fuse_lora(adapter_names=a_list, lora_scale=1.0)
         #pipe.unload_lora_weights()
-        return a_list, w_list
+        return pipe, a_list, w_list
     except Exception as e:
         print(f"External LoRA Error: {e}")
         raise Exception(f"External LoRA Error: {e}") from e