Spaces: Running on Zero
Upload 2 files
app.py CHANGED
@@ -111,9 +111,11 @@ def run_lora(prompt, cfg_scale, steps, selected_index, randomize_seed, seed, wid
     progress(1, desc="Preparing Inference.")
 
     image = generate_image(prompt, trigger_word, steps, seed, cfg_scale, width, height, lora_scale, progress)
-
+    if is_valid_lora(lora_json):
+        pipe.unfuse_lora()
+        pipe.unload_lora_weights()
     if selected_index is not None: pipe.unload_lora_weights()
-
+    pipe.to("cpu")
     clear_cache()
     return image, seed
 
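For orientation, the lines added to run_lora amount to a LoRA/GPU cleanup pass after the image is generated. A minimal sketch of that pattern in plain diffusers terms, assuming pipe is a loaded DiffusionPipeline and treating is_valid_lora, lora_json, selected_index, and clear_cache as helpers and state defined elsewhere in this Space:

import torch

def cleanup_after_inference(pipe, lora_json, selected_index):
    # Sketch only, not the Space's exact code.
    # If external LoRAs were fused into the weights, undo the fusion first,
    # then drop the adapter tensors.
    if is_valid_lora(lora_json):       # assumed helper from mod.py
        pipe.unfuse_lora()             # restore the unfused base weights
        pipe.unload_lora_weights()     # free the LoRA adapter modules
    # selected_index presumably marks a gallery LoRA that was loaded but not fused,
    # so unloading alone is enough for it.
    if selected_index is not None:
        pipe.unload_lora_weights()
    pipe.to("cpu")                     # release the ZeroGPU device between calls
    torch.cuda.empty_cache()           # roughly what clear_cache() is assumed to do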
mod.py CHANGED
@@ -135,7 +135,7 @@ def fuse_loras(pipe, lorajson: list[dict]):
     if not a_list: return
     pipe.set_adapters(a_list, adapter_weights=w_list)
     pipe.fuse_lora(adapter_names=a_list, lora_scale=1.0)
-    pipe.unload_lora_weights()
+    #pipe.unload_lora_weights()
 
 
 change_base_model.zerogpu = True
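In fuse_loras, the unload_lora_weights() call right after fuse_lora() is commented out rather than removed, presumably so the adapter weights stay resident and the unfuse_lora() added in app.py can still subtract them from the fused model later. A rough sketch of that fuse-then-unfuse round trip with the diffusers pipeline API (the adapter names and weights below are illustrative):

# Illustrative only; assumes `pipe` already has these LoRA adapters loaded.
adapter_names = ["lora_a", "lora_b"]
adapter_weights = [1.0, 0.8]

pipe.set_adapters(adapter_names, adapter_weights=adapter_weights)
pipe.fuse_lora(adapter_names=adapter_names, lora_scale=1.0)
# Skipping pipe.unload_lora_weights() here keeps the adapter modules around,
# which is what lets the later unfuse succeed.

image = pipe("a prompt").images[0]   # inference with the fused LoRAs

pipe.unfuse_lora()                   # remove the fused LoRA contribution
pipe.unload_lora_weights()           # now the adapters can be dropped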