fix: syntax
app.py CHANGED
@@ -32,6 +32,7 @@ models = [
     ("OpenJourney V2", "prompthero/openjourney-v2", 1),
 ]
 
+keep_vram = ["Korakoe/AbyssOrangeMix2-HF", "andite/pastel-mix"]
 base_name, base_model, clip_skip = models[0]
 
 samplers_k_diffusion = [
@@ -129,7 +130,7 @@ def setup_model(name, lora_state=None, lora_scale=1.0):
     te_cache[model] = text_encoder
     lora_cache[model] = LoRANetwork(text_encoder, unet)
 
-    if current_model != model:
+    if current_model != model and current_model not in keep_vram:
         # offload current model
         unet_cache[current_model].to("cpu")
         te_cache[current_model].to("cpu")