Update app.py
app.py CHANGED
@@ -130,7 +130,7 @@ sam_model = LangSAM()
 def infer(ref_style_file, style_description, caption, progress):
     global models_rbm, models_b, device
     if low_vram:
-        models_to(models_rbm, device=device)
+        models_to(models_rbm, device=device, excepts=["generator", "previewer"])
     try:
 
         caption = f"{caption} in {style_description}"
@@ -167,7 +167,7 @@ def infer(ref_style_file, style_description, caption, progress):
 
         if low_vram:
            # The sampling process uses more vram, so we offload everything except two modules to the cpu.
-            models_to(models_rbm, device="cpu")
+            models_to(models_rbm, device="cpu", excepts=["generator", "previewer"])
 
         progress(0.4, "Starting Stage C reverse process")
         # Stage C reverse process.
@@ -234,7 +234,7 @@ def infer(ref_style_file, style_description, caption, progress):
 def infer_compo(style_description, ref_style_file, caption, ref_sub_file, progress):
     global models_rbm, models_b, device, sam_model
     if low_vram:
-        models_to(models_rbm, device=device)
+        models_to(models_rbm, device=device, excepts=["generator", "previewer"])
         models_to(sam_model, device=device)
         models_to(sam_model.sam, device=device)
     try:
@@ -283,7 +283,7 @@ def infer_compo(style_description, ref_style_file, caption, ref_sub_file, progre
         unconditions_b = core_b.get_conditions(batch, models_b, extras_b, is_eval=True, is_unconditional=True)
 
         if low_vram:
-            models_to(models_rbm, device="cpu")
+            models_to(models_rbm, device="cpu", excepts=["generator", "previewer"])
             models_to(sam_model, device="cpu")
             models_to(sam_model.sam, device="cpu")
 
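For context, a minimal sketch of what a models_to helper with an excepts parameter could look like. This is an assumption for illustration only: the actual models_to used by this Space is not shown in the diff, and the attribute-based iteration below is hypothetical.

    import torch.nn as nn

    def models_to(models, device="cpu", excepts=None):
        # Hypothetical sketch: assumes `models` exposes its sub-models as attributes.
        # Move every nn.Module attribute to `device`, skipping names in `excepts`
        # (e.g. "generator" and "previewer" stay put because the sampling loop still needs them).
        excepts = excepts or []
        for name, value in vars(models).items():
            if name in excepts:
                continue
            if isinstance(value, nn.Module):
                value.to(device)

With a signature like this, the calls added in the diff would keep the generator and previewer resident on the GPU while the rest of models_rbm is offloaded to the CPU between stages, which matches the intent of the comment about reducing VRAM use during sampling.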