fffiloni committed on
Commit 340ef7f
1 Parent(s): 7fead22

Update app.py

Files changed (1)
  1. app.py +0 -8
app.py CHANGED
@@ -295,14 +295,6 @@ def reset_compo_inference_state():
     torch.cuda.empty_cache()
     gc.collect()
 
-    # Move necessary models back to the correct device
-    if low_vram:
-        models_to(models_rbm, device="cpu", excepts=["generator", "previewer"])
-        models_rbm.generator.to(device)
-        models_rbm.previewer.to(device)
-    else:
-        models_to(models_rbm, device=device)
-
     # Move SAM model components to CPU if they exist
     models_to(sam_model, device="cpu")
     models_to(sam_model.sam, device="cpu")
 
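For context: `models_to` is app.py's device-placement helper, and the removed branch used it to move the RBM model bundle back onto the GPU (keeping only the generator and previewer resident when `low_vram` is set). After this commit, `reset_compo_inference_state()` only clears the CUDA cache, runs garbage collection, and offloads the SAM components. A minimal sketch of the post-change routine follows; the internals of `models_to` and the `sam_model` stand-in are assumptions for illustration, not the repository's actual implementation.

import gc
import torch

def models_to(models, device="cpu", excepts=None):
    # Assumed behavior: move a model, or every nn.Module attribute of a
    # model bundle, to `device`, skipping attribute names listed in `excepts`.
    excepts = excepts or []
    if isinstance(models, torch.nn.Module):
        models.to(device)
        return
    for name, component in vars(models).items():
        if name in excepts or not isinstance(component, torch.nn.Module):
            continue
        component.to(device)

# `sam_model` is a module-level global in app.py; a trivial stand-in is used
# here only so the sketch is self-contained.
class _SamStandIn:
    def __init__(self):
        self.sam = torch.nn.Identity()

sam_model = _SamStandIn()

def reset_compo_inference_state():
    # Post-commit behavior: free cached CUDA memory and collect garbage...
    torch.cuda.empty_cache()
    gc.collect()

    # ...then move SAM model components to CPU if they exist.
    models_to(sam_model, device="cpu")
    models_to(sam_model.sam, device="cpu")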