rafaaa2105 committed on
Commit
08203ce
1 Parent(s): b54b151

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -3
app.py CHANGED
@@ -1,3 +1,4 @@
 
1
  import torch
2
  from tqdm import tqdm
3
  from diffusers.models import AutoencoderKL
@@ -6,6 +7,10 @@ import gradio as gr
6
  import requests
7
  import spaces
8
 
 
 
 
 
9
  models_list = []
10
  loras_list = ["None"]
11
  models = {}
@@ -52,7 +57,7 @@ def download_and_load_civitai_model(model_id, lora_id="", progress=gr.Progress(t
52
  return f"Error: No suitable file found for model {model_name}."
53
 
54
  file_extension = '.ckpt' if model_ckpt_url else '.safetensors'
55
- model_filename = f"{model_name}{file_extension}"
56
  download_file(model_url, model_filename)
57
 
58
  if lora_id:
@@ -65,7 +70,8 @@ def download_and_load_civitai_model(model_id, lora_id="", progress=gr.Progress(t
65
  if not lora_safetensors_url:
66
  return f"Error: No suitable file found for LoRA {lora_name}."
67
 
68
- download_file(lora_safetensors_url, f"{lora_name}.safetensors")
 
69
  if lora_name not in loras_list:
70
  loras_list.append(lora_name)
71
  else:
@@ -99,7 +105,8 @@ def load_model(model, lora="", use_lora=False, progress=gr.Progress(track_tqdm=T
99
  )
100
 
101
  if use_lora and lora != "":
102
- pipeline.load_lora_weights(lora)
 
103
 
104
  pipeline.to("cuda")
105
  models[model] = pipeline
 
1
+ import os
2
  import torch
3
  from tqdm import tqdm
4
  from diffusers.models import AutoencoderKL
 
7
  import requests
8
  import spaces
9
 
10
+ # Ensure directories exist
11
+ os.makedirs('models', exist_ok=True)
12
+ os.makedirs('loras', exist_ok=True)
13
+
14
  models_list = []
15
  loras_list = ["None"]
16
  models = {}
 
57
  return f"Error: No suitable file found for model {model_name}."
58
 
59
  file_extension = '.ckpt' if model_ckpt_url else '.safetensors'
60
+ model_filename = os.path.join('models', f"{model_name}{file_extension}")
61
  download_file(model_url, model_filename)
62
 
63
  if lora_id:
 
70
  if not lora_safetensors_url:
71
  return f"Error: No suitable file found for LoRA {lora_name}."
72
 
73
+ lora_filename = os.path.join('loras', f"{lora_name}.safetensors")
74
+ download_file(lora_safetensors_url, lora_filename)
75
  if lora_name not in loras_list:
76
  loras_list.append(lora_name)
77
  else:
 
105
  )
106
 
107
  if use_lora and lora != "":
108
+ lora_path = os.path.join('loras', lora + '.safetensors')
109
+ pipeline.load_lora_weights(lora_path)
110
 
111
  pipeline.to("cuda")
112
  models[model] = pipeline