Commit b626f76
1 Parent(s): edf556a
Update app.py
app.py CHANGED
@@ -34,20 +34,19 @@ def update_selection(selected_state: gr.SelectData):
     return updated_text, instance_prompt, selected_state
 
 vae = AutoencoderKL.from_pretrained("madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch.float16)
-
+pipe = StableDiffusionXLPipeline.from_pretrained(
     "stabilityai/stable-diffusion-xl-base-1.0",
     vae=vae,
     torch_dtype=torch.float16,
 ).to("cpu")
 original_pipe = copy.deepcopy(mutable_pipe)
-
+pipe.to("cuda")
 
 last_lora = ""
 last_merged = False
 
 def run_lora(prompt, negative, weight, selected_state):
-    global last_lora, last_merged
-    pipe = mutable_pipe
+    global last_lora, last_merged, pipe
     if(not selected_state):
         raise gr.Error("You must select a LoRA")
     repo_name = sdxl_loras[selected_state.index][2]
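
For context, the pattern this commit moves to looks roughly like the sketch below: build the SDXL pipeline once on the CPU, keep a deepcopy as a pristine original, move the working copy to the GPU, and let run_lora mutate that module-level pipeline through a global declaration instead of reassigning a local one. This is a minimal sketch under assumptions: the imports, the deepcopy of pipe (the diff still deep-copies mutable_pipe), and the LoRA-loading body of run_lora are illustrative, not the Space's actual code.

# Hedged sketch of the post-commit startup pattern; not the Space's exact app.py.
import copy

import torch
from diffusers import AutoencoderKL, StableDiffusionXLPipeline

vae = AutoencoderKL.from_pretrained(
    "madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch.float16
)
pipe = StableDiffusionXLPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0",
    vae=vae,
    torch_dtype=torch.float16,
).to("cpu")
original_pipe = copy.deepcopy(pipe)  # assumption: pristine CPU copy kept for later resets
pipe.to("cuda")                      # working copy lives on the GPU from here on

last_lora = ""
last_merged = False

def run_lora(prompt, repo_name):
    # Mutate the module-level working pipeline instead of rebuilding it per
    # request; this is why the commit adds `pipe` to the global declaration.
    global last_lora, last_merged, pipe
    if repo_name != last_lora:
        if last_lora:
            pipe.unload_lora_weights()      # detach the previously selected LoRA
        pipe.load_lora_weights(repo_name)   # attach the newly selected LoRA repo
        last_lora = repo_name
    return pipe(prompt=prompt).images[0]

Keeping original_pipe around makes it cheap to restore an unmodified pipeline later; the last_merged flag in the diff suggests some LoRAs are fused into the weights rather than just attached.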
|