Commit: e93307c
Parent(s): 40d0ad1
Update app.py
app.py CHANGED
@@ -58,14 +58,14 @@ def update_selection(evt: gr.SelectData, width, height):
     )
 
 @spaces.GPU(duration=70)
-def generate_image(
+def generate_image(prompt_mash, steps, seed, cfg_scale, width, height, lora_scale, progress):
     pipe.to("cuda")
     generator = torch.Generator(device="cuda").manual_seed(seed)
 
     with calculateDuration("Generating image"):
         # Generate image
         image = pipe(
-            prompt=
+            prompt=prompt_mash,
             num_inference_steps=steps,
             guidance_scale=cfg_scale,
             width=width,
@@ -82,7 +82,16 @@ def run_lora(prompt, cfg_scale, steps, selected_index, randomize_seed, seed, wid
     selected_lora = loras[selected_index]
     lora_path = selected_lora["repo"]
     trigger_word = selected_lora["trigger_word"]
-
+    if(trigger_word):
+        if "trigger_position" in selected_lora:
+            if selected_lora["trigger_position"] == "prepend":
+                prompt_mash = f"{trigger_word} {prompt}"
+            else:
+                prompt_mash = f"{prompt} {trigger_word}"
+        else:
+            prompt_mash = f"{trigger_word} {prompt}"
+    else:
+        prompt_mash = prompt
     # Load LoRA weights
     with calculateDuration(f"Loading LoRA weights for {selected_lora['title']}"):
         if "weights" in selected_lora:
@@ -96,7 +105,7 @@ def run_lora(prompt, cfg_scale, steps, selected_index, randomize_seed, seed, wid
     if randomize_seed:
         seed = random.randint(0, MAX_SEED)
 
-    image = generate_image(
+    image = generate_image(prompt_mash, steps, seed, cfg_scale, width, height, lora_scale, progress)
     pipe.to("cpu")
     #pipe.unfuse_lora()
     pipe.unload_lora_weights()
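
The trigger-word handling added in the second hunk can be read as a small standalone helper. The sketch below is illustrative only and not part of the commit; the build_prompt name and the example values are hypothetical, while the trigger_word and trigger_position keys follow the loras entries used in app.py.

# Illustrative sketch of the prompt_mash construction added in this commit (not code from app.py).
def build_prompt(prompt: str, selected_lora: dict) -> str:
    trigger_word = selected_lora.get("trigger_word", "")
    if not trigger_word:
        return prompt
    # A "trigger_position" of "prepend" (or a missing key) puts the trigger word first;
    # any other value appends it, matching the if/else chain in the diff.
    if selected_lora.get("trigger_position", "prepend") == "prepend":
        return f"{trigger_word} {prompt}"
    return f"{prompt} {trigger_word}"

# Hypothetical example values:
print(build_prompt("a cat on a couch", {"trigger_word": "TOK", "trigger_position": "prepend"}))
# -> "TOK a cat on a couch"
print(build_prompt("a cat on a couch", {"trigger_word": "TOK", "trigger_position": "append"}))
# -> "a cat on a couch TOK"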
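
For context, the first and third hunks keep the ZeroGPU pattern the file already uses: the pipeline is moved to CUDA only inside the @spaces.GPU-decorated function, and the caller moves it back to CPU and unloads the LoRA weights afterwards. The sketch below is a minimal reading of that pattern, not the commit's code: it assumes a diffusers FluxPipeline with FLUX.1-dev as the base model (the diff does not name it), fills in the height and generator kwargs that are cut off in the hunk, leaves lora_scale and progress unused because the hunk does not show how they are consumed, and uses a hypothetical run_and_release wrapper to stand in for the tail of run_lora.

import torch
import spaces
from diffusers import FluxPipeline

# Assumed base model; the diff itself does not say which pipeline is loaded.
pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16)

@spaces.GPU(duration=70)
def generate_image(prompt_mash, steps, seed, cfg_scale, width, height, lora_scale, progress):
    # The pipeline lives on the GPU only while this decorated call runs.
    pipe.to("cuda")
    generator = torch.Generator(device="cuda").manual_seed(seed)
    image = pipe(
        prompt=prompt_mash,
        num_inference_steps=steps,
        guidance_scale=cfg_scale,
        width=width,
        height=height,        # assumed; the hunk is truncated after width=
        generator=generator,  # assumed; seeding a generator implies it is passed to the pipe
    ).images[0]
    return image

def run_and_release(prompt_mash, steps, seed, cfg_scale, width, height, lora_scale, progress=None):
    # Caller side, mirroring the third hunk: generate, then release GPU and LoRA state.
    image = generate_image(prompt_mash, steps, seed, cfg_scale, width, height, lora_scale, progress)
    pipe.to("cpu")
    pipe.unload_lora_weights()
    return image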