Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -1,4 +1,5 @@
 import os
+import spaces
 from dataclasses import dataclass
 
 import gradio as gr
@@ -731,6 +732,7 @@ print(result)
 # model = Flux().to(dtype=torch.bfloat16, device="cuda")
 # result = model.load_state_dict(load_file("/storage/dev/nyanko/flux-dev/flux1-dev.sft"))
 
+@spaces.GPU
 @torch.inference_mode()
 def generate_image(
     prompt, width, height, guidance, seed,
@@ -852,7 +854,7 @@ def create_demo():
 
         with gr.Column():
             output_image = gr.Image(label="Generated Image")
-            output_seed = gr.
+            output_seed = gr.Text(label="Used Seed")
 
     do_img2img.change(
         fn=lambda x: [gr.update(visible=x), gr.update(visible=x), gr.update(visible=x)],