Spaces: Running on Zero
Commit: "fix example error by cache"
Browse files
File changed: app.py
@@ -20,6 +20,8 @@ else:
Before:
20 |
21 | MAX_SEED = np.iinfo(np.int32).max
22 |
23 |
24 | # Initialize the pipeline and download the sd3 medium model
25 | pipe = StableDiffusion3Pipeline.from_pretrained("stabilityai/stable-diffusion-3-medium-diffusers", torch_dtype=torch.float16)
@@ -126,9 +128,10 @@ with gr.Blocks(css=css) as demo:
Before:
126 |
127 |         gr.Examples(
128 |             examples=examples,
129 | -           inputs=[prompt, enhance_prompt],
130 | -           outputs=[result, better_prompt],
131 |             fn=generate_image,
132 |         )
133 |
134 |         gr.on(
|
After:
20 |
21 | MAX_SEED = np.iinfo(np.int32).max
22 |
23 | + CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES", "0") == "1"
24 | +
25 |
26 | # Initialize the pipeline and download the sd3 medium model
27 | pipe = StableDiffusion3Pipeline.from_pretrained("stabilityai/stable-diffusion-3-medium-diffusers", torch_dtype=torch.float16)
|
|
After:
128 |
129 |         gr.Examples(
130 |             examples=examples,
131 |             fn=generate_image,
132 | +           inputs=[prompt, enhance_prompt, negative_prompt, num_inference_steps, guidance_scale, height, width, seed, num_images_per_prompt],
133 | +           outputs=[result, better_prompt],
134 | +           cache_examples=CACHE_EXAMPLES
135 |         )
136 |
137 |         gr.on(