xenomirant committed
Commit bd37079 · verified · 1 Parent(s): 3dfc7ee

Update app.py

Files changed (1): app.py (+62 -1)
app.py CHANGED
@@ -1,9 +1,22 @@
 import gradio as gr
 import numpy as np
 import random
+from typing import Optional

 # import spaces #[uncomment to use ZeroGPU]
 from diffusers import DiffusionPipeline
+
+from diffusers import (
+    DPMSolverMultistepScheduler,
+    DPMSolverSinglestepScheduler,
+    KDPM2DiscreteScheduler,
+    KDPM2AncestralDiscreteScheduler,
+    EulerDiscreteScheduler,
+    EulerAncestralDiscreteScheduler,
+    HeunDiscreteScheduler,
+    LMSDiscreteScheduler,
+)
+
 import torch

 device = "cuda" if torch.cuda.is_available() else "cpu"
@@ -29,10 +42,47 @@ def infer(
     height,
     guidance_scale,
     num_inference_steps,
+    scheduler: Optional[str] = None,
     progress=gr.Progress(track_tqdm=True),
 ):

     pipe = DiffusionPipeline.from_pretrained(model_id, torch_dtype=torch_dtype)
+    match scheduler:
+        case None:
+            pass
+        case "DPMSolverMultistepScheduler":
+            if DPMSolverMultistepScheduler in pipe.scheduler.compatibles:
+                scheduler = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config)
+                pipe.scheduler = scheduler
+        case "DPMSolverSinglestepScheduler":
+            if DPMSolverSinglestepScheduler in pipe.scheduler.compatibles:
+                scheduler = DPMSolverSinglestepScheduler.from_config(pipe.scheduler.config)
+                pipe.scheduler = scheduler
+        case "KDPM2DiscreteScheduler":
+            if KDPM2DiscreteScheduler in pipe.scheduler.compatibles:
+                scheduler = KDPM2DiscreteScheduler.from_config(pipe.scheduler.config)
+                pipe.scheduler = scheduler
+        case "KDPM2AncestralDiscreteScheduler":
+            if KDPM2AncestralDiscreteScheduler in pipe.scheduler.compatibles:
+                scheduler = KDPM2AncestralDiscreteScheduler.from_config(pipe.scheduler.config)
+                pipe.scheduler = scheduler
+        case "EulerDiscreteScheduler":
+            if EulerDiscreteScheduler in pipe.scheduler.compatibles:
+                scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config)
+                pipe.scheduler = scheduler
+        case "EulerAncestralDiscreteScheduler":
+            if EulerAncestralDiscreteScheduler in pipe.scheduler.compatibles:
+                scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
+                pipe.scheduler = scheduler
+        case "HeunDiscreteScheduler":
+            if HeunDiscreteScheduler in pipe.scheduler.compatibles:
+                scheduler = HeunDiscreteScheduler.from_config(pipe.scheduler.config)
+                pipe.scheduler = scheduler
+        case "LMSDiscreteScheduler":
+            if LMSDiscreteScheduler in pipe.scheduler.compatibles:
+                scheduler = LMSDiscreteScheduler.from_config(pipe.scheduler.config)
+                pipe.scheduler = scheduler
+
     pipe = pipe.to(device)

     if randomize_seed:
@@ -74,7 +124,6 @@ with gr.Blocks(css=css) as demo:
             ["stabilityai/sdxl-turbo", "lightx2v/Qwen-Image-Lightning", "tencent/HunyuanImage-2.1", "black-forest-labs/FLUX.1-dev"],
             label="Image-to-text model",
             visible=True,
-            value=model_repo_id
         )

         with gr.Row():
@@ -91,6 +140,7 @@ with gr.Blocks(css=css) as demo:
         result = gr.Image(label="Result", show_label=False)

         with gr.Accordion("Advanced Settings", open=False):
+

             negative_prompt = gr.Text(
                 label="Negative prompt",
@@ -109,6 +159,15 @@ with gr.Blocks(css=css) as demo:

             randomize_seed = gr.Checkbox(label="Randomize seed", value=True)

+            scheduler = gr.Dropdown(
+                [None, "DPMSolverMultistepScheduler", "DPMSolverSinglestepScheduler",
+                 "KDPM2DiscreteScheduler", "KDPM2AncestralDiscreteScheduler",
+                 "EulerDiscreteScheduler", "EulerAncestralDiscreteScheduler",
+                 "HeunDiscreteScheduler", "LMSDiscreteScheduler",],
+                label="Scheduler",
+                visible=True
+            )
+
             with gr.Row():
                 width = gr.Slider(
                     label="Width",
@@ -148,6 +207,7 @@ with gr.Blocks(css=css) as demo:
         triggers=[run_button.click, prompt.submit],
         fn=infer,
         inputs=[
+            model_id,
             prompt,
             negative_prompt,
             seed,
@@ -156,6 +216,7 @@ with gr.Blocks(css=css) as demo:
             height,
             guidance_scale,
             num_inference_steps,
+            scheduler,
         ],
         outputs=[result, seed],
     )
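
Editorial note: the eight `case` arms in the new `match` block differ only in which scheduler class they reference. A minimal table-driven sketch of the same dispatch is given below, assuming the scheduler imports from the diff above; `SCHEDULERS` and `set_scheduler` are illustrative names and are not part of this commit.

# A minimal sketch of a table-driven alternative to the match statement above.
# Assumes the same diffusers scheduler classes imported in the diff;
# SCHEDULERS and set_scheduler are illustrative names, not part of the commit.
from diffusers import (
    DPMSolverMultistepScheduler,
    DPMSolverSinglestepScheduler,
    KDPM2DiscreteScheduler,
    KDPM2AncestralDiscreteScheduler,
    EulerDiscreteScheduler,
    EulerAncestralDiscreteScheduler,
    HeunDiscreteScheduler,
    LMSDiscreteScheduler,
)

# Map each dropdown string to its scheduler class; built from the classes
# themselves so the UI choices and the dispatch table cannot drift apart.
SCHEDULERS = {
    cls.__name__: cls
    for cls in (
        DPMSolverMultistepScheduler,
        DPMSolverSinglestepScheduler,
        KDPM2DiscreteScheduler,
        KDPM2AncestralDiscreteScheduler,
        EulerDiscreteScheduler,
        EulerAncestralDiscreteScheduler,
        HeunDiscreteScheduler,
        LMSDiscreteScheduler,
    )
}

def set_scheduler(pipe, name):
    # None (or an unrecognized name) keeps the pipeline's default scheduler,
    # mirroring the `case None: pass` branch in the diff. The compatibles
    # check is the same guard the commit applies before swapping.
    cls = SCHEDULERS.get(name)
    if cls is not None and cls in pipe.scheduler.compatibles:
        pipe.scheduler = cls.from_config(pipe.scheduler.config)
    return pipe

With this, the body of `infer` would call `set_scheduler(pipe, scheduler)` once, and the `gr.Dropdown` choices could be written as `[None, *SCHEDULERS]`.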