mukeshpandey2628 committed on
Commit
99cf11d
•
1 Parent(s): cc4395b

Update run/gradio_ootd.py

Files changed (1)
  1. run/gradio_ootd.py +26 -106
run/gradio_ootd.py CHANGED
@@ -15,7 +15,6 @@ from preprocess.humanparsing.run_parsing import Parsing
 from ootd.inference_ootd_hd import OOTDiffusionHD
 from ootd.inference_ootd_dc import OOTDiffusionDC
 
-
 openpose_model_hd = OpenPose(0)
 parsing_model_hd = Parsing(0)
 ootd_model_hd = OOTDiffusionHD(0)
@@ -24,23 +23,20 @@ openpose_model_dc = OpenPose(1)
 parsing_model_dc = Parsing(1)
 ootd_model_dc = OOTDiffusionDC(1)
 
-
 category_dict = ['upperbody', 'lowerbody', 'dress']
 category_dict_utils = ['upper_body', 'lower_body', 'dresses']
 
-
 example_path = os.path.join(os.path.dirname(__file__), 'examples')
 model_hd = os.path.join(example_path, 'model/model_1.png')
 garment_hd = os.path.join(example_path, 'garment/03244_00.jpg')
 model_dc = os.path.join(example_path, 'model/model_8.png')
 garment_dc = os.path.join(example_path, 'garment/048554_1.jpg')
 
-
 import spaces
 
 @spaces.GPU
 def process_hd(vton_img, garm_img, n_samples, n_steps, image_scale, seed):
-    model_type = 'hd'
+    model_type = 'hd'
     category = 0 # 0:upperbody; 1:lowerbody; 2:dress
 
     with torch.no_grad():
@@ -75,6 +71,7 @@ def process_hd(vton_img, garm_img, n_samples, n_steps, image_scale, seed):
 
     return images
 
+
 @spaces.GPU
 def process_dc(vton_img, garm_img, category):
     model_type = 'dc'
@@ -117,127 +114,53 @@ def process_dc(vton_img, garm_img, category):
 
     return images
 
-
 block = gr.Blocks().queue()
 with block:
-    with gr.Row():
-        gr.Markdown("# ")
-    # with gr.Row():
-    #     gr.Markdown("## Half-body-1")
-    # with gr.Row():
-    #     gr.Markdown("***Support upper-body garments***")
-    # with gr.Row():
-    #     with gr.Column():
-    #         vton_img = gr.Image(label="Model", sources='upload', type="filepath", height=384, value=model_hd)
-    #         example = gr.Examples(
-    #             inputs=vton_img,
-    #             examples_per_page=14,
-    #             examples=[
-    #                 os.path.join(example_path, 'model/model_1.png'),
-    #                 os.path.join(example_path, 'model/model_2.png'),
-    #                 os.path.join(example_path, 'model/model_3.png'),
-    #                 os.path.join(example_path, 'model/model_4.png'),
-    #                 os.path.join(example_path, 'model/model_5.png'),
-    #                 os.path.join(example_path, 'model/model_6.png'),
-    #                 os.path.join(example_path, 'model/model_7.png'),
-    #                 os.path.join(example_path, 'model/01008_00.jpg'),
-    #                 os.path.join(example_path, 'model/07966_00.jpg'),
-    #                 os.path.join(example_path, 'model/05997_00.jpg'),
-    #                 os.path.join(example_path, 'model/02849_00.jpg'),
-    #                 os.path.join(example_path, 'model/14627_00.jpg'),
-    #                 os.path.join(example_path, 'model/09597_00.jpg'),
-    #                 os.path.join(example_path, 'model/01861_00.jpg'),
-    #             ])
-    #     with gr.Column():
-    #         garm_img = gr.Image(label="Garment", sources='upload', type="filepath", height=384, value=garment_hd)
-    #         example = gr.Examples(
-    #             inputs=garm_img,
-    #             examples_per_page=14,
-    #             examples=[
-    #                 os.path.join(example_path, 'garment/03244_00.jpg'),
-    #                 os.path.join(example_path, 'garment/00126_00.jpg'),
-    #                 os.path.join(example_path, 'garment/03032_00.jpg'),
-    #                 os.path.join(example_path, 'garment/06123_00.jpg'),
-    #                 os.path.join(example_path, 'garment/02305_00.jpg'),
-    #                 os.path.join(example_path, 'garment/00055_00.jpg'),
-    #                 os.path.join(example_path, 'garment/00470_00.jpg'),
-    #                 os.path.join(example_path, 'garment/02015_00.jpg'),
-    #                 os.path.join(example_path, 'garment/10297_00.jpg'),
-    #                 os.path.join(example_path, 'garment/07382_00.jpg'),
-    #                 os.path.join(example_path, 'garment/07764_00.jpg'),
-    #                 os.path.join(example_path, 'garment/00151_00.jpg'),
-    #                 os.path.join(example_path, 'garment/12562_00.jpg'),
-    #                 os.path.join(example_path, 'garment/04825_00.jpg'),
-    #             ])
-    #     with gr.Column():
-    #         result_gallery = gr.Gallery(label='Output', show_label=False, elem_id="gallery", preview=True, scale=1)
-    #     with gr.Column():
-    #         run_button = gr.Button(value="Run")
-    #         n_samples = gr.Slider(label="Images", minimum=1, maximum=4, value=1, step=1)
-    #         n_steps = gr.Slider(label="Steps", minimum=20, maximum=40, value=20, step=1)
-    #         # scale = gr.Slider(label="Scale", minimum=1.0, maximum=12.0, value=5.0, step=0.1)
-    #         image_scale = gr.Slider(label="Guidance scale", minimum=1.0, maximum=5.0, value=2.0, step=0.1)
-    #         seed = gr.Slider(label="Seed", minimum=-1, maximum=2147483647, step=1, value=-1)
-
-    #     ips = [vton_img, garm_img, n_samples, n_steps, image_scale, seed]
-    #     run_button.click(fn=process_hd, inputs=ips, outputs=[result_gallery])
-
-
     with gr.Row():
         gr.Markdown("## Virtual Trial Room")
     with gr.Row():
         gr.Markdown("*** Note :- Please Select Garment Type in below drop-down as upper-body/lower-body/dresses;***")
     with gr.Row():
         with gr.Column():
-            vton_img_dc = gr.Image(label="Model", sources='upload', type="filepath", height=384, value=model_dc)
+            vton_img_dc = gr.Image(label="Model", sources='upload', type="filepath", height=384, width=300)
             example = gr.Examples(
                 label="Select for Upper/Lower Body",
                 inputs=vton_img_dc,
                 examples_per_page=7,
                 examples=[
                     os.path.join(example_path, 'model/model_8.png'),
-                    # os.path.join(example_path, 'model/049447_0.jpg'),
                     os.path.join(example_path, 'model/049713_0.jpg'),
-                    # os.path.join(example_path, 'model/051482_0.jpg'),
-                    # os.path.join(example_path, 'model/051918_0.jpg'),
-                    # os.path.join(example_path, 'model/051962_0.jpg'),
-                    # os.path.join(example_path, 'model/049205_0.jpg'),
                 ]
-            )
+            )
             example = gr.Examples(
                 label="Select for Full Body Dress",
                 inputs=vton_img_dc,
                 examples_per_page=7,
                 examples=[
                     os.path.join(example_path, 'model/model_9.png'),
-                    # os.path.join(example_path, 'model/052767_0.jpg'),
-                    # os.path.join(example_path, 'model/052472_0.jpg'),
                     os.path.join(example_path, 'model/053514_0.jpg'),
-                    # os.path.join(example_path, 'model/053228_0.jpg'),
-                    # os.path.join(example_path, 'model/052964_0.jpg'),
-                    # os.path.join(example_path, 'model/053700_0.jpg'),
                 ]
-            )
+            )
         with gr.Column():
-            garm_img_dc = gr.Image(label="Garment", sources='upload', type="filepath", height=384, value=garment_dc)
+            garm_img_dc = gr.Image(label="Garment", sources='upload', type="filepath", height=384, width=300)
             category_dc = gr.Dropdown(label="Garment category (important option!!!)", choices=["Upper-body", "Lower-body", "Dress"], value="Upper-body")
             example = gr.Examples(
                 label="Examples (upper-body)",
                 inputs=garm_img_dc,
                 examples_per_page=7,
                 examples=[
-                    os.path.join(example_path,'garment/01260_00.jpg'),
-                    os.path.join(example_path,'garment/01430_00.jpg'),
-                    os.path.join(example_path,'garment/02783_00.jpg'),
-                    os.path.join(example_path,'garment/03751_00.jpg'),
-                    os.path.join(example_path,'garment/06429_00.jpg'),
-                    os.path.join(example_path,'garment/06802_00.jpg'),
-                    os.path.join(example_path,'garment/07429_00.jpg'),
-                    os.path.join(example_path,'garment/08348_00.jpg'),
-                    os.path.join(example_path,'garment/09933_00.jpg'),
-                    os.path.join(example_path,'garment/11028_00.jpg'),
-                    os.path.join(example_path,'garment/11351_00.jpg'),
-                    os.path.join(example_path,'garment/11791_00.jpg'),
+                    os.path.join(example_path, 'garment/01260_00.jpg'),
+                    os.path.join(example_path, 'garment/01430_00.jpg'),
+                    os.path.join(example_path, 'garment/02783_00.jpg'),
+                    os.path.join(example_path, 'garment/03751_00.jpg'),
+                    os.path.join(example_path, 'garment/06429_00.jpg'),
+                    os.path.join(example_path, 'garment/06802_00.jpg'),
+                    os.path.join(example_path, 'garment/07429_00.jpg'),
+                    os.path.join(example_path, 'garment/08348_00.jpg'),
+                    os.path.join(example_path, 'garment/09933_00.jpg'),
+                    os.path.join(example_path, 'garment/11028_00.jpg'),
+                    os.path.join(example_path, 'garment/11351_00.jpg'),
+                    os.path.join(example_path, 'garment/11791_00.jpg'),
                     os.path.join(example_path, 'garment/048554_1.jpg'),
                     os.path.join(example_path, 'garment/049920_1.jpg'),
                     os.path.join(example_path, 'garment/049965_1.jpg'),
@@ -245,7 +168,8 @@ with block:
                     os.path.join(example_path, 'garment/050181_1.jpg'),
                     os.path.join(example_path, 'garment/049805_1.jpg'),
                     os.path.join(example_path, 'garment/050105_1.jpg'),
-                ])
+                ]
+            )
             example = gr.Examples(
                 label="Examples (lower-body)",
                 inputs=garm_img_dc,
@@ -258,7 +182,8 @@ with block:
                     os.path.join(example_path, 'garment/051517_1.jpg'),
                     os.path.join(example_path, 'garment/051988_1.jpg'),
                     os.path.join(example_path, 'garment/051412_1.jpg'),
-                ])
+                ]
+            )
             example = gr.Examples(
                 label="Examples (dress)",
                 inputs=garm_img_dc,
@@ -271,19 +196,14 @@ with block:
                     os.path.join(example_path, 'garment/053790_1.jpg'),
                     os.path.join(example_path, 'garment/053319_1.jpg'),
                     os.path.join(example_path, 'garment/052234_1.jpg'),
-                ])
+                ]
+            )
         with gr.Column():
-            result_gallery_dc = gr.Gallery(label='Output', show_label=False, elem_id="gallery", preview=True, scale=1)
+            result_gallery_dc = gr.Gallery(label='Output', show_label=False, elem_id="gallery", preview=True, scale=1)
         with gr.Column():
             run_button_dc = gr.Button(value="Run")
-            # n_samples_dc = gr.Slider(label="Images", minimum=1, maximum=4, value=1, step=1)
-            # n_steps_dc = gr.Slider(label="Steps", minimum=20, maximum=40, value=20, step=1)
-            # scale_dc = gr.Slider(label="Scale", minimum=1.0, maximum=12.0, value=5.0, step=0.1)
-            # image_scale_dc = gr.Slider(label="Guidance scale", minimum=1.0, maximum=5.0, value=2.0, step=0.1)
-            # seed_dc = gr.Slider(label="Seed", minimum=-1, maximum=2147483647, step=1, value=-1)
-
+
     ips_dc = [vton_img_dc, garm_img_dc, category_dc]
     run_button_dc.click(fn=process_dc, inputs=ips_dc, outputs=[result_gallery_dc])
 
-
 block.launch()
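For reference, this is the shape of the interface the file converges on after this commit: a single "Virtual Trial Room" section with two gr.Image inputs (fixed height/width, no preset value), a gr.Dropdown for the garment category, a gr.Gallery for the output, and a Run button wired to process_dc. The sketch below is a minimal, self-contained approximation for trying the layout locally: process_dc here is a placeholder stub (the real function runs the OOTDiffusion DC pipeline and is decorated with @spaces.GPU on the Space), and the gr.Examples galleries are omitted because they depend on the files under run/examples.

import gradio as gr

def process_dc(vton_img, garm_img, category):
    # Placeholder for the real OOTDiffusion DC inference in run/gradio_ootd.py.
    # It simply echoes the garment image so the UI wiring can be exercised.
    return [garm_img]

block = gr.Blocks().queue()
with block:
    with gr.Row():
        gr.Markdown("## Virtual Trial Room")
    with gr.Row():
        with gr.Column():
            vton_img_dc = gr.Image(label="Model", sources='upload', type="filepath", height=384, width=300)
        with gr.Column():
            garm_img_dc = gr.Image(label="Garment", sources='upload', type="filepath", height=384, width=300)
            category_dc = gr.Dropdown(label="Garment category", choices=["Upper-body", "Lower-body", "Dress"], value="Upper-body")
        with gr.Column():
            result_gallery_dc = gr.Gallery(label='Output', show_label=False, elem_id="gallery", preview=True, scale=1)
        with gr.Column():
            run_button_dc = gr.Button(value="Run")

    ips_dc = [vton_img_dc, garm_img_dc, category_dc]
    run_button_dc.click(fn=process_dc, inputs=ips_dc, outputs=[result_gallery_dc])

block.launch()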