flamehaze1115 committed on
Commit
0ed298c
1 Parent(s): 5b00e3f

Upload 2 files

Browse files
Files changed (2) hide show
  1. gradio_app.py +30 -10
  2. requirements.txt +1 -2
gradio_app.py CHANGED
@@ -215,7 +215,7 @@ def run_pipeline(pipeline, cfg, single_image, guidance_scale, steps, seed, crop_
215
  images_pred = [save_image(images_pred[i]) for i in range(bsz)]
216
 
217
  out = images_pred + normals_pred
218
- return images_pred, normals_pred
219
 
220
 
221
  @dataclass
@@ -285,7 +285,7 @@ def run_demo():
285
  gr.Examples(
286
  examples=example_fns,
287
  inputs=[input_image],
288
- #outputs=[input_image],
289
  cache_examples=False,
290
  label='Examples (click one of the images below to start)',
291
  examples_per_page=30
@@ -297,12 +297,15 @@ def run_demo():
297
  with gr.Accordion('Advanced options', open=True):
298
  with gr.Row():
299
  with gr.Column():
300
- input_processing = gr.CheckboxGroup(['Background Removal'], label='Input Image Preprocessing', value=['Background Removal'])
 
 
 
301
  with gr.Column():
302
  output_processing = gr.CheckboxGroup(['Background Removal'], label='Output Image Postprocessing', value=[])
303
  with gr.Row():
304
  with gr.Column():
305
- scale_slider = gr.Slider(1, 10, value=3, step=1,
306
  label='Classifier Free Guidance Scale')
307
  with gr.Column():
308
  steps_slider = gr.Slider(15, 100, value=50, step=1,
@@ -311,21 +314,38 @@ def run_demo():
311
  with gr.Column():
312
  seed = gr.Number(42, label='Seed')
313
  with gr.Column():
314
- crop_size = gr.Number(192, label='Crop size')
315
  # crop_size = 192
316
  run_btn = gr.Button('Generate', variant='primary', interactive=True)
317
  with gr.Row():
318
- view_gallery = gr.Gallery(interactive=False,show_label=False, container=True, preview=True, allow_preview=True, height=400)
319
- normal_gallery = gr.Gallery(interactive=False,show_label=False, container=True, preview=True, allow_preview=True, height=400 )
 
 
 
 
 
 
 
 
 
 
 
320
 
321
- first_stage = run_btn.click(fn=partial(preprocess, predictor),
 
 
 
 
 
322
  inputs=[input_image, input_processing],
323
  outputs=[processed_image_highres, processed_image], queue=True
324
  ).success(fn=partial(run_pipeline, pipeline, cfg),
325
  inputs=[processed_image_highres, scale_slider, steps_slider, seed, crop_size],
326
- outputs=[view_gallery, normal_gallery]
 
 
327
  )
328
-
329
  demo.queue().launch(share=True, max_threads=80)
330
 
331
 
 
215
  images_pred = [save_image(images_pred[i]) for i in range(bsz)]
216
 
217
  out = images_pred + normals_pred
218
+ return *out, images_pred, normals_pred
219
 
220
 
221
  @dataclass
 
285
  gr.Examples(
286
  examples=example_fns,
287
  inputs=[input_image],
288
+ # outputs=[input_image],
289
  cache_examples=False,
290
  label='Examples (click one of the images below to start)',
291
  examples_per_page=30
 
297
  with gr.Accordion('Advanced options', open=True):
298
  with gr.Row():
299
  with gr.Column():
300
+ input_processing = gr.CheckboxGroup(['Background Removal'],
301
+ label='Input Image Preprocessing',
302
+ value=['Background Removal'],
303
+ info='untick this, if masked image with alpha channel')
304
  with gr.Column():
305
  output_processing = gr.CheckboxGroup(['Background Removal'], label='Output Image Postprocessing', value=[])
306
  with gr.Row():
307
  with gr.Column():
308
+ scale_slider = gr.Slider(1, 5, value=3, step=1,
309
  label='Classifier Free Guidance Scale')
310
  with gr.Column():
311
  steps_slider = gr.Slider(15, 100, value=50, step=1,
 
314
  with gr.Column():
315
  seed = gr.Number(42, label='Seed')
316
  with gr.Column():
317
+ crop_size = gr.Number(210, label='Crop size')
318
  # crop_size = 192
319
  run_btn = gr.Button('Generate', variant='primary', interactive=True)
320
  with gr.Row():
321
+ view_1 = gr.Image(interactive=False, height=240, show_label=False)
322
+ view_2 = gr.Image(interactive=False, height=240, show_label=False)
323
+ view_3 = gr.Image(interactive=False, height=240, show_label=False)
324
+ view_4 = gr.Image(interactive=False, height=240, show_label=False)
325
+ view_5 = gr.Image(interactive=False, height=240, show_label=False)
326
+ view_6 = gr.Image(interactive=False, height=240, show_label=False)
327
+ with gr.Row():
328
+ normal_1 = gr.Image(interactive=False, height=240, show_label=False)
329
+ normal_2 = gr.Image(interactive=False, height=240, show_label=False)
330
+ normal_3 = gr.Image(interactive=False, height=240, show_label=False)
331
+ normal_4 = gr.Image(interactive=False, height=240, show_label=False)
332
+ normal_5 = gr.Image(interactive=False, height=240, show_label=False)
333
+ normal_6 = gr.Image(interactive=False, height=240, show_label=False)
334
 
335
+ with gr.Row():
336
+ view_gallery = gr.Gallery(interactive=False, show_label=False, container=True, preview=True, allow_preview=True, height=400)
337
+ normal_gallery = gr.Gallery(interactive=False, show_label=False, container=True, preview=True, allow_preview=True, height=400)
338
+
339
+
340
+ run_btn.click(fn=partial(preprocess, predictor),
341
  inputs=[input_image, input_processing],
342
  outputs=[processed_image_highres, processed_image], queue=True
343
  ).success(fn=partial(run_pipeline, pipeline, cfg),
344
  inputs=[processed_image_highres, scale_slider, steps_slider, seed, crop_size],
345
+ outputs=[view_1, view_2, view_3, view_4, view_5, view_6,
346
+ normal_1, normal_2, normal_3, normal_4, normal_5, normal_6,
347
+ view_gallery, normal_gallery]
348
  )
 
349
  demo.queue().launch(share=True, max_threads=80)
350
 
351
 
requirements.txt CHANGED
@@ -28,5 +28,4 @@ torch_efficient_distloss
28
  tensorboard
29
  rembg
30
  segment_anything
31
- streamlit==1.22.0
32
- fire
 
28
  tensorboard
29
  rembg
30
  segment_anything
31
+