nej-dot committed
Commit aef3692 · verified · 1 Parent(s): d4b1080

Update app.py

Files changed (1)
  1. app.py +26 -40
app.py CHANGED
@@ -5,18 +5,7 @@ from __future__ import annotations
import gradio as gr
import torch

- from app_canny import create_demo as create_demo_canny
- from app_depth import create_demo as create_demo_depth
- from app_ip2p import create_demo as create_demo_ip2p
- from app_lineart import create_demo as create_demo_lineart
- from app_mlsd import create_demo as create_demo_mlsd
- from app_normal import create_demo as create_demo_normal
- from app_openpose import create_demo as create_demo_openpose
- from app_scribble import create_demo as create_demo_scribble
- from app_scribble_interactive import create_demo as create_demo_scribble_interactive
- from app_segmentation import create_demo as create_demo_segmentation
- from app_shuffle import create_demo as create_demo_shuffle
- from app_softedge import create_demo as create_demo_softedge
+ from preprocessor import Preprocessor

DESCRIPTION = "# ControlNet v1.1"

@@ -31,31 +20,28 @@ with gr.Blocks(css="style.css") as demo:
        visible=False, # Hiding the duplicate button
    )

-     with gr.Accordion(label="Preprocessors", open=False):
-         with gr.TabItem("Canny"):
-             create_demo_canny()
-         with gr.TabItem("MLSD"):
-             create_demo_mlsd()
-         with gr.TabItem("Scribble"):
-             create_demo_scribble()
-         with gr.TabItem("Scribble Interactive"):
-             create_demo_scribble_interactive()
-         with gr.TabItem("SoftEdge"):
-             create_demo_softedge()
-         with gr.TabItem("OpenPose"):
-             create_demo_openpose()
-         with gr.TabItem("Segmentation"):
-             create_demo_segmentation()
-         with gr.TabItem("Depth"):
-             create_demo_depth()
-         with gr.TabItem("Normal map"):
-             create_demo_normal()
-         with gr.TabItem("Lineart"):
-             create_demo_lineart()
-         with gr.TabItem("Content Shuffle"):
-             create_demo_shuffle()
-         with gr.TabItem("Instruct Pix2Pix"):
-             create_demo_ip2p()
-
- if __name__ == "__main__":
-     demo.queue(max_size=20).launch()
+ # Instantiate the Preprocessor class
+ preprocessor = Preprocessor()
+
+ # Function to apply preprocessor to input image
+ def apply_preprocessor(input_image, preprocessor_name):
+     # Load the desired preprocessor
+     preprocessor.load(preprocessor_name)
+     # Apply the preprocessor to the input image
+     processed_image = preprocessor(input_image)
+     # Convert processed image from numpy array to PIL image
+     processed_image_pil = PIL.Image.fromarray(processed_image)
+     return processed_image_pil
+
+ # Gradio interface
+ image = gr.inputs.Image()
+ preprocessor_name = gr.inputs.Dropdown(choices=["Canny", "Midas", "MLSD", "Openpose", "PidiNet", "NormalBae", "Lineart", "LineartAnime", "ContentShuffle", "DPT", "UPerNet"], label="Preprocessor")
+ run_button = gr.Button("Start")
+ output_image = gr.outputs.Image(label="Output")
+
+ # Define processing function for Gradio
+ def process_image(input_image, preprocessor_name):
+     return apply_preprocessor(input_image, preprocessor_name)
+
+ # Link inputs and outputs to function using Gradio's Interface class
+ gr.Interface(fn=process_image, inputs=[image, preprocessor_name], outputs=output_image, title="Preprocessor GUI").launch()
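As committed, the new module-level code references PIL.Image without importing it, builds its UI with the legacy gr.inputs / gr.outputs namespaces (removed in recent Gradio releases), leaves run_button unconnected, and drops the demo.queue(max_size=20).launch() entry point even though the gr.Blocks demo remains. Below is a minimal sketch, not part of the commit, of one way the same preprocessor UI could be wired into the existing Blocks demo with the current Gradio component API. It assumes Preprocessor.load(name) and calling the instance on an image behave as used in the diff above, and that the preprocessor returns a numpy array.

# Hypothetical sketch only; not the committed code.
import gradio as gr
import PIL.Image  # needed for the numpy -> PIL conversion (not imported in the committed code)

from preprocessor import Preprocessor  # assumed to exist in the Space, as in the diff

preprocessor = Preprocessor()

# Preprocessor names as listed in the committed dropdown
PREPROCESSOR_NAMES = [
    "Canny", "Midas", "MLSD", "Openpose", "PidiNet", "NormalBae",
    "Lineart", "LineartAnime", "ContentShuffle", "DPT", "UPerNet",
]


def apply_preprocessor(input_image, preprocessor_name):
    # Load the selected preprocessor, run it on the input, and return a PIL image for display.
    preprocessor.load(preprocessor_name)
    processed_image = preprocessor(input_image)
    return PIL.Image.fromarray(processed_image)


with gr.Blocks(css="style.css") as demo:
    gr.Markdown(DESCRIPTION := "# ControlNet v1.1")
    with gr.Row():
        with gr.Column():
            image = gr.Image(label="Input")
            preprocessor_name = gr.Dropdown(choices=PREPROCESSOR_NAMES, label="Preprocessor")
            run_button = gr.Button("Start")
        with gr.Column():
            output_image = gr.Image(label="Output")
    # Wire the button to the processing function instead of creating a separate gr.Interface,
    # so the existing Blocks app and its queue/launch entry point keep working.
    run_button.click(fn=apply_preprocessor, inputs=[image, preprocessor_name], outputs=output_image)

if __name__ == "__main__":
    demo.queue(max_size=20).launch()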