Baptlem committed
Commit: 4598830
1 parent: ce09356

Update app.py

Files changed (1)
  1. app.py +11 -2
app.py CHANGED
@@ -54,7 +54,7 @@ controlnet_version = "coyo-500k"
 low_threshold = 100
 high_threshold = 200
 
-pipe, params = load_sb_pipe(controlnet_version)
+
 
 # pipe.enable_xformers_memory_efficient_attention()
 # pipe.enable_model_cpu_offload()
@@ -68,10 +68,13 @@ def pipe_inference(
         resolution=128,
         num_inference_steps=50,
         guidance_scale=7.5,
+        model="coyo-500k",
         seed=0,
         negative_prompt="",
         ):
-    print("Entered pipe...")
+    print("Loading pipe")
+    pipe, params = load_sb_pipe(model)
+
     if not isinstance(image, np.ndarray):
         image = np.array(image)
 
@@ -188,6 +191,11 @@ def create_demo(process, max_images=12, default_num_images=4):
                     maximum=30.0,
                     value=7.5,
                     step=0.1)
+                model = gr.Dropdown(choices=["coyo-500k", "bridge-2M"],
+                                    value="coyo-500k",
+                                    label="Model used for inference",
+                                    info="Find every models at https://huggingface.co/Baptlem/baptlem-controlnet"
+                                    ),
                 seed = gr.Slider(label='Seed',
                                  minimum=-1,
                                  maximum=2147483647,
@@ -213,6 +221,7 @@ def create_demo(process, max_images=12, default_num_images=4):
                 #canny_high_threshold,
                 num_steps,
                 guidance_scale,
+                model,
                 seed,
                 n_prompt,
             ]
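
For context, here is a minimal sketch of the flow this commit sets up: the checkpoint chosen in the new Gradio Dropdown is passed into pipe_inference, which now calls load_sb_pipe itself instead of relying on a module-level pipe. Everything beyond what the diff shows is an illustrative assumption, not part of the commit: load_sb_pipe is stubbed out here, the lru_cache wrapper (so that switching checkpoints does not reload the pipeline on every call) is an added suggestion, and the Blocks layout is heavily simplified. Note also that in the committed code the trailing comma after the gr.Dropdown(...) call appears to bind model to a one-element tuple rather than to the component itself; the sketch below drops that comma.

# Sketch only -- not the repository's code. load_sb_pipe is a stand-in for
# the project's loader, which the diff suggests returns a (pipe, params) pair.
import functools

import gradio as gr
import numpy as np


def load_sb_pipe(model):
    # Placeholder for the repo's own loader (hypothetical stub).
    return object(), {}


@functools.lru_cache(maxsize=2)
def get_pipe(model):
    # Cache per checkpoint name so changing the dropdown does not reload
    # the pipeline on every call (an assumption, not in the commit).
    return load_sb_pipe(model)


def pipe_inference(image,
                   prompt,
                   resolution=128,
                   num_inference_steps=50,
                   guidance_scale=7.5,
                   model="coyo-500k",
                   seed=0,
                   negative_prompt=""):
    pipe, params = get_pipe(model)
    if not isinstance(image, np.ndarray):
        image = np.array(image)
    # ... run the ControlNet pipeline with pipe/params here, as app.py does ...
    return image


with gr.Blocks() as demo:
    image = gr.Image(label="Input image")
    prompt = gr.Textbox(label="Prompt")
    # No trailing comma: model is the Dropdown component itself, so it can
    # be listed directly in the click() inputs below.
    model = gr.Dropdown(choices=["coyo-500k", "bridge-2M"],
                        value="coyo-500k",
                        label="Model used for inference")
    run = gr.Button("Run")
    result = gr.Image(label="Result")
    run.click(fn=lambda img, p, m: pipe_inference(img, p, model=m),
              inputs=[image, prompt, model],
              outputs=result)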