taesiri committed
Commit f3cff0c
1 Parent(s): 2008ce0
README.md CHANGED
@@ -4,10 +4,11 @@ emoji: 📚
 colorFrom: red
 colorTo: yellow
 sdk: gradio
+sdk_version: 3.0.5
 app_file: app.py
 pinned: false
 ---
 
 # Convolutional Hough Matching Networks
 
-A demo for Convolutional Hough Matching Networks. [[Paper](https://arxiv.org/abs/2109.05221)] [[Official Github Repo](https://github.com/juhongm999/chm.git)]
+A demo for Convolutional Hough Matching Networks. [[Paper](https://arxiv.org/abs/2109.05221)] [[Official Github Repo](https://github.com/juhongm999/chm.git)]
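For reference, the Space front matter after this change should read roughly as below. Only `sdk_version` is new; any fields above the hunk (such as `title`) are not visible in this diff and are omitted here.

```yaml
emoji: 📚
colorFrom: red
colorTo: yellow
sdk: gradio
sdk_version: 3.0.5
app_file: app.py
pinned: false
```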
app.py CHANGED
@@ -26,7 +26,13 @@ from model import chmnet
 from model.base.geometry import Geometry
 
 # Downloading the Model
-gdown.download(id="1zsJRlAsoOn5F0GTCprSFYwDDfV85xDy6", output="pas_psi.pt", quiet=False)
+# gdown.download(id="1zsJRlAsoOn5F0GTCprSFYwDDfV85xDy6", output="pas_psi.pt", quiet=False)
+
+md5 = "6b7b4d7bad7f89600fac340d6aa7708b"
+
+gdown.cached_download(
+    url="1zsJRlAsoOn5F0GTCprSFYwDDfV85xDy6", path="pas_psi.pt", quiet=False, md5=md5
+)
 
 # Model Initialization
 args = dict(
@@ -189,27 +195,56 @@ def generate_correspondences(
 )
 
 
-# GRADIO APP
-title = "Correspondence Matching with Convolutional Hough Matching Networks "
-description = "Performs keypoint transform from a 7x7 gird on the source image to the target image. Use the sliders to adjust the grid."
-article = "<p style='text-align: center'><a href='https://github.com/juhongm999/chm' target='_blank'>Original Github Repo</a></p>"
-
-iface = gr.Interface(
+# Gradio App
+main = gr.Interface(
     fn=generate_correspondences,
     inputs=[
-        gr.inputs.Image(shape=(240, 240), type="pil"),
-        gr.inputs.Image(shape=(240, 240), type="pil"),
-        gr.inputs.Slider(minimum=1, maximum=240, step=1, default=15, label="Min X"),
-        gr.inputs.Slider(minimum=1, maximum=240, step=1, default=215, label="Max X"),
-        gr.inputs.Slider(minimum=1, maximum=240, step=1, default=15, label="Min Y"),
-        gr.inputs.Slider(minimum=1, maximum=240, step=1, default=215, label="Max Y"),
+        gr.Image(shape=(240, 240), type="pil"),
+        gr.Image(shape=(240, 240), type="pil"),
+        gr.Slider(minimum=1, maximum=240, step=1, default=15, label="Min X"),
+        gr.Slider(minimum=1, maximum=240, step=1, default=215, label="Max X"),
+        gr.Slider(minimum=1, maximum=240, step=1, default=15, label="Min Y"),
+        gr.Slider(minimum=1, maximum=240, step=1, default=215, label="Max Y"),
+    ],
+    allow_flagging="never",
+    outputs="plot",
+    examples=[
+        ["./examples/sample1.jpeg", "./examples/sample2.jpeg", 17, 223, 17, 223],
+        [
+            "./examples/Red_Winged_Blackbird_0012_6015.jpg",
+            "./examples/Red_Winged_Blackbird_0025_5342.jpg",
+            17,
+            223,
+            17,
+            223,
+        ],
+        [
+            "./examples/Yellow_Headed_Blackbird_0026_8545.jpg",
+            "./examples/Yellow_Headed_Blackbird_0020_8549.jpg",
+            17,
+            223,
+            17,
+            223,
+        ],
     ],
-    outputs=[gr.outputs.Plot(type="matplotlib")],
-    enable_queue=True,
-    title=title,
-    description=description,
-    article=article,
-    examples=[["sample1.jpeg", "sample2.jpeg", 15, 215, 15, 215]],
 )
 
-iface.launch()
+
+blocks = gr.Blocks()
+with blocks:
+
+    gr.Markdown(
+        """
+        # Correspondence Matching with Convolutional Hough Matching Networks
+        Performs keypoint transform from a 7x7 grid on the source image to the target image. Use the sliders to adjust the grid.
+        [Original Paper](https://arxiv.org/abs/2103.16831) - [Github Page](https://github.com/juhongm999/chm)
+        """
+    )
+
+    gr.TabbedInterface([main], ["Main"])
+
+
+blocks.launch(
+    debug=True,
+    enable_queue=False,
+)
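A note on the new download step: `gdown.cached_download` takes a full URL, whereas the old `gdown.download(id=...)` call took a bare Google Drive file id. Below is a minimal sketch of how that line could be written, assuming the id from the diff is expanded into gdown's usual `uc?id=` URL form; the `CHECKPOINT_*` names are illustrative, not part of the commit.

```python
import gdown

# Sketch only: expand the Drive file id used in the commit into a full URL,
# since gdown.cached_download takes url= rather than a bare id.
CHECKPOINT_ID = "1zsJRlAsoOn5F0GTCprSFYwDDfV85xDy6"
CHECKPOINT_MD5 = "6b7b4d7bad7f89600fac340d6aa7708b"

gdown.cached_download(
    url=f"https://drive.google.com/uc?id={CHECKPOINT_ID}",
    path="pas_psi.pt",
    md5=CHECKPOINT_MD5,
    quiet=False,
)
```

With the md5 pinned, the file is re-downloaded only when the cached copy is missing or fails the hash check.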
examples/Red_Winged_Blackbird_0012_6015.jpg ADDED
examples/Red_Winged_Blackbird_0025_5342.jpg ADDED
examples/Yellow_Headed_Blackbird_0020_8549.jpg ADDED
examples/Yellow_Headed_Blackbird_0026_8545.jpg ADDED
sample1.jpeg → examples/sample1.jpeg RENAMED
File without changes
sample2.jpeg → examples/sample2.jpeg RENAMED
File without changes
requirements.txt CHANGED
@@ -1,11 +1,12 @@
-gradio==2.4.5
-matplotlib==3.4.3
-numpy==1.21.2
+gradio==3.0.5
 pandas==1.3.4
-Pillow==8.4.0
 requests==2.26.0
 scipy==1.7.1
-tensorboardX==2.4.1
-torch==1.10.0
-torchvision==0.11.1
+tensorboardX==2.5
+matplotlib==3.5.2
+numpy==1.21.5
+Pillow==9.0.1
+scikit-image==0.19.2
+torch==1.11.0
+torchvision==0.12.0
 gdown
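Since several pins changed at once, here is a small sketch for sanity-checking that the running environment matches the new requirements. The package names and versions come from the list above; the script itself is not part of the commit.

```python
# Sketch: verify a few of the updated pins at runtime.
from importlib.metadata import PackageNotFoundError, version

pinned = {
    "gradio": "3.0.5",
    "torch": "1.11.0",
    "torchvision": "0.12.0",
    "Pillow": "9.0.1",
    "scikit-image": "0.19.2",
}

for name, expected in pinned.items():
    try:
        installed = version(name)
    except PackageNotFoundError:
        print(f"{name}: not installed (expected {expected})")
        continue
    flag = "OK" if installed == expected else f"differs from pinned {expected}"
    print(f"{name}: {installed} ({flag})")
```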