Jiaye Zou committed on
Commit
ca01e50
1 Parent(s): 2b9660f

update: description & examples

Browse files
.gitattributes CHANGED
@@ -1 +1,2 @@
1
  *.png filter=lfs diff=lfs merge=lfs -text
 
 
1
  *.png filter=lfs diff=lfs merge=lfs -text
2
+ *.jpg filter=lfs diff=lfs merge=lfs -text
app.py CHANGED
@@ -24,7 +24,7 @@ description = """
24
  <a href="https://github.com/MapItAnywhere/MapItAnywhere" target="_blank">Code</a>
25
  </h3>
26
  <p align="center">
27
- Mapper generates birds-eye-view maps from first person view monocular images. Try our demo by uploading your own images.
28
  </p>
29
  """
30
 
@@ -110,21 +110,22 @@ def run(input_img):
110
  gravity, camera = calibrator.run(image, exif=exif)
111
 
112
  data = preprocess_pipeline(image, gravity, camera)
113
-
114
  res = model(data)
115
 
116
- plot_images([image], pad=0., adaptive=True)
117
- fig1 = plt.gcf()
118
-
119
  prediction = res['output']
120
  rgb_prediction = one_hot_argmax_to_rgb(prediction, 6).squeeze(0).permute(1, 2, 0).cpu().long().numpy()
121
  valid = res['valid_bev'].squeeze(0)[..., :-1]
122
  rgb_prediction[~valid.cpu().numpy()] = 255
123
 
124
- plot_images([rgb_prediction], pad=0., adaptive=True)
125
- fig2 = plt.gcf()
 
 
126
 
127
- return fig1, fig2
 
 
 
128
 
129
  demo = gr.Interface(
130
  fn=run,
@@ -132,8 +133,8 @@ demo = gr.Interface(
132
  gr.File(file_types=["image"], label="Input Image")
133
  ],
134
  outputs=[
135
- gr.Plot(label="Inputs", format="png"),
136
- gr.Plot(label="Outputs", format="png"),
137
  ],
138
- description=description,)
 
139
  demo.launch(share=False, server_name="0.0.0.0")
 
24
  <a href="https://github.com/MapItAnywhere/MapItAnywhere" target="_blank">Code</a>
25
  </h3>
26
  <p align="center">
27
+ Mapper generates birds-eye-view maps from in-the-wild monocular first-person view images. You can try our demo by uploading your images or using the examples provided. Tip: You can also try out images across the world using <a href="https://www.mapillary.com/app" target="_blank">Mapillary</a> &#128521;
28
  </p>
29
  """
30
 
 
110
  gravity, camera = calibrator.run(image, exif=exif)
111
 
112
  data = preprocess_pipeline(image, gravity, camera)
 
113
  res = model(data)
114
 
 
 
 
115
  prediction = res['output']
116
  rgb_prediction = one_hot_argmax_to_rgb(prediction, 6).squeeze(0).permute(1, 2, 0).cpu().long().numpy()
117
  valid = res['valid_bev'].squeeze(0)[..., :-1]
118
  rgb_prediction[~valid.cpu().numpy()] = 255
119
 
120
+ plot_images([image, rgb_prediction], titles=["Input Image", "Prediction"], pad=2, adaptive=True)
121
+
122
+ return plt.gcf()
123
+
124
 
125
+ examples = [
126
+ ["examples/219538346289766_undistorted.jpg"],
127
+ ["examples/812412413010936_undistorted.jpg"],
128
+ ]
129
 
130
  demo = gr.Interface(
131
  fn=run,
 
133
  gr.File(file_types=["image"], label="Input Image")
134
  ],
135
  outputs=[
136
+ gr.Plot(label="Prediction", format="png"),
 
137
  ],
138
+ description=description,
139
+ examples=examples)
140
  demo.launch(share=False, server_name="0.0.0.0")
examples/219538346289766_undistorted.jpg ADDED

Git LFS Details

  • SHA256: 4b74707e3d0391b090a945dca082f08577ca3fa013bb0c858632042c2885b574
  • Pointer size: 130 Bytes
  • Size of remote file: 91.9 kB
examples/812412413010936_undistorted.jpg ADDED

Git LFS Details

  • SHA256: 09a38947833bbec9487090f5046eb28c471b59bea2f5ed48bb571de397fbaa04
  • Pointer size: 131 Bytes
  • Size of remote file: 110 kB