Committed by LiheYoung and hysts (HF staff)
Commit 4bb0120
1 Parent(s): bf5a40f

Add slider view and cache examples (#3)


- Add slider view (21401baacb957c78a138966bc35a6ef8db6af5d5)
- Cache examples (392f705912d2c6bd326dfcbe79becb9f3cfc7792)


Co-authored-by: hysts <hysts@users.noreply.huggingface.co>

Files changed (2)
  1. app.py +9 -6
  2. requirements.txt +2 -1
app.py CHANGED
@@ -8,6 +8,7 @@ import torch
 import torch.nn.functional as F
 from torchvision.transforms import Compose
 import tempfile
+from gradio_imageslider import ImageSlider
 
 from depth_anything.dpt import DPT_DINOv2
 from depth_anything.util.transform import Resize, NormalizeImage, PrepareForNet
@@ -58,11 +59,13 @@ with gr.Blocks(css=css) as demo:
 
     with gr.Row():
         input_image = gr.Image(label="Input Image", type='numpy', elem_id='img-display-input')
-        depth_image = gr.Image(label="Depth Map", elem_id='img-display-output')
+        depth_image_slider = ImageSlider(label="Depth Map with Slider View", elem_id='img-display-output', position=0)
     raw_file = gr.File(label="16-bit raw depth (can be considered as disparity)")
     submit = gr.Button("Submit")
 
     def on_submit(image):
+        original_image = image.copy()
+
         h, w = image.shape[:2]
 
         image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) / 255.0
@@ -80,16 +83,16 @@ with gr.Blocks(css=css) as demo:
         depth = depth.cpu().numpy().astype(np.uint8)
         colored_depth = cv2.applyColorMap(depth, cv2.COLORMAP_INFERNO)[:, :, ::-1]
 
-        return [colored_depth, tmp.name]
+        return [(original_image, colored_depth), tmp.name]
+
+    submit.click(on_submit, inputs=[input_image], outputs=[depth_image_slider, raw_file])
 
-    submit.click(on_submit, inputs=[input_image], outputs=[depth_image, raw_file])
-
     example_files = os.listdir('examples')
     example_files.sort()
     example_files = [os.path.join('examples', filename) for filename in example_files]
-    examples = gr.Examples(examples=example_files, inputs=[input_image])
+    examples = gr.Examples(examples=example_files, inputs=[input_image], outputs=[depth_image_slider, raw_file], fn=on_submit, cache_examples=True)
 
 
 if __name__ == '__main__':
     demo.queue().launch()
-
+
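The pattern this commit introduces can be tried in isolation. Below is a minimal sketch, not the Space's actual app.py: it assumes gradio_imageslider is installed, uses a hypothetical example path examples/demo.png, and swaps the depth model for a simple grayscale-to-colormap step so it stays self-contained. The ImageSlider output takes a (before, after) image pair, and cache_examples=True makes gr.Examples run the function on each example at startup and serve the stored result when an example is clicked.

    # Sketch of the slider-view + cached-examples pattern from this commit.
    # Assumptions: gradio_imageslider is installed; 'examples/demo.png' is a
    # placeholder path; the real depth model is replaced by a fake colormap step.
    import cv2
    import gradio as gr
    import numpy as np
    from gradio_imageslider import ImageSlider


    def fake_depth(image: np.ndarray):
        # Stand-in for on_submit(): keep the original and build a colored map.
        gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
        colored = cv2.applyColorMap(gray, cv2.COLORMAP_INFERNO)[:, :, ::-1]
        # ImageSlider expects a (left, right) image pair to compare with the slider.
        return (image, colored)


    with gr.Blocks() as demo:
        input_image = gr.Image(label="Input Image", type='numpy')
        slider_view = ImageSlider(label="Before / After", position=0)
        gr.Button("Submit").click(fake_depth, inputs=[input_image], outputs=[slider_view])
        # cache_examples=True runs fn on every example once at startup and stores
        # the outputs, so clicking an example returns the cached result instantly.
        gr.Examples(examples=['examples/demo.png'], inputs=[input_image],
                    outputs=[slider_view], fn=fake_depth, cache_examples=True)

    if __name__ == '__main__':
        demo.queue().launch()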
requirements.txt CHANGED
@@ -1,3 +1,4 @@
+gradio_imageslider
 torch
 torchvision
-opencv-python
+opencv-python