merve (HF staff) committed
Commit 81d10ae
1 Parent(s): 7741d6e

Create app.py

Files changed (1)
  1. app.py +44 -0
app.py ADDED
@@ -0,0 +1,44 @@
+ import gradio as gr
+ from transformers import pipeline
+ from PIL import Image
+ from gradio_client import Client
+ import spaces
+ import tempfile
+
+ # Load both locally hosted depth-estimation pipelines once at startup.
+ dpt_beit = pipeline(task="depth-estimation", model="Intel/dpt-beit-base-384")
+ depth_anything = pipeline(task="depth-estimation", model="nielsr/depth-anything-small")
+
+ @spaces.GPU
+ def depth_anything_inference(image):
+     # The pipeline returns a dict; "depth" is the depth map as a PIL image.
+     return depth_anything(image)["depth"]
+
+ @spaces.GPU
+ def dpt_beit_inference(image):
+     return dpt_beit(image)["depth"]
+
+ def dpt_large(image):
+     # DPT-Large runs in a separate Space, queried through gradio_client.
+     try:
+         # The client call takes a file path, so write the PIL image to a temporary file first.
+         with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as tmp:
+             image.save(tmp.name)
+         client = Client("https://nielsr-dpt-depth-estimation.hf.space/")
+         return Image.open(client.predict(tmp.name))
+     except Exception:
+         gr.Warning("The DPT-Large Space is currently unavailable. Please try again later.")
+         return None
+
+ def infer(image):
+     return dpt_large(image), dpt_beit_inference(image), depth_anything_inference(image)
+
+ iface = gr.Interface(fn=infer,
+                      inputs=gr.Image(type="pil"),
+                      outputs=[gr.Image(type="pil", label="DPT-Large"),
+                               gr.Image(type="pil", label="DPT with BeiT Backbone"),
+                               gr.Image(type="pil", label="Depth Anything")],
+                      title="Compare Depth Estimation Models",
+                      description="In this Space you can compare various depth estimation models.",
+                      examples=[["bee.JPG"]])
+ iface.launch(debug=True)
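
For reference, a minimal standalone sketch of how one of the pipelines above behaves outside Gradio, assuming bee.JPG (the example image referenced in the Interface) is available next to the script; the depth-estimation pipeline returns a dict whose "depth" entry is the depth map as a PIL image:

    from transformers import pipeline
    from PIL import Image

    # Same checkpoint as in the Space; "depth" holds the predicted depth map as a PIL image.
    depth_anything = pipeline(task="depth-estimation", model="nielsr/depth-anything-small")
    result = depth_anything(Image.open("bee.JPG"))
    result["depth"].save("bee_depth.png")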