sea45 committed on
Commit
4dd7449
1 Parent(s): d4d3c51

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +31 -9
app.py CHANGED
@@ -1,18 +1,40 @@
1
  import gradio as gr
2
- from transformers import pipeline
 
 
3
  from PIL import Image
4
  import requests
 
5
 
6
  def greet(name):
7
- # load pipe
8
- pipe = pipeline(task="depth-estimation", model="LiheYoung/depth-anything-small-hf")
9
-
10
- # load image
11
- url = 'http://images.cocodataset.org/val2017/000000039769.jpg'
12
  image = Image.open(requests.get(url, stream=True).raw)
13
-
14
- # inference
15
- depth = pipe(image)["depth"]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
16
  return name+": " + depth
17
 
18
  # Gradio UI: a text box feeding greet(), with a text output.
  iface = gr.Interface(fn=greet, inputs="text", outputs="text")
 
1
  import gradio as gr
2
+
3
+ import torch
4
+ import numpy as np
5
  from PIL import Image
6
  import requests
7
+ from transformers import AutoImageProcessor, AutoModelForDepthEstimation
8
 
9
  def greet(name):
10
+
11
+
12
+
13
+ url = "http://images.cocodataset.org/val2017/000000039769.jpg"
 
14
  image = Image.open(requests.get(url, stream=True).raw)
15
+
16
+ image_processor = AutoImageProcessor.from_pretrained("LiheYoung/depth-anything-small-hf")
17
+ model = AutoModelForDepthEstimation.from_pretrained("LiheYoung/depth-anything-small-hf")
18
+
19
+ # prepare image for the model
20
+ inputs = image_processor(images=image, return_tensors="pt")
21
+
22
+ with torch.no_grad():
23
+ outputs = model(**inputs)
24
+ predicted_depth = outputs.predicted_depth
25
+
26
+ # interpolate to original size
27
+ prediction = torch.nn.functional.interpolate(
28
+ predicted_depth.unsqueeze(1),
29
+ size=image.size[::-1],
30
+ mode="bicubic",
31
+ align_corners=False,
32
+ )
33
+
34
+ # visualize the prediction
35
+ output = prediction.squeeze().cpu().numpy()
36
+ formatted = (output * 255 / np.max(output)).astype("uint8")
37
+ depth = Image.fromarray(formatted)
38
  return name+": " + depth
39
 
40
  # Gradio UI: a text box feeding greet(), with a text output.
  iface = gr.Interface(fn=greet, inputs="text", outputs="text")