from transformers import GLPNFeatureExtractor, GLPNForDepthEstimation  # used only by the commented-out GLPN variant below
from transformers import AutoFeatureExtractor, AutoModelForDepthEstimation
import torch
import numpy as np
from PIL import Image
import requests  # used only by the commented-out URL example below
import gradio as gr
import os
# Alternative setup, kept for reference: a sample COCO image and the GLPN model
# trained on NYU Depth v2.
# url = "http://images.cocodataset.org/val2017/000000039769.jpg"
# image = Image.open(requests.get(url, stream=True).raw)
# feature_extractor = GLPNFeatureExtractor.from_pretrained("vinvino02/glpn-nyu")
# model = GLPNForDepthEstimation.from_pretrained("vinvino02/glpn-nyu")
# Load Intel's DPT-Large processor and depth-estimation model from the Hugging Face Hub
feature_extractor = AutoFeatureExtractor.from_pretrained("Intel/dpt-large")
model = AutoModelForDepthEstimation.from_pretrained("Intel/dpt-large")
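# An optional tweak (an assumption, not part of the original app): run inference on
# GPU when one is available. predict() would then also need inputs = inputs.to(device),
# so this is left as a commented sketch to keep the CPU behavior unchanged.
# device = "cuda" if torch.cuda.is_available() else "cpu"
# model = model.to(device)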
# Gather example images from the local "examples" directory for the Gradio gallery
example_list = [["examples/" + example] for example in os.listdir("examples")]
def predict(image):
    # Preprocess the input PIL image into model-ready tensors
    inputs = feature_extractor(images=image, return_tensors="pt")
    with torch.no_grad():
        outputs = model(**inputs)
        predicted_depth = outputs.predicted_depth
    # Interpolate the predicted depth map back to the original image size
    # (PIL's .size is (width, height), so reverse it to (height, width))
    prediction = torch.nn.functional.interpolate(
        predicted_depth.unsqueeze(1),
        size=image.size[::-1],
        mode="bicubic",
        align_corners=False,
    )
    # Normalize to the 0-255 range and convert to a grayscale PIL image
    output = prediction.squeeze().cpu().numpy()
    formatted = (output * 255 / np.max(output)).astype("uint8")
    depth_image = Image.fromarray(formatted)
    return depth_image
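# A quick local sanity check (a sketch; "examples/room.jpg" is a hypothetical path):
# depth = predict(Image.open("examples/room.jpg"))
# depth.save("depth.png")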
# Gradio App
title = "Depth Estimation with DPT"
description = "Converts an ordinary RGB image into a depth map using Intel's DPT-Large model."
demo = gr.Interface(
    fn=predict,
    inputs=gr.Image(type="pil"),
    outputs=gr.Image(type="pil"),
    title=title,
    description=description,
    examples=example_list,
)
demo.launch(debug=False)
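# Note: when running outside Hugging Face Spaces, demo.launch(share=True) also
# creates a temporary public URL for sharing the demo.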