import gradio as gr
import torch

from utils import colorize  # colormap helper bundled with this Space
title = "Interactive demo: ZoeDepth"
description = "Unofficial Gradio Demo for using ZoeDepth: Zero-shot Transfer by Combining Relative and Metric Depth. ZoeDepth is a technique that lets you perform metric depth estimation from a single image. For more information, please refer to the <a href='https://arxiv.org/abs/2302.12288' style='text-decoration: underline;' target='_blank'> paper</a> or the <a href='https://github.com/isl-org/ZoeDepth' style='text-decoration: underline;' target='_blank'> Github </a> implementation. </p> To use it, simply upload an image or use one of the examples below and click 'Submit'. Results will show up in a few seconds."
examples = [["example.png"],["example_2.png"]]
repo = "isl-org/ZoeDepth"
# Load the ZoeD_NK checkpoint (trained on NYU Depth v2 + KITTI) via torch.hub;
# the weights are downloaded on the first run.
model_zoe_nk = torch.hub.load(repo, "ZoeD_NK", pretrained=True)

DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
zoe = model_zoe_nk.to(DEVICE)
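# Note: the isl-org/ZoeDepth hub also exposes single-domain checkpoints
# ("ZoeD_N" for indoor/NYU, "ZoeD_K" for outdoor/KITTI); as far as I can tell,
# swapping the model name above is the only change needed to try them here.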
def process_image(image):
    # infer_pil returns the metric depth map (in meters) as a numpy array
    depth = zoe.infer_pil(image)
    # map depth values to a color image for display
    colored_depth = colorize(depth, cmap="magma_r")
    return colored_depth
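# Sketch (not wired into the UI below): to expose the raw metric depth alongside
# the colorized map, the function could return a second value, and the Interface
# would need a matching second output component (e.g. gr.Number); the names here
# are illustrative only.
# def process_image_with_raw(image):
#     depth = zoe.infer_pil(image)                     # numpy array, meters
#     colored_depth = colorize(depth, cmap="magma_r")
#     return colored_depth, float(depth.mean())        # illustrative second output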
interface = gr.Interface(
    fn=process_image,
    inputs=gr.Image(type="pil"),
    outputs=gr.Image(type="pil", label="Depth"),
    title=title,
    description=description,
    examples=examples,
)
interface.launch(debug=True)
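# When running locally (outside Hugging Face Spaces), launch(share=True) can be
# used instead to get a temporary public URL for the demo.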