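"""Gradio demo for ZoeDepth monocular depth estimation.

Returns a colorized depth map for display and a downloadable 16-bit raw
depth PNG in which the depth values are scaled by 256.
"""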
import gradio as gr
import torch
from utils import colorize
from PIL import Image
import tempfile

# Use the GPU when one is available, otherwise fall back to CPU.
DEVICE = 'cuda' if torch.cuda.is_available() else 'cpu'

# Pretrained ZoeD_N model fetched from torch.hub, set to inference mode.
model = torch.hub.load('isl-org/ZoeDepth', "ZoeD_N", pretrained=True).to(DEVICE).eval()

def predict_depth(model, image):
    # infer_pil takes a PIL image and returns the predicted depth map
    # as a numpy array.
    depth = model.infer_pil(image)
    return depth

def on_submit(image):
    depth = predict_depth(model, image)
    # Colorized depth map for display (reversed grayscale colormap).
    colored_depth = colorize(depth, cmap='gray_r')
    # Save the raw depth as a 16-bit PNG, scaled by 256 so the original
    # values can be recovered as pixel_value / 256.
    tmp = tempfile.NamedTemporaryFile(suffix='.png', delete=False)
    raw_depth = Image.fromarray((depth * 256).astype('uint16'))
    raw_depth.save(tmp.name)
    return [colored_depth, tmp.name]
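
# Illustrative helper, not used by the demo itself: the inverse of the 16-bit
# encoding above. Reads a saved depth PNG and undoes the 256 multiplier.
def load_raw_depth(path):
    import numpy as np
    raw = np.asarray(Image.open(path), dtype='float32')
    return raw / 256.0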

iface = gr.Interface(
    fn=on_submit,
    inputs=gr.Image(type='pil', label="Input Image"),
    outputs=[
        gr.Image(type='numpy', label="Depth Map"),
        gr.File(label="16-bit raw depth, multiplier:256")
    ],
    title="ZoeDepth",
    description="""Unofficial demo for **ZoeDepth: Zero-shot Transfer by Combining Relative and Metric Depth**.""",
    css="""
    #img-display-container {
        max-height: 50vh;
    }
    #img-display-input {
        max-height: 40vh;
    }
    #img-display-output {
        max-height: 40vh;
    }
    """
)

if __name__ == '__main__':
    iface.launch()
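
# Illustrative usage outside the UI ('example.jpg' is a placeholder path):
#   colored, depth_path = on_submit(Image.open('example.jpg'))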