# app.py: Gradio demo for monocular depth estimation with Intel/dpt-hybrid-midas

import numpy as np
import torch
import gradio as gr
from PIL import Image
from transformers import pipeline

# Load the depth-estimation pipeline once at startup so every request reuses the model
depth_estimator = pipeline(task="depth-estimation",
                           model="Intel/dpt-hybrid-midas")

def launch(input_image):
    # Run the model; it returns a dict containing the raw "predicted_depth" tensor
    out = depth_estimator(input_image)

    # Resize the prediction to the input image size (PIL size is (width, height))
    prediction = torch.nn.functional.interpolate(
        out["predicted_depth"].unsqueeze(1),
        size=input_image.size[::-1],
        mode="bicubic",
        align_corners=False,
    )

    # Normalize the depth map to the 0-255 range and convert it back to a PIL image
    output = prediction.squeeze().numpy()
    formatted = (output * 255 / np.max(output)).astype("uint8")
    depth = Image.fromarray(formatted)
    return depth

# Build and launch the Gradio interface: a PIL image in, a depth-map image out
iface = gr.Interface(launch,
                     inputs=gr.Image(type='pil'),
                     outputs=gr.Image(type='pil'))
iface.launch()
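
# Usage sketch: the package list below is an assumption about a typical environment
# for this script, not a pinned requirements file.
#   pip install transformers torch gradio pillow numpy
#   python app.py
# Gradio serves the demo on http://127.0.0.1:7860 by default.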