Spaces:
Runtime error
Runtime error
File size: 712 Bytes
339d133 4d78218 339d133 03a517d 339d133 03a517d 339d133 4d78218 339d133 03a517d 4d78218 2642a92 4d78218 f1fe0df 4d78218 f312c0e 339d133 4d78218 339d133 4d78218 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 |
import torch
import cv2
import numpy as np
import gradio as gr
from PIL import Image
# Pull the pretrained YOLOv5-small detector from the Ultralytics hub
# (downloads weights on first run — requires network access).
model = torch.hub.load('ultralytics/yolov5', 'yolov5s', pretrained=True)

# Inference-time detection settings applied to the loaded model.
_inference_settings = {
    "conf": 0.25,         # confidence threshold: drop boxes scored below this
    "iou": 0.45,          # IoU threshold used by non-max suppression
    "agnostic": False,    # NMS is per-class, not class-agnostic
    "multi_label": False, # at most one label per box
    "max_det": 1000,      # hard cap on detections per image
}
for _name, _value in _inference_settings.items():
    setattr(model, _name, _value)
def detect(img):
    """Run YOLOv5 inference on ``img`` and return the annotated image.

    Parameters
    ----------
    img : image accepted by the YOLOv5 model (e.g. PIL Image or ndarray).

    Returns
    -------
    numpy.ndarray
        The input image with detection boxes and labels drawn on it.
    """
    results = model(img, size=640)
    # results.render() draws the detections in place and returns a list of
    # annotated arrays; squeeze drops the single-image batch dimension.
    # BUGFIX: original called `npnp.squeeze`, a typo causing a NameError.
    new_image = np.squeeze(results.render())
    return new_image
# Gradio input component: incoming images are resized to 192x192.
# NOTE(review): `gr.inputs.Image` is the legacy (pre-3.x) Gradio namespace,
# removed in newer releases — presumably this Space pins an old gradio
# version; verify, or migrate to `gr.Image`.
img = gr.inputs.Image(shape=(192, 192))
# Launch code left disabled — uncomment to serve the demo.
#intf = gr.Interface(fn=detect, inputs=img, outputs='image')
#intf.launch(inline=False)
|