# Spaces: Sleeping
# File size: 2,344 Bytes
# Revision hashes: 84dcfe3 2fea6fc 2ff1618 84dcfe3 2ff1618 2fea6fc ed1c000 2fea6fc
#                  84dcfe3 927cfc3 84dcfe3 ed1c000 84dcfe3 2fea6fc 84dcfe3 2fea6fc
#                  611a237 84dcfe3 2fea6fc 2ff1618 2fea6fc
import gradio as gr
import spaces
from huggingface_hub import hf_hub_download
import yolov9
def yolov9_inference(img_path, model_id, image_size, conf_threshold, iou_threshold):
    """Run YOLOv9 detection on one image and return it with boxes drawn.

    Args:
        img_path: Filesystem path of the input image.
        model_id: Weights identifier/path handed to ``yolov9.load``.
        image_size: Inference resolution (pixels) passed as ``size=``.
        conf_threshold: Minimum detection confidence to keep a box.
        iou_threshold: IoU threshold for non-maximum suppression.

    Returns:
        The first rendered image (detections overlaid) from the results.
    """
    # Load weights fresh for every call; thresholds are set as model attributes.
    detector = yolov9.load(model_id)
    detector.conf = conf_threshold
    detector.iou = iou_threshold

    # Run inference, then let the results object draw the bounding boxes.
    predictions = detector(img_path, size=image_size)
    rendered = predictions.render()
    return rendered[0]
def app():
    """Build the Gradio UI: input controls on the left, output image on the right.

    Intended to be called inside an open ``gr.Blocks()`` context; it only adds
    components and wires the Inference button to ``yolov9_inference``.
    """
    with gr.Blocks():
        with gr.Row():
            with gr.Column():
                img_path = gr.Image(type="filepath", label="Image")
                model_path = gr.Dropdown(
                    label="Model",
                    choices=[
                        "best.pt",
                    ],
                    # Fix: the default value must be one of `choices`
                    # (was "./best.pt", which is not in the list).
                    value="best.pt",
                )
                image_size = gr.Slider(
                    label="Image Size",
                    minimum=320,
                    maximum=1280,
                    step=32,
                    value=640,
                )
                conf_threshold = gr.Slider(
                    label="Confidence Threshold",
                    minimum=0.1,
                    maximum=1.0,
                    step=0.1,
                    value=0.4,
                )
                iou_threshold = gr.Slider(
                    label="IoU Threshold",
                    minimum=0.1,
                    maximum=1.0,
                    step=0.1,
                    value=0.5,
                )
                yolov9_infer = gr.Button(value="Inference")
            with gr.Column():
                output_numpy = gr.Image(type="numpy", label="Output")

        # Clicking the button runs inference and shows the rendered result.
        yolov9_infer.click(
            fn=yolov9_inference,
            inputs=[
                img_path,
                model_path,
                image_size,
                conf_threshold,
                iou_threshold,
            ],
            outputs=[output_numpy],
        )
# Assemble the top-level page: a centered title followed by the demo UI.
gradio_app = gr.Blocks()
with gradio_app:
    gr.HTML(
        """
    <h1 style='text-align: center'>
    YOLOv9: Detect Void Space in Retail Shelf
    </h1>
    """)
    with gr.Row():
        with gr.Column():
            app()

# Launch at import/run time (Hugging Face Spaces executes this file directly).
gradio_app.launch(debug=True)