import json
import os

import gradio as gr
import yolov5
from huggingface_hub import hf_hub_download
from PIL import Image

app_title = "Detect defects in bird's nest jars"
models_ids = [
    'linhcuem/defects_bird_nest_jar_yolov5m_23_05',
    'linhcuem/defects_nest_bird_jar_yolov5x_24_5',
    'linhcuem/defects_bird_nest_jar_yolov5m6',
    'linhcuem/defects_bird_nest_jar_yolov5x',
    'linhcuem/defects_bird_nest_jar_yolov5s6',
    'linhcuem/defects_nest_bird_jar_yolov5l6',
]
current_model_id = models_ids[-1]
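# yolov5.load() is given a Hugging Face Hub model ID here (not a local weights path),
# so the yolov5 pip package fetches the weights from the Hub when the model is loaded.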
model = yolov5.load(current_model_id)
# model_yolov8 = YOLO(DEFAULT_DET_MODEL_ID_yolov8)
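# each example row follows the Interface input order below: [image path, confidence threshold, model ID]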
examples = [['test_images/16823291638707408-a2A2448-23gmBAS_40174045.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/16823292102253310-a2A2448-23gmBAS_40174046.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/16823291808953550-a2A2448-23gmBAS_40174048.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/16823291801532480-a2A2448-23gmBAS_40174048.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'],
['test_images/Image__2022-12-26__16-38-43.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-39-20.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-39-30.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-39-40.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-39-49.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'],
['test_images/Image__2022-12-26__16-40-00.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-40-10.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-40-22.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-40-31.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-40-31.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'],
['test_images/Image__2022-12-26__16-40-41.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-40-41.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-40-55.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-41-09.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-41-24.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'],
['test_images/Image__2022-12-26__16-41-33.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-41-50.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-42-00.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-42-11.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-42-21.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'],
['test_images/Image__2022-12-26__16-42-31.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-42-43.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-42-56.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-43-06.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-44-08.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'],
['test_images/Image__2022-12-26__16-44-30.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-44-41.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-44-50.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-45-20.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-45-31.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'],
['test_images/Image__2022-12-26__16-45-43.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-45-51.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-46-00.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-46-09.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-46-20.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'],
['test_images/Image__2022-12-26__16-46-46.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-46-56.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-47-04.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-47-13.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-47-22.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'],
['test_images/Image__2022-12-26__16-47-40.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05'], ['test_images/Image__2022-12-26__16-48-24.jpg', 0.25, 'linhcuem/defects_bird_nest_jar_yolov5m_23_05']]
def predict(image, threshold=0.25, model_id=None):
    global current_model_id
    global model

    # fall back to the currently loaded model when no model_id is given
    if model_id is None:
        model_id = current_model_id

    # update the model if a different one was selected
    if model_id != current_model_id:
        model = yolov5.load(model_id)
        # model_yolov8 = YOLO(DEFAULT_DET_MODEL_ID_yolov8)
        current_model_id = model_id

    # get the model input size from its config.json on the Hub
    config_path = hf_hub_download(repo_id=model_id, filename="config.json")
    with open(config_path, "r") as f:
        config = json.load(f)
    input_size = config["input_size"]

    # perform inference and render the annotated image
    model.conf = threshold
    results = model(image, size=input_size)
    numpy_image = results.render()[0]
    output_image = Image.fromarray(numpy_image)
    return output_image
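# Hypothetical local smoke test (kept commented out so running this module only
# starts the Gradio app); the image path is one of the bundled examples above:
# img = Image.open("test_images/Image__2022-12-26__16-38-43.jpg")
# out = predict(img, threshold=0.25, model_id=models_ids[-1])
# out.save("prediction.jpg")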
gr.Interface(
    title=app_title,
    description="DO ANH DAT",
    fn=predict,
    inputs=[
        gr.Image(type="pil"),
        gr.Slider(maximum=1, step=0.01, value=0.25),
        gr.Dropdown(models_ids, value=models_ids[-1]),
    ],
    outputs=gr.Image(type="pil"),
    examples=examples,
    cache_examples=True if examples else False,
).launch(enable_queue=True)  # enable_queue is a legacy Gradio 3.x launch flag; newer releases use .queue()