keremberke
committed on
Commit
•
5002fd1
1
Parent(s):
1c81e18
upload space files
Browse files- README.md +2 -2
- app.py +53 -0
- requirements.txt +5 -0
- test_images/32LSCZQDHZO7_jpg.rf.8fddaa4b5ed4db87d19a32d4554b9c23.jpg +0 -0
- test_images/6URLZIZIQ6S0_jpg.rf.4661cb4082077e616ec94250eea6328f.jpg +0 -0
- test_images/I91P1I5WNUZT_jpg.rf.c5c49a5f421751c30008a35e7b52087e.jpg +0 -0
- test_images/JWF31R9STW0L_jpg.rf.a785b0107b333fe746fe1c4c8d2f744f.jpg +0 -0
- test_images/LTDX8N8ZKBT2_jpg.rf.6e09889a432d15c19fa0fbdbb62d347f.jpg +0 -0
- test_images/M_01685_png.rf.57a2823eabfa135c0a508d18faa70ce3.jpg +0 -0
- test_images/M_03339_png.rf.f755ccc7bdf2a0ebc7e4553a0576ed50.jpg +0 -0
- test_images/M_04045_png.rf.31bd5eed4b55dbcafe568210774cb5dc.jpg +0 -0
- test_images/M_04302_png.rf.62eabd3a1cc0dbfcdffa9c5a9582f77c.jpg +0 -0
- test_images/M_07555_png.rf.9c2d725a383658227bc87891f68fe975.jpg +0 -0
- test_images/M_08430_png.rf.64508c4e583f64ac2cd431c99dc79834.jpg +0 -0
- test_images/V75EBJ0AG2HV_jpg.rf.88822c95c57d6bfb33092eb5ec0a020c.jpg +0 -0
README.md
CHANGED
@@ -1,7 +1,7 @@
|
|
1 |
---
|
2 |
title: Forklift Object Detection
|
3 |
-
emoji:
|
4 |
-
colorFrom:
|
5 |
colorTo: gray
|
6 |
sdk: gradio
|
7 |
sdk_version: 3.15.0
|
|
|
1 |
---
|
2 |
title: Forklift Object Detection
|
3 |
+
emoji: 🎮
|
4 |
+
colorFrom: red
|
5 |
colorTo: gray
|
6 |
sdk: gradio
|
7 |
sdk_version: 3.15.0
|
app.py
ADDED
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
import json
|
3 |
+
import gradio as gr
|
4 |
+
import yolov5
|
5 |
+
from PIL import Image
|
6 |
+
from huggingface_hub import hf_hub_download
|
7 |
+
|
8 |
+
app_title = "Forklift Object Detection"
|
9 |
+
models_ids = ['keremberke/yolov5n-forklift', 'keremberke/yolov5s-forklift', 'keremberke/yolov5m-forklift']
|
10 |
+
article = f"<p style='text-align: center'> <a href='https://huggingface.co/{models_ids[-1]}'>model</a> | <a href='https://huggingface.co/keremberke/forklift-object-detection'>dataset</a> | <a href='https://github.com/keremberke/awesome-yolov5-models'>awesome-yolov5-models</a> </p>"
|
11 |
+
|
12 |
+
current_model_id = models_ids[-1]
|
13 |
+
model = yolov5.load(current_model_id)
|
14 |
+
|
15 |
+
examples = [['test_images/32LSCZQDHZO7_jpg.rf.8fddaa4b5ed4db87d19a32d4554b9c23.jpg', 0.25, 'keremberke/yolov5m-forklift'], ['test_images/6URLZIZIQ6S0_jpg.rf.4661cb4082077e616ec94250eea6328f.jpg', 0.25, 'keremberke/yolov5m-forklift'], ['test_images/I91P1I5WNUZT_jpg.rf.c5c49a5f421751c30008a35e7b52087e.jpg', 0.25, 'keremberke/yolov5m-forklift'], ['test_images/JWF31R9STW0L_jpg.rf.a785b0107b333fe746fe1c4c8d2f744f.jpg', 0.25, 'keremberke/yolov5m-forklift'], ['test_images/LTDX8N8ZKBT2_jpg.rf.6e09889a432d15c19fa0fbdbb62d347f.jpg', 0.25, 'keremberke/yolov5m-forklift'], ['test_images/M_01685_png.rf.57a2823eabfa135c0a508d18faa70ce3.jpg', 0.25, 'keremberke/yolov5m-forklift'], ['test_images/M_03339_png.rf.f755ccc7bdf2a0ebc7e4553a0576ed50.jpg', 0.25, 'keremberke/yolov5m-forklift'], ['test_images/M_04045_png.rf.31bd5eed4b55dbcafe568210774cb5dc.jpg', 0.25, 'keremberke/yolov5m-forklift'], ['test_images/M_04302_png.rf.62eabd3a1cc0dbfcdffa9c5a9582f77c.jpg', 0.25, 'keremberke/yolov5m-forklift'], ['test_images/M_07555_png.rf.9c2d725a383658227bc87891f68fe975.jpg', 0.25, 'keremberke/yolov5m-forklift'], ['test_images/M_08430_png.rf.64508c4e583f64ac2cd431c99dc79834.jpg', 0.25, 'keremberke/yolov5m-forklift'], ['test_images/V75EBJ0AG2HV_jpg.rf.88822c95c57d6bfb33092eb5ec0a020c.jpg', 0.25, 'keremberke/yolov5m-forklift']]
|
16 |
+
|
17 |
+
|
# Cache of model_id -> input size so config.json is fetched from the Hub
# at most once per checkpoint.
_input_sizes = {}


def predict(image, threshold=0.25, model_id=None):
    """Run forklift detection on *image* and return the rendered result.

    Args:
        image: input image (PIL.Image, as produced by the Gradio input).
        threshold: confidence threshold applied to detections, in [0, 1].
        model_id: Hub repo id of the checkpoint to use. When None, the
            currently loaded checkpoint is reused instead of crashing in
            ``yolov5.load(None)``.

    Returns:
        A PIL.Image with the detected boxes rendered on top.
    """
    # The loaded model is process-global so consecutive calls with the same
    # checkpoint skip reloading.
    global current_model_id
    global model

    # Fall back to the already-loaded checkpoint; the original code would
    # pass None straight into yolov5.load().
    if model_id is None:
        model_id = current_model_id

    # Swap the cached model only when a different checkpoint is requested.
    if model_id != current_model_id:
        model = yolov5.load(model_id)
        current_model_id = model_id

    # Get the model's expected input size from its config.json on the Hub,
    # caching the value so repeated predictions avoid re-download/re-parse.
    if model_id not in _input_sizes:
        config_path = hf_hub_download(repo_id=model_id, filename="config.json")
        with open(config_path, "r") as f:
            config = json.load(f)
        _input_sizes[model_id] = config["input_size"]
    input_size = _input_sizes[model_id]

    # Perform inference and render the detections onto the image.
    model.conf = threshold
    results = model(image, size=input_size)
    numpy_image = results.render()[0]
    output_image = Image.fromarray(numpy_image)
    return output_image
38 |
+
|
39 |
+
|
40 |
+
gr.Interface(
|
41 |
+
title=app_title,
|
42 |
+
description="Created by 'keremberke'",
|
43 |
+
article=article,
|
44 |
+
fn=predict,
|
45 |
+
inputs=[
|
46 |
+
gr.Image(type="pil"),
|
47 |
+
gr.Slider(maximum=1, step=0.01, value=0.25),
|
48 |
+
gr.Dropdown(models_ids, value=models_ids[-1]),
|
49 |
+
],
|
50 |
+
outputs=gr.Image(type="pil"),
|
51 |
+
examples=examples,
|
52 |
+
cache_examples=True if examples else False,
|
53 |
+
).launch(enable_queue=True)
|
requirements.txt
ADDED
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
yolov5==7.0.5
|
3 |
+
gradio==3.15.0
|
4 |
+
torch
|
5 |
+
huggingface-hub
|
test_images/32LSCZQDHZO7_jpg.rf.8fddaa4b5ed4db87d19a32d4554b9c23.jpg
ADDED
test_images/6URLZIZIQ6S0_jpg.rf.4661cb4082077e616ec94250eea6328f.jpg
ADDED
test_images/I91P1I5WNUZT_jpg.rf.c5c49a5f421751c30008a35e7b52087e.jpg
ADDED
test_images/JWF31R9STW0L_jpg.rf.a785b0107b333fe746fe1c4c8d2f744f.jpg
ADDED
test_images/LTDX8N8ZKBT2_jpg.rf.6e09889a432d15c19fa0fbdbb62d347f.jpg
ADDED
test_images/M_01685_png.rf.57a2823eabfa135c0a508d18faa70ce3.jpg
ADDED
test_images/M_03339_png.rf.f755ccc7bdf2a0ebc7e4553a0576ed50.jpg
ADDED
test_images/M_04045_png.rf.31bd5eed4b55dbcafe568210774cb5dc.jpg
ADDED
test_images/M_04302_png.rf.62eabd3a1cc0dbfcdffa9c5a9582f77c.jpg
ADDED
test_images/M_07555_png.rf.9c2d725a383658227bc87891f68fe975.jpg
ADDED
test_images/M_08430_png.rf.64508c4e583f64ac2cd431c99dc79834.jpg
ADDED
test_images/V75EBJ0AG2HV_jpg.rf.88822c95c57d6bfb33092eb5ec0a020c.jpg
ADDED