Spaces:
Runtime error
Runtime error
Bhaskar Saranga
committed on
Commit
•
ce4a448
1
Parent(s):
e9a052f
added v7 & v8 comparison
Browse files
app.py
CHANGED
@@ -173,7 +173,10 @@ def inference2(video,model_link,iou_threshold,confidence_threshold):
|
|
173 |
finalVideo.release()
|
174 |
return 'output.mp4',np.mean(fps_video)
|
175 |
|
176 |
-
|
|
|
|
|
|
|
177 |
|
178 |
examples_images = ['data/images/1.jpg',
|
179 |
'data/images/2.jpg',
|
@@ -208,9 +211,22 @@ with gr.Blocks() as demo:
|
|
208 |
gr.Examples(examples=examples_videos,inputs=video_input,outputs=video_output)
|
209 |
video_button = gr.Button("Detect")
|
210 |
|
211 |
-
|
212 |
-
|
213 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
214 |
|
215 |
text_button.click(inference, inputs=[image_input,image_drop,
|
216 |
image_iou_threshold,image_conf_threshold],
|
@@ -218,5 +234,9 @@ with gr.Blocks() as demo:
|
|
218 |
video_button.click(inference2, inputs=[video_input,video_drop,
|
219 |
video_iou_threshold,video_conf_threshold],
|
220 |
outputs=[video_output,fps_video])
|
|
|
|
|
|
|
|
|
221 |
|
222 |
demo.launch(debug=True,enable_queue=True)
|
|
|
173 |
finalVideo.release()
|
174 |
return 'output.mp4',np.mean(fps_video)
|
175 |
|
176 |
def inference_comp(image, iou_threshold, confidence_threshold):
    """Run both YOLOv7 and YOLOv8 on the same image for side-by-side comparison.

    Args:
        image: input image (PIL, as delivered by the Gradio Image component).
        iou_threshold: NMS IOU threshold, forwarded to both models.
        confidence_threshold: detection confidence threshold, forwarded to both models.

    Returns:
        Tuple of (v7 annotated image, v8 annotated image, v7 FPS, v8 FPS),
        matching the order of the click handler's outputs.
    """
    # Fix: the original passed the Gradio slider *components*
    # (image_comp_iou_threshold / image_comp_conf_threshold) instead of the
    # threshold *values* received through the parameters, so the user's
    # slider settings were never applied.
    v8_out, v8_fps = inference(image, "yolov8m", iou_threshold, confidence_threshold)
    v7_out, v7_fps = inference(image, "yolov7", iou_threshold, confidence_threshold)
    return v7_out, v8_out, v7_fps, v8_fps
|
180 |
|
181 |
examples_images = ['data/images/1.jpg',
|
182 |
'data/images/2.jpg',
|
|
|
211 |
gr.Examples(examples=examples_videos,inputs=video_input,outputs=video_output)
|
212 |
video_button = gr.Button("Detect")
|
213 |
|
214 |
+
with gr.Tab("Compare Models"):
|
215 |
+
gr.Markdown("## YOLOv7 vs YOLOv8 Object detection comparision")
|
216 |
+
with gr.Row():
|
217 |
+
image_comp_input = gr.Image(type='pil', label="Input Image", source="upload")
|
218 |
+
text_comp_button = gr.Button("Detect")
|
219 |
+
gr.Examples(examples=examples_images,inputs=image_input,outputs=image_output)
|
220 |
+
with gr.Row():
|
221 |
+
image_comp_iou_threshold = gr.Slider(label="IOU Threshold",interactive=True, minimum=0.0, maximum=1.0, value=0.45)
|
222 |
+
image_comp_conf_threshold = gr.Slider(label="Confidence Threshold",interactive=True, minimum=0.0, maximum=1.0, value=0.25)
|
223 |
+
with gr.Row():
|
224 |
+
image_comp_output_v7 = gr.Image(type='pil', label="YOLOv7 Output Image", source="upload")
|
225 |
+
image_comp_output_v8 = gr.Image(type='pil', label="YOLOv8 Output Image", source="upload")
|
226 |
+
with gr.row():
|
227 |
+
v7_fps_image = gr.Number(0,label='v7 FPS')
|
228 |
+
v8_fps_image = gr.Number(0,label='v8 FPS')
|
229 |
+
|
230 |
|
231 |
text_button.click(inference, inputs=[image_input,image_drop,
|
232 |
image_iou_threshold,image_conf_threshold],
|
|
|
234 |
video_button.click(inference2, inputs=[video_input,video_drop,
|
235 |
video_iou_threshold,video_conf_threshold],
|
236 |
outputs=[video_output,fps_video])
|
237 |
+
text_comp_button.click(inference_comp,input=[image_comp_input,
|
238 |
+
image_comp_iou_threshold,
|
239 |
+
image_comp_conf_threshold],
|
240 |
+
outputs=[image_comp_output_v7,image_comp_output_v8,v7_fps_image,v8_fps_image])
|
241 |
|
242 |
demo.launch(debug=True,enable_queue=True)
|