import gradio as gr
import cv2
from ultralytics import YOLO, solutions
# Initialize the YOLO model
model = YOLO("yolov8s.pt")
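
# Note: the solutions.Analytics interface used below (type / writer / im0_shape and
# the update_* methods) follows an older Ultralytics solutions API; newer releases
# may expose a different constructor and update flow.
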
def process_video(video_path, analytics_type):
    cap = cv2.VideoCapture(video_path)
    assert cap.isOpened(), "Error reading video file"

    # Get video properties
    w, h, fps = (int(cap.get(x)) for x in (cv2.CAP_PROP_FRAME_WIDTH, cv2.CAP_PROP_FRAME_HEIGHT, cv2.CAP_PROP_FPS))

    output_filename = f"{analytics_type}_output.avi"
    out = cv2.VideoWriter(output_filename, cv2.VideoWriter_fourcc(*"MJPG"), fps, (w, h))
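    # Note: MJPG-encoded .avi output may not preview inline in every browser;
    # re-encoding the result to mp4/H.264 is a common workaround if the Gradio
    # video preview stays blank.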

    # Set up analytics based on the selected type
    analytics = solutions.Analytics(
        type=analytics_type,
        writer=out,
        im0_shape=(w, h),
        view_img=False,
    )

    clswise_count = {}
    frame_count = 0

    while cap.isOpened():
        success, frame = cap.read()
        if success:
            frame_count += 1
            results = model.track(frame, persist=True, verbose=False)

            if results[0].boxes.id is not None:
                boxes = results[0].boxes.xyxy.cpu()
                clss = results[0].boxes.cls.cpu().tolist()
                for box, cls in zip(boxes, clss):
                    if model.names[int(cls)] in clswise_count:
                        clswise_count[model.names[int(cls)]] += 1
                    else:
                        clswise_count[model.names[int(cls)]] = 1

            # Update analytics based on type
            if analytics_type == "line":
                total_counts = sum(clswise_count.values())
                analytics.update_line(frame_count, total_counts)
            elif analytics_type == "multiple_line":
                analytics.update_multiple_lines(clswise_count, list(clswise_count.keys()), frame_count)
            elif analytics_type == "pie":
                analytics.update_pie(clswise_count)
            elif analytics_type == "area":
                analytics.update_area(frame_count, clswise_count)

            clswise_count = {}  # Reset for next frame
        else:
            break

    cap.release()
    out.release()
    return output_filename  # Return the output video file
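
# Note: gr.Video passes the uploaded file to the callback as a filepath string by
# default, so it can be handed directly to cv2.VideoCapture.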
def gradio_app(video, analytics_type):
    # The uploaded video is already a local filepath
    video_path = video
    output_video = process_video(video_path, analytics_type)

    # Return processed video for display
    return output_video

# Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("# YOLO Video Processing App")

    with gr.Row():
        video_input = gr.Video(label="Upload Video")
        analytics_dropdown = gr.Dropdown(
            ["line", "multiple_line", "pie", "area"],
            label="Select Analytics Type",
            value="line",
        )

    output_video = gr.Video(label="Processed Output")

    # Button to start processing
    submit_btn = gr.Button("Process Video")

    # Define the output when the button is clicked
    submit_btn.click(gradio_app, inputs=[video_input, analytics_dropdown], outputs=output_video)

# Launch the Gradio app
demo.launch()
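
# Running this script starts a local Gradio server (by default at http://127.0.0.1:7860);
# pass share=True to demo.launch() if a temporary public link is needed.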