hb-setosys committed on
Commit
beb309e
·
verified ·
1 Parent(s): a860d86

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +22 -22
app.py CHANGED
@@ -10,8 +10,8 @@ import gradio as gr
10
  MODEL_PATH = "setosys_yolov12x.pt"
11
  model = YOLO(MODEL_PATH)
12
 
13
- # COCO dataset class ID for truck
14
- TRUCK_CLASS_ID = 7 # "truck"
15
 
16
  # Initialize SORT tracker
17
  tracker = Sort()
@@ -38,15 +38,15 @@ def determine_time_interval(video_filename):
38
  print("No keyword match, using default interval: 5") # Debugging
39
  return 5 # Default interval
40
 
41
- def count_unique_trucks(video_path):
42
- """ Counts unique trucks in a video using YOLOv12x and SORT tracking. """
43
  cap = cv2.VideoCapture(video_path)
44
  if not cap.isOpened():
45
  return {"Error": "Unable to open video file."}
46
 
47
  # Reset variables at the start of each analysis
48
- unique_truck_ids = set()
49
- truck_history = {}
50
 
51
  # Get FPS of the video
52
  fps = int(cap.get(cv2.CAP_PROP_FPS))
@@ -87,7 +87,7 @@ def count_unique_trucks(video_path):
87
  confidence = float(box.conf.item()) # Get confidence score
88
 
89
  # Track only trucks
90
- if class_id == TRUCK_CLASS_ID and confidence > CONFIDENCE_THRESHOLD:
91
  x1, y1, x2, y2 = map(int, box.xyxy[0]) # Get bounding box
92
  detections.append([x1, y1, x2, y2, confidence])
93
 
@@ -104,34 +104,34 @@ def count_unique_trucks(video_path):
104
  print(f"Frame {frame_count}: Tracked Objects -> {tracked_objects}")
105
 
106
  for obj in tracked_objects:
107
- truck_id = int(obj[4]) # Unique ID assigned by SORT
108
  x1, y1, x2, y2 = obj[:4] # Get the bounding box coordinates
109
 
110
- truck_center = (x1 + x2) / 2, (y1 + y2) / 2 # Calculate truck center
111
 
112
- # If truck is already in history, check movement distance
113
- if truck_id in truck_history:
114
- last_position = truck_history[truck_id]["position"]
115
- distance = np.linalg.norm(np.array(truck_center) - np.array(last_position))
116
 
117
  if distance > DISTANCE_THRESHOLD:
118
- unique_truck_ids.add(truck_id) # Add only if moved significantly
119
 
120
  else:
121
- # If truck is not in history, add it
122
- truck_history[truck_id] = {
123
  "frame_count": frame_count,
124
- "position": truck_center
125
  }
126
- unique_truck_ids.add(truck_id)
127
 
128
  cap.release()
129
- return {"Total Unique Trucks": len(unique_truck_ids)}
130
 
131
 
132
  # Gradio UI function
133
  def analyze_video(video_file):
134
- result = count_unique_trucks(video_file)
135
  return "\n".join([f"{key}: {value}" for key, value in result.items()])
136
 
137
  # Define Gradio interface
@@ -139,8 +139,8 @@ iface = gr.Interface(
139
  fn=analyze_video,
140
  inputs=gr.Video(label="Upload Video"),
141
  outputs=gr.Textbox(label="Analysis Result"),
142
- title="YOLOv12x Unique Truck Counter",
143
- description="Upload a video to count unique trucks using YOLOv12x and SORT tracking."
144
  )
145
 
146
  # Launch the Gradio app
 
10
  MODEL_PATH = "setosys_yolov12x.pt"
11
  model = YOLO(MODEL_PATH)
12
 
13
+ # COCO dataset class ID for people
14
+ PERSON_CLASS_ID = 0 # "person"
15
 
16
  # Initialize SORT tracker
17
  tracker = Sort()
 
38
  print("No keyword match, using default interval: 5") # Debugging
39
  return 5 # Default interval
40
 
41
+ def count_unique_people(video_path):
42
+ """ Counts unique people in a video using YOLOv12x and SORT tracking. """
43
  cap = cv2.VideoCapture(video_path)
44
  if not cap.isOpened():
45
  return {"Error": "Unable to open video file."}
46
 
47
  # Reset variables at the start of each analysis
48
+ unique_people_ids = set()
49
+ people_history = {}
50
 
51
  # Get FPS of the video
52
  fps = int(cap.get(cv2.CAP_PROP_FPS))
 
87
  confidence = float(box.conf.item()) # Get confidence score
88
 
89
  # Track only people
90
+ if class_id == PERSON_CLASS_ID and confidence > CONFIDENCE_THRESHOLD:
91
  x1, y1, x2, y2 = map(int, box.xyxy[0]) # Get bounding box
92
  detections.append([x1, y1, x2, y2, confidence])
93
 
 
104
  print(f"Frame {frame_count}: Tracked Objects -> {tracked_objects}")
105
 
106
  for obj in tracked_objects:
107
+ people_id = int(obj[4]) # Unique ID assigned by SORT
108
  x1, y1, x2, y2 = obj[:4] # Get the bounding box coordinates
109
 
110
+ people_center = (x1 + x2) / 2, (y1 + y2) / 2 # Calculate people center
111
 
112
+ # If people is already in history, check movement distance
113
+ if people_id in people_history:
114
+ last_position = people_history[people_id]["position"]
115
+ distance = np.linalg.norm(np.array(people_center) - np.array(last_position))
116
 
117
  if distance > DISTANCE_THRESHOLD:
118
+ unique_people_ids.add(people_id) # Add only if moved significantly
119
 
120
  else:
121
+ # If people is not in history, add it
122
+ people_history[people_id] = {
123
  "frame_count": frame_count,
124
+ "position": people_center
125
  }
126
+ unique_people_ids.add(people_id)
127
 
128
  cap.release()
129
+ return {"Total Unique People": len(unique_people_ids)}
130
 
131
 
132
  # Gradio UI function
133
  def analyze_video(video_file):
134
+ result = count_unique_people(video_file)
135
  return "\n".join([f"{key}: {value}" for key, value in result.items()])
136
 
137
  # Define Gradio interface
 
139
  fn=analyze_video,
140
  inputs=gr.Video(label="Upload Video"),
141
  outputs=gr.Textbox(label="Analysis Result"),
142
+ title="YOLOv12x Unique People Counter",
143
+ description="Upload a video to count unique people using YOLOv12x and SORT tracking."
144
  )
145
 
146
  # Launch the Gradio app