vu0018 committed on
Commit cc9d1c9 · verified · 1 Parent(s): 297c292

Update app.py

Files changed (1)
  1. app.py +54 -44
app.py CHANGED
@@ -1,69 +1,79 @@
  import cv2
- import numpy as np
  import mediapipe as mp
  import gradio as gr

- # Initialize Mediapipe Pose
  mp_pose = mp.solutions.pose
  mp_drawing = mp.solutions.drawing_utils
- pose = mp_pose.Pose(static_image_mode=True, min_detection_confidence=0.5)

- def calculate_angle(a, b, c):
-     """Calculate angle between three points (for body extension)."""
-     a = np.array(a)  # Point A
-     b = np.array(b)  # Joint (Point B)
-     c = np.array(c)  # Point C
      ba = a - b
      bc = c - b
-
      cosine_angle = np.dot(ba, bc) / (np.linalg.norm(ba) * np.linalg.norm(bc))
      angle = np.arccos(cosine_angle)
      return np.degrees(angle)

- def detect_pose(image):
-     """Detect pose in an image and return annotated image + example angle."""
-     image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
-     results = pose.process(image_rgb)

-     annotated = image.copy()
-     angle_text = "No person detected"

-     if results.pose_landmarks:
-         # Draw pose landmarks
-         mp_drawing.draw_landmarks(
-             annotated,
-             results.pose_landmarks,
-             mp_pose.POSE_CONNECTIONS,
-             mp_drawing.DrawingSpec(color=(0, 255, 0), thickness=2, circle_radius=2),
-             mp_drawing.DrawingSpec(color=(0, 0, 255), thickness=2)
-         )

-         # Example: Calculate elbow angle (shoulder-elbow-wrist)
-         h, w, _ = image.shape
-         landmarks = results.pose_landmarks.landmark
-         shoulder = [landmarks[mp_pose.PoseLandmark.LEFT_SHOULDER.value].x * w,
-                     landmarks[mp_pose.PoseLandmark.LEFT_SHOULDER.value].y * h]
-         elbow = [landmarks[mp_pose.PoseLandmark.LEFT_ELBOW.value].x * w,
-                  landmarks[mp_pose.PoseLandmark.LEFT_ELBOW.value].y * h]
-         wrist = [landmarks[mp_pose.PoseLandmark.LEFT_WRIST.value].x * w,
-                  landmarks[mp_pose.PoseLandmark.LEFT_WRIST.value].y * h]

-         angle = calculate_angle(shoulder, elbow, wrist)
-         angle_text = f"Left Elbow Angle: {int(angle)}°"

-         cv2.putText(annotated, angle_text, (20, 40),
-                     cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 2)

-     return annotated[:, :, ::-1], angle_text  # Convert BGR → RGB for Gradio

- # Gradio Interface
  demo = gr.Interface(
-     fn=detect_pose,
-     inputs=gr.Image(type="numpy", label="Upload Image"),
-     outputs=[gr.Image(type="numpy", label="Pose Estimation"), gr.Textbox(label="Body Extension Angle")],
-     title="Human Pose Estimation with MediaPipe",
-     description="Upload an image with a person doing exercise. The app will detect body pose and calculate joint angles."
  )

  if __name__ == "__main__":
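The calculate_angle helper is shared by both versions of the file: it returns the angle at joint b, in degrees, as the arccosine of the normalized dot product of the vectors from b to a and from b to c. A quick standalone check (a minimal sketch, not part of the committed app.py) shows a right-angle configuration coming out near 90:

import numpy as np

def calculate_angle(a, b, c):
    # Angle at joint b formed by points a-b-c, in degrees
    a, b, c = np.array(a), np.array(b), np.array(c)
    ba = a - b
    bc = c - b
    cosine_angle = np.dot(ba, bc) / (np.linalg.norm(ba) * np.linalg.norm(bc))
    return np.degrees(np.arccos(cosine_angle))

# Shoulder directly above the elbow, wrist straight out to the side
print(calculate_angle([0, 0], [0, 1], [1, 1]))  # ~90.0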
 
  import cv2
  import mediapipe as mp
+ import numpy as np
  import gradio as gr

  mp_pose = mp.solutions.pose
  mp_drawing = mp.solutions.drawing_utils

+ pose = mp_pose.Pose(static_image_mode=False, min_detection_confidence=0.5, min_tracking_confidence=0.5)

+ def calculate_angle(a, b, c):
+     a = np.array(a)
+     b = np.array(b)
+     c = np.array(c)
      ba = a - b
      bc = c - b

      cosine_angle = np.dot(ba, bc) / (np.linalg.norm(ba) * np.linalg.norm(bc))
      angle = np.arccos(cosine_angle)
      return np.degrees(angle)

+ def detect_pose_video(video_path):
+     cap = cv2.VideoCapture(video_path)
+     output_frames = []
+
+     while True:
+         ret, frame = cap.read()
+         if not ret:
+             break
+
+         frame_rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
+         results = pose.process(frame_rgb)
+
+         if results.pose_landmarks:
+             mp_drawing.draw_landmarks(
+                 frame,
+                 results.pose_landmarks,
+                 mp_pose.POSE_CONNECTIONS,
+                 mp_drawing.DrawingSpec(color=(0,255,0), thickness=2, circle_radius=2),
+                 mp_drawing.DrawingSpec(color=(0,0,255), thickness=2)
+             )

+             # Example: left elbow angle
+             h, w, _ = frame.shape
+             landmarks = results.pose_landmarks.landmark
+             shoulder = [landmarks[mp_pose.PoseLandmark.LEFT_SHOULDER.value].x * w,
+                         landmarks[mp_pose.PoseLandmark.LEFT_SHOULDER.value].y * h]
+             elbow = [landmarks[mp_pose.PoseLandmark.LEFT_ELBOW.value].x * w,
+                      landmarks[mp_pose.PoseLandmark.LEFT_ELBOW.value].y * h]
+             wrist = [landmarks[mp_pose.PoseLandmark.LEFT_WRIST.value].x * w,
+                      landmarks[mp_pose.PoseLandmark.LEFT_WRIST.value].y * h]

+             angle = calculate_angle(shoulder, elbow, wrist)
+             cv2.putText(frame, f"Left Elbow: {int(angle)} deg", (20,40),
+                         cv2.FONT_HERSHEY_SIMPLEX, 1, (255,255,0), 2)

+         output_frames.append(frame)

+     cap.release()

+     # Convert frames to a video file; take the frame size from the first annotated
+     # frame, since `frame` is None once the read loop has ended
+     out_path = "annotated_video.mp4"
+     fourcc = cv2.VideoWriter_fourcc(*'mp4v')
+     height, width, _ = output_frames[0].shape
+     out = cv2.VideoWriter(out_path, fourcc, 20.0, (width, height))
+     for f in output_frames:
+         out.write(f)
+     out.release()

+     return out_path

+ # Gradio interface
  demo = gr.Interface(
+     fn=detect_pose_video,
+     inputs=gr.Video(label="Upload Video"),
+     outputs=gr.Video(label="Annotated Video"),
+     title="Human Pose Estimation on Video",
+     description="Upload a video and see pose landmarks & joint angles detected in real-time."
  )

  if __name__ == "__main__":
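One note on the new writer settings: the annotated video is written at a hard-coded 20.0 fps, so sources recorded at other frame rates will play back too fast or too slow. A small variation (a sketch only, not part of this commit; the property names are standard OpenCV constants) reads the rate and frame size from the input capture instead:

import cv2

cap = cv2.VideoCapture(video_path)           # same video_path argument as detect_pose_video
fps = cap.get(cv2.CAP_PROP_FPS) or 20.0      # fall back to 20 fps if the container reports 0
width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))

fourcc = cv2.VideoWriter_fourcc(*'mp4v')
out = cv2.VideoWriter("annotated_video.mp4", fourcc, fps, (width, height))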