#!/usr/bin/env python
from __future__ import annotations

import pathlib
import math

import gradio as gr
import cv2
import mediapipe as mp
import numpy as np

mp_drawing = mp.solutions.drawing_utils
mp_drawing_styles = mp.solutions.drawing_styles
mp_pose = mp.solutions.pose

TITLE = "MediaPipe Human Pose Estimation"
DESCRIPTION = "https://google.github.io/mediapipe/"


def calculateAngle(landmark1, landmark2, landmark3):
    '''
    This function calculates the angle between three landmarks.

    Args:
        landmark1: The first landmark containing the x, y and z coordinates.
        landmark2: The second landmark containing the x, y and z coordinates.
        landmark3: The third landmark containing the x, y and z coordinates.

    Returns:
        angle: The calculated angle between the three landmarks.
    '''
    # Get the required landmark coordinates.
    x1, y1 = landmark1.x, landmark1.y
    x2, y2 = landmark2.x, landmark2.y
    x3, y3 = landmark3.x, landmark3.y

    # Calculate the angle between the three points.
    angle = math.degrees(math.atan2(y3 - y2, x3 - x2) - math.atan2(y1 - y2, x1 - x2))
    # angle = abs(angle)  # Convert the angle to an absolute value.

    # Normalize negative angles into the 0-360 range.
    if angle < 0:
        angle += 360

    # Return the calculated angle.
    return angle
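
# A minimal sanity-check sketch (not part of the original app): calculateAngle() only
# reads the .x and .y attributes, so any object exposing them can stand in for a
# MediaPipe NormalizedLandmark. The helper name below is illustrative.
def _exampleCalculateAngle() -> float:
    """Return the angle of a synthetic right-angled joint."""
    from types import SimpleNamespace as _Point

    # Joint vertex at the origin, with one segment along +y and one along +x;
    # with this function's 0-360 convention the result is 270.0 degrees.
    return calculateAngle(
        _Point(x=0.0, y=1.0),
        _Point(x=0.0, y=0.0),
        _Point(x=1.0, y=0.0),
    )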

def classifyPose(landmarks, output_image, display=False):
    '''
    This function classifies yoga poses depending upon the angles of various body joints.

    Args:
        landmarks: A list of detected landmarks of the person whose pose needs to be classified.
        output_image: An image of the person with the detected pose landmarks drawn.
        display: A boolean value; if set to True, the function displays the resultant image
                 with the pose label written on it and returns nothing.

    Returns:
        output_image: The image with the detected pose landmarks drawn and the pose label written.
        label: The classified pose label of the person in the output_image.
    '''
    # Initialize the label of the pose. It is not known at this stage.
    label = 'Unknown Pose'

    # Specify the color (red) with which the label will be written on the image.
    color = (0, 0, 255)

    # Calculate the required angles.
    # ----------------------------------------------------------------------------------------------------------------
    # Get the angle between the left shoulder, elbow and wrist points.
    left_elbow_angle = calculateAngle(landmarks[mp_pose.PoseLandmark.LEFT_SHOULDER.value],
                                      landmarks[mp_pose.PoseLandmark.LEFT_ELBOW.value],
                                      landmarks[mp_pose.PoseLandmark.LEFT_WRIST.value])

    # Get the angle between the right shoulder, elbow and wrist points.
    right_elbow_angle = calculateAngle(landmarks[mp_pose.PoseLandmark.RIGHT_SHOULDER.value],
                                       landmarks[mp_pose.PoseLandmark.RIGHT_ELBOW.value],
                                       landmarks[mp_pose.PoseLandmark.RIGHT_WRIST.value])

    # Get the angle between the left elbow, shoulder and hip points.
    left_shoulder_angle = calculateAngle(landmarks[mp_pose.PoseLandmark.LEFT_ELBOW.value],
                                         landmarks[mp_pose.PoseLandmark.LEFT_SHOULDER.value],
                                         landmarks[mp_pose.PoseLandmark.LEFT_HIP.value])

    # Get the angle between the right hip, shoulder and elbow points.
    right_shoulder_angle = calculateAngle(landmarks[mp_pose.PoseLandmark.RIGHT_HIP.value],
                                          landmarks[mp_pose.PoseLandmark.RIGHT_SHOULDER.value],
                                          landmarks[mp_pose.PoseLandmark.RIGHT_ELBOW.value])

    # Get the angle between the left hip, knee and ankle points.
    left_knee_angle = calculateAngle(landmarks[mp_pose.PoseLandmark.LEFT_HIP.value],
                                     landmarks[mp_pose.PoseLandmark.LEFT_KNEE.value],
                                     landmarks[mp_pose.PoseLandmark.LEFT_ANKLE.value])

    # Get the angle between the right hip, knee and ankle points.
    right_knee_angle = calculateAngle(landmarks[mp_pose.PoseLandmark.RIGHT_HIP.value],
                                      landmarks[mp_pose.PoseLandmark.RIGHT_KNEE.value],
                                      landmarks[mp_pose.PoseLandmark.RIGHT_ANKLE.value])
    # ----------------------------------------------------------------------------------------------------------------

    # Check for the Five-Pointed Star Pose.
    if abs(landmarks[mp_pose.PoseLandmark.LEFT_WRIST.value].y - landmarks[mp_pose.PoseLandmark.LEFT_HIP.value].y) < 100 and \
            abs(landmarks[mp_pose.PoseLandmark.RIGHT_WRIST.value].y - landmarks[mp_pose.PoseLandmark.RIGHT_HIP.value].y) < 100 and \
            abs(landmarks[mp_pose.PoseLandmark.LEFT_ANKLE.value].x - landmarks[mp_pose.PoseLandmark.RIGHT_ANKLE.value].x) > 200 and \
            abs(landmarks[mp_pose.PoseLandmark.LEFT_WRIST.value].x - landmarks[mp_pose.PoseLandmark.RIGHT_WRIST.value].x) > 200:
        label = "Five-Pointed Star Pose"

    # Check if it is the Warrior II pose or the T pose.
    # For both of them, both arms should be straight and the shoulders should be at the required angle.
    if 165 < left_elbow_angle < 195 and 165 < right_elbow_angle < 195:

        # Check if the shoulders are at the required angle.
        if 80 < left_shoulder_angle < 110 and 80 < right_shoulder_angle < 110:

            # Check if it is the Warrior II pose: one leg straight...
            if 165 < left_knee_angle < 195 or 165 < right_knee_angle < 195:

                # ...and the other leg bent at the required angle.
                if 90 < left_knee_angle < 120 or 90 < right_knee_angle < 120:
                    label = 'Warrior II Pose'

            # Check if it is the T pose: both legs straight.
            if 160 < left_knee_angle < 195 and 160 < right_knee_angle < 195:
                label = 'T Pose'

    # Check if it is the Tree pose.
    # Check if one leg is straight...
    if 165 < left_knee_angle < 195 or 165 < right_knee_angle < 195:

        # ...and the other leg is bent at the required angle.
        if 315 < left_knee_angle < 335 or 25 < right_knee_angle < 45:
            label = 'Tree Pose'

    # Check for the Upward Salute Pose.
    if abs(landmarks[mp_pose.PoseLandmark.LEFT_WRIST.value].x - landmarks[mp_pose.PoseLandmark.LEFT_HIP.value].x) < 100 and \
            abs(landmarks[mp_pose.PoseLandmark.RIGHT_WRIST.value].x - landmarks[mp_pose.PoseLandmark.RIGHT_HIP.value].x) < 100 and \
            landmarks[mp_pose.PoseLandmark.LEFT_WRIST.value].y < landmarks[mp_pose.PoseLandmark.LEFT_SHOULDER.value].y and \
            landmarks[mp_pose.PoseLandmark.RIGHT_WRIST.value].y < landmarks[mp_pose.PoseLandmark.RIGHT_SHOULDER.value].y and \
            abs(landmarks[mp_pose.PoseLandmark.LEFT_SHOULDER.value].y - landmarks[mp_pose.PoseLandmark.RIGHT_SHOULDER.value].y) < 50:
        label = "Upward Salute Pose"

    # Check for the Hands Under Feet Pose.
    if landmarks[mp_pose.PoseLandmark.LEFT_WRIST.value].y > landmarks[mp_pose.PoseLandmark.LEFT_KNEE.value].y and \
            landmarks[mp_pose.PoseLandmark.RIGHT_WRIST.value].y > landmarks[mp_pose.PoseLandmark.RIGHT_KNEE.value].y and \
            abs(landmarks[mp_pose.PoseLandmark.LEFT_WRIST.value].x - landmarks[mp_pose.PoseLandmark.LEFT_ANKLE.value].x) < 50 and \
            abs(landmarks[mp_pose.PoseLandmark.RIGHT_WRIST.value].x - landmarks[mp_pose.PoseLandmark.RIGHT_ANKLE.value].x) < 50:
        label = "Hands Under Feet Pose"
    # ----------------------------------------------------------------------------------------------------------------

    # Check if the pose was classified successfully.
    if label != 'Unknown Pose':
        # Update the color (to green) with which the label will be written on the image.
        color = (0, 255, 0)

    # Write the label on the output image.
    cv2.putText(output_image, label, (220, 30), cv2.FONT_HERSHEY_PLAIN, 2, color, 2)

    # Check if the resultant image is specified to be displayed.
    if display:
        # Display the resultant image (matplotlib is only needed for this optional path).
        import matplotlib.pyplot as plt

        plt.figure(figsize=[10, 10])
        plt.imshow(output_image[:, :, ::-1])
        plt.title("Output Image")
        plt.axis('off')
    else:
        # Return the output image and the classified label.
        return output_image, label
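
# The Five-Pointed Star / Upward Salute / Hands Under Feet checks in classifyPose()
# compare landmark coordinates against pixel-sized thresholds (50, 100, 200), whereas
# run() below passes in MediaPipe's normalized landmarks, whose x and y lie in [0, 1].
# A minimal sketch of the scaling those heuristics appear to assume (the helper name is
# illustrative and not part of the original code):
def _toPixelLandmarks(landmarks, image_width: int, image_height: int):
    """Return lightweight copies of the landmarks with x and y scaled to pixels."""
    from types import SimpleNamespace

    return [
        SimpleNamespace(x=lm.x * image_width, y=lm.y * image_height, z=lm.z)
        for lm in landmarks
    ]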

def run(
    image: np.ndarray,
    model_complexity: int,
    enable_segmentation: bool,
    min_detection_confidence: float,
    background_color: str,
) -> np.ndarray:
    with mp_pose.Pose(
        static_image_mode=True,
        model_complexity=model_complexity,
        enable_segmentation=enable_segmentation,
        min_detection_confidence=min_detection_confidence,
    ) as pose:
        results = pose.process(image)

        res = image[:, :, ::-1].copy()
        if enable_segmentation:
            if background_color == "white":
                bg_color = 255
            elif background_color == "black":
                bg_color = 0
            elif background_color == "green":
                bg_color = (0, 255, 0)  # type: ignore
            else:
                raise ValueError

            if results.segmentation_mask is not None:
                res[results.segmentation_mask <= 0.1] = bg_color
            else:
                res[:] = bg_color

        mp_drawing.draw_landmarks(
            res,
            results.pose_landmarks,
            mp_pose.POSE_CONNECTIONS,
            landmark_drawing_spec=mp_drawing_styles.get_default_pose_landmarks_style(),
        )

        if results.pose_landmarks:
            # Pose classification.
            res, pose_classification = classifyPose(results.pose_landmarks.landmark, res)

        return res[:, :, ::-1]


model_complexities = list(range(3))
background_colors = ["white", "black", "green"]

image_paths = sorted(pathlib.Path("images").rglob("*.jpg"))
examples = [[path, model_complexities[1], True, 0.5, background_colors[0]] for path in image_paths]

demo = gr.Interface(
    fn=run,
    inputs=[
        gr.Image(label="Input", type="numpy"),
        gr.Radio(label="Model Complexity", choices=model_complexities, type="index", value=model_complexities[1]),
        gr.Checkbox(label="Enable Segmentation", value=True),
        gr.Slider(label="Minimum Detection Confidence", minimum=0, maximum=1, step=0.05, value=0.5),
        gr.Radio(label="Background Color", choices=background_colors, type="value", value=background_colors[0]),
    ],
    outputs=gr.Image(label="Output"),
    examples=examples,
    title=TITLE,
    description=DESCRIPTION,
)

if __name__ == "__main__":
    demo.queue().launch()
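
# Local-usage sketch (assumption: "sample.jpg" is any test image; the file name is only
# an example). run() expects an RGB array, as Gradio supplies, and returns an RGB array,
# so OpenCV's BGR images need their channels reversed on the way in and out:
#
#     bgr = cv2.imread("sample.jpg")
#     annotated = run(bgr[:, :, ::-1], model_complexity=1, enable_segmentation=True,
#                     min_detection_confidence=0.5, background_color="white")
#     cv2.imwrite("sample_annotated.jpg", annotated[:, :, ::-1])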