import av
import cv2
import mediapipe as mp
import streamlit as st
from streamlit_webrtc import VideoProcessorBase, webrtc_streamer
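# NOTE: a minimal setup sketch -- the package names below are inferred from
# the imports above and are not pinned by this script:
#
#   pip install streamlit streamlit-webrtc mediapipe opencv-python av
#
# Launch with (assuming the file is saved as app.py):
#
#   streamlit run app.py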

# MediaPipe Hands solution and its drawing helpers.
mp_hands = mp.solutions.hands
mp_drawing = mp.solutions.drawing_utils


class VideoProcessor(VideoProcessorBase):
    def __init__(self):
        # One Hands instance per processor so each WebRTC session keeps its
        # own detection and tracking state.
        self.hands = mp_hands.Hands(
            max_num_hands=2,
            min_detection_confidence=0.5,
            min_tracking_confidence=0.5,
        )

    def recv(self, frame):
        # Convert the incoming WebRTC frame to a BGR NumPy array and mirror
        # it so the preview behaves like a selfie camera.
        img = frame.to_ndarray(format="bgr24")
        img = cv2.flip(img, 1)

        # MediaPipe expects RGB input.
        img_rgb = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        result = self.hands.process(img_rgb)

        # Draw the landmarks and their connections for every detected hand.
        if result.multi_hand_landmarks:
            for hand_landmarks in result.multi_hand_landmarks:
                mp_drawing.draw_landmarks(
                    img, hand_landmarks, mp_hands.HAND_CONNECTIONS
                )

        return av.VideoFrame.from_ndarray(img, format="bgr24")


st.title("Gesture & Hand Landmark Detection")
st.write("This app uses MediaPipe and Streamlit to detect hand landmarks in real time from your webcam.")

# A public Google STUN server lets the WebRTC connection negotiate a route
# even when the client is behind NAT.
webrtc_streamer(
    key="gesture-detection",
    video_processor_factory=VideoProcessor,
    rtc_configuration={
        "iceServers": [{"urls": ["stun:stun.l.google.com:19302"]}]
    },
)

st.markdown(
    """
    <style>
    .footer {text-align: center; font-size: 12px; color: grey; margin-top: 20px;}
    </style>
    <p class="footer">Made with Streamlit & MediaPipe</p>
    """,
    unsafe_allow_html=True,
)