jaimin committed
Commit 1d6f0ac · verified · 1 Parent(s): bcc2582

Create app.py

Files changed (1)
  1. app.py +121 -0
app.py ADDED
@@ -0,0 +1,121 @@
+ import gradio as gr
+ from scipy.spatial import distance as dist
+ from imutils import face_utils
+ import numpy as np
+ import imutils
+ import time
+ import dlib
+ import cv2
+ from keras.preprocessing.image import img_to_array
+ from keras.models import load_model
+
+ # Global state shared across frames
+ points = []                # history of eyebrow distances, used for normalization
+ emotion_classifier = None  # Keras emotion model, loaded in stress()
+
+ def eye_brow_distance(leye, reye):
+     # Euclidean distance between the two inner eyebrow points; also record it
+     global points
+     distq = dist.euclidean(leye, reye)
+     points.append(int(distq))
+     return distq
+
+ def emotion_finder(face, frame):
+     # Crop the detected face from the grayscale frame and classify its emotion
+     global emotion_classifier
+     EMOTIONS = ["angry", "disgust", "scared", "happy", "sad", "surprised", "neutral"]
+     (x, y, w, h) = face_utils.rect_to_bb(face)
+     roi = frame[y:y + h, x:x + w]
+     roi = cv2.resize(roi, (64, 64))
+     roi = roi.astype("float") / 255.0
+     roi = img_to_array(roi)
+     roi = np.expand_dims(roi, axis=0)
+     preds = emotion_classifier.predict(roi)[0]
+     label = EMOTIONS[preds.argmax()]
+     return label
+
+ def normalize_values(points, disp):
+     # Map the current eyebrow distance onto (0, 1]; a distance close to the
+     # minimum seen so far (brows drawn together) gives a higher stress value
+     spread = np.max(points) - np.min(points)
+     if spread == 0:
+         # Not enough history to normalize yet; avoid division by zero
+         return np.exp(-1.0)
+     normalized_value = abs(disp - np.min(points)) / abs(spread)
+     stress_value = np.exp(-normalized_value)
+     return stress_value
+
+ def stress(video_path, duration):
+     # Label each frame of up to `duration` seconds of video as stressed or
+     # not stressed, based on the detected emotion and eyebrow distance
+     global points, emotion_classifier
+     detector = dlib.get_frontal_face_detector()
+     predictor_path = "Stress_detection/shape_predictor_68_face_landmarks.dat"
+     predictor = dlib.shape_predictor(predictor_path)
+     emotion_classifier = load_model("Stress_detection/XCEPTION.102-0.66.hdf5", compile=False)
+
+     # Open video file
+     cap = cv2.VideoCapture(video_path)
+     points = []
+     stress_labels = []
+     start_time = time.time()
+
+     # Eyebrow landmark index ranges are constant, so look them up once
+     (lBegin, lEnd) = face_utils.FACIAL_LANDMARKS_IDXS["right_eyebrow"]
+     (rBegin, rEnd) = face_utils.FACIAL_LANDMARKS_IDXS["left_eyebrow"]
+
+     while True:
+         # Stop after the requested duration or when the video runs out of frames
+         if time.time() - start_time >= duration:
+             break
+
+         ret, frame = cap.read()
+         if not ret:
+             break
+
+         frame = cv2.flip(frame, 1)
+         frame = imutils.resize(frame, width=500)
+         gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
+
+         try:
+             detections = detector(gray, 0)
+             for detection in detections:
+                 emotion = emotion_finder(detection, gray)
+                 shape = predictor(gray, detection)
+                 shape = face_utils.shape_to_np(shape)
+
+                 leyebrow = shape[lBegin:lEnd]
+                 reyebrow = shape[rBegin:rEnd]
+
+                 distq = eye_brow_distance(leyebrow[-1], reyebrow[0])
+                 stress_value = normalize_values(points, distq)
+
+                 # Determine stress label for this frame: a negative emotion
+                 # combined with a high stress value counts as stressed
+                 if emotion in ['scared', 'sad', 'angry'] and stress_value >= 0.75:
+                     stress_label = 'stressed'
+                 else:
+                     stress_label = 'not stressed'
+
+                 # Store stress label in list
+                 stress_labels.append(stress_label)
+
+         except Exception as e:
+             print(f'Error: {e}')
+
+     cap.release()
+
+     # Count occurrences of 'stressed' and 'not stressed'
+     stressed_count = stress_labels.count('stressed')
+     not_stressed_count = stress_labels.count('not stressed')
+
+     # Determine which label occurred more frequently
+     if stressed_count > not_stressed_count:
+         most_frequent_label = 'stressed'
+     else:
+         most_frequent_label = 'not stressed'
+
+     return stressed_count, not_stressed_count, most_frequent_label
+
+ def gradio_interface(video, duration):
+     stressed_count, not_stressed_count, most_frequent_label = stress(video, duration)
+     # Return a single dict so the JSON output shows labeled counts
+     return {
+         "stressed_frames": stressed_count,
+         "not_stressed_frames": not_stressed_count,
+         "most_frequent_state": most_frequent_label,
+     }
+
+ # Define the Gradio interface and launch the app
+ gr.Interface(
+     fn=gradio_interface,
+     inputs=[gr.Video(label="Upload a video file"), gr.Number(value=30, label="Duration (seconds)")],
+     outputs="json",
+     title="Heart Rate and Stress Measurement"
+ ).launch(server_name="0.0.0.0")