Spaces:
Runtime error
Runtime error
Added app.py
Browse files
app.py
ADDED
@@ -0,0 +1,73 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import gradio as gr
|
2 |
+
import keras
|
3 |
+
import cv2
|
4 |
+
from tensorflow.keras.preprocessing.image import img_to_array
|
5 |
+
from tensorflow.keras.preprocessing.image import load_img
|
6 |
+
from tensorflow.keras.applications.mobilenet_v2 import preprocess_input
|
7 |
+
import numpy as np
|
8 |
+
from PIL import Image
|
9 |
+
|
10 |
+
def modelpred(img):
    """Detect faces in an image file and classify each face's mask usage.

    Parameters
    ----------
    img : str
        Path to an image file on disk (Gradio passes a filepath).

    Returns
    -------
    tuple
        ``(im_pil, label_list)`` — the annotated image as a ``PIL.Image``
        (bounding boxes and labels drawn on it) and a list of label
        strings, one per detected face (empty when no face is found).

    Raises
    ------
    ValueError
        If the image at ``img`` cannot be read.
    """
    # NOTE: relies on module-level `model` and `faceCascade`, which are
    # created later in this file but before the interface is launched.
    frame = cv2.imread(img)
    if frame is None:
        # cv2.imread returns None (no exception) on a missing/corrupt file;
        # fail loudly instead of crashing inside cvtColor below.
        raise ValueError("Could not read image: {}".format(img))

    # Haar cascades operate on grayscale images.
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    faces = faceCascade.detectMultiScale(
        gray,
        scaleFactor=1.1,
        minNeighbors=5,
        minSize=(60, 60),
        flags=cv2.CASCADE_SCALE_IMAGE,
    )

    preds = []
    locs = []
    label_list = []
    for (x, y, w, h) in faces:
        # Crop the face, convert BGR->RGB and resize to MobileNetV2's
        # expected 224x224 input, then apply its preprocessing.
        face_frame = frame[y:y + h, x:x + w]
        face_frame = cv2.cvtColor(face_frame, cv2.COLOR_BGR2RGB)
        face_frame = cv2.resize(face_frame, (224, 224))
        face_frame = img_to_array(face_frame)
        face_frame = np.expand_dims(face_frame, axis=0)
        face_frame = preprocess_input(face_frame)
        locs.append((x, y, x + w, y + h))
        preds.append(model.predict(face_frame))

    for (box, pred) in zip(locs, preds):
        (startX, startY, endX, endY) = box
        # Class-score order assumed from the original code:
        # [withoutMask, mask, notproper] — TODO confirm against training.
        (withoutMask, mask, notproper) = pred[0]

        # Pick label and box colour together from the highest-scoring class
        # (the original dispatched twice on the same label strings).
        if mask > withoutMask and mask > notproper:
            label = "Great you have worn the mask correctly"
            color = (0, 255, 0)
        elif withoutMask > notproper:
            label = "Please wear a mask"
            color = (0, 0, 255)
        else:
            label = "Please wear the mask properly"
            color = (255, 140, 0)

        # Include the winning probability in the label.
        label = "{}: {:.2f}%".format(label, max(mask, withoutMask, notproper) * 100)
        label_list.append(label)

        # Draw the label and bounding box on the output frame.
        cv2.putText(frame, label, (startX, startY - 10),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.45, color, 2)
        cv2.rectangle(frame, (startX, startY), (endX, endY), color, 2)

    # OpenCV works in BGR; PIL expects RGB.
    img = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
    im_pil = Image.fromarray(img)
    return (im_pil, label_list)
# --- Application setup -------------------------------------------------------
# Load the trained mask classifier and the OpenCV face detector before the
# interface is built, so they exist by the time `modelpred` is first called.
model = keras.models.load_model('FMf5model-062.model')
faceCascade = cv2.CascadeClassifier(cv2.data.haarcascades + 'haarcascade_frontalface_alt2.xml')

# `gr.inputs` / `gr.outputs` were removed in Gradio 3.0 (a likely cause of the
# Space's runtime error); use the top-level components instead.
imgpath = gr.Image(type="filepath")
# webcam = gr.Image(source="webcam", type="filepath")

iface = gr.Interface(
    fn=modelpred,
    inputs=imgpath,
    outputs=[gr.Image(type="pil"), "text"],
    title="Face Mask Detection using Deep Neural Networks",
    description="""Implementation of an efficient neural network to detect and differentiate between people with high accuracy into 3 classes - those who have correctly worn face masks, those who have worn masks incorrectly and those who have not worn them. Implemented and fine tuned a MobileNetV2 network for this task and achieved an accuracy of 92.02%.""",
    allow_flagging="never",
    live=False,
    examples=[
        ["images-3.jpeg"],
        ["power-family-with-father-mother-daughter-wearing-medical-face-mask-protect-2019-ncov-covid-19-corona-virus-stay-home-concept_73622-1419.jpg"],
        ["3000-2.jpg"],
    ],
)

# debug=True keeps the process attached and prints tracebacks to the log.
iface.launch(debug=True)