import os
from datetime import datetime

import cv2
import face_recognition
import numpy as np
import streamlit as st
from PIL import Image

st.title("AIMLJan24 - Face Recognition")

# Load the reference photographs used for face recognition
Images = []
classnames = []
directory = "photos"
myList = os.listdir(directory)

st.write("Photographs found in folder:")
for cls in myList:
    if os.path.splitext(cls)[1].lower() in [".jpg", ".jpeg"]:
        img_path = os.path.join(directory, cls)
        curImg = cv2.imread(img_path)
        # cv2.imread returns BGR; face_recognition expects RGB
        curImg = cv2.cvtColor(curImg, cv2.COLOR_BGR2RGB)
        Images.append(curImg)
        st.write(os.path.splitext(cls)[0])
        classnames.append(os.path.splitext(cls)[0])

# Compute one face encoding per reference image, skipping any image where no face is found
encodeListknown = []
knownNames = []
for img, cls_name in zip(Images, classnames):
    encodings = face_recognition.face_encodings(img)
    if encodings:
        encodeListknown.append(encodings[0])
        knownNames.append(cls_name)
    else:
        st.warning(f"No face found in reference photo '{cls_name}'; skipping it.")

# Image of the person to be recognized
file_name = st.file_uploader("Upload image")


def add_attendance(name):
    """Append a (name, time) row to today's attendance CSV."""
    current_date = datetime.now().strftime("%Y-%m-%d")
    current_time = datetime.now().strftime("%H:%M:%S")
    if not os.path.isdir('Attendance'):
        os.makedirs('Attendance')
    csv_path = f'Attendance/Attendance-{current_date}.csv'
    # Create the file with a header the first time it is used on a given day
    if not os.path.isfile(csv_path):
        with open(csv_path, 'w') as f:
            f.write('Name,Time\n')
    with open(csv_path, 'a') as f:
        f.write(f'{name},{current_time}\n')


if file_name is not None:
    test_image = Image.open(file_name).convert("RGB")
    image = np.asarray(test_image)

    # Detect on a quarter-size copy for speed; PIL already gives RGB,
    # so no colour conversion is needed here
    imgS = cv2.resize(image, (0, 0), None, 0.25, 0.25)

    facesCurFrame = face_recognition.face_locations(imgS)
    encodesCurFrame = face_recognition.face_encodings(imgS, facesCurFrame)

    # Names recognized for all faces in the image
    recognized_names = []

    # Checking if faces are detected
    if len(encodesCurFrame) > 0:
        # Draw all annotations on a single copy of the full-size image
        image_copy = image.copy()

        for encodeFace, faceLoc in zip(encodesCurFrame, facesCurFrame):
            matches = face_recognition.compare_faces(encodeListknown, encodeFace)
            faceDis = face_recognition.face_distance(encodeListknown, encodeFace)

            # Default to Unknown, then overwrite if a known face matches
            name = "Unknown"
            if True in matches:
                matchIndex = np.argmin(faceDis)
                name = knownNames[matchIndex].upper()

            recognized_names.append(name)

            # Scale the face location back up to the full-size image (detection ran at 0.25x)
            y1, x2, y2, x1 = faceLoc
            y1, x2, y2, x1 = y1 * 4, x2 * 4, y2 * 4, x1 * 4

            # Draw the bounding box and a filled label bar with the name
            cv2.rectangle(image_copy, (x1, y1), (x2, y2), (0, 255, 0), 2)
            cv2.rectangle(image_copy, (x1, y2 - 35), (x2, y2), (0, 255, 0), cv2.FILLED)
            cv2.putText(image_copy, name, (x1 + 6, y2 - 6),
                        cv2.FONT_HERSHEY_COMPLEX, 1, (255, 255, 255), 2)

            # Update the attendance record only for recognized people
            if name != "Unknown":
                add_attendance(name)

        # Display the image with recognized names
        st.image(image_copy, use_column_width=True, output_format="PNG")

        # Display recognized names
        st.write("Recognized Names:")
        for i, name in enumerate(recognized_names):
            st.write(f"Face {i + 1}: {name}")
    else:
        st.warning("No faces detected in the image. Face recognition failed.")
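
# Usage sketch (assumptions: this script is saved as app.py and the reference
# photos live in the "photos/" folder referenced above; the filename app.py is
# a placeholder, not something fixed by the script itself):
#
#   pip install streamlit face_recognition opencv-python-headless pillow numpy
#   streamlit run app.py
#
# Attendance rows are appended to "Attendance/Attendance-<date>.csv", one file per day.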