import shutil
import cv2
import os
import dlib
import numpy as np
import psycopg2

def extract_face_features(dataset_path, confidence_threshold=0.5):
    """Detect the most confident face in an image and return its 128-D dlib descriptor.

    Args:
        dataset_path: Path to the image file to process.
        confidence_threshold: Minimum SSD detection confidence for a box to
            be accepted (default 0.5, matching the original behavior).

    Returns:
        numpy.ndarray of shape (128,): the dlib face-recognition descriptor.

    Raises:
        FileNotFoundError: If the image cannot be read.
        ValueError: If no face is detected above ``confidence_threshold``.
    """
    predictor_path = "E:\\shape_predict\\shape_predictor_68_face_landmarks.dat"
    face_rec_model_path = "E:\\facial recognition\\dlib_face_recognition_resnet_model_v1.dat"
    prototxt_path = 'E:\\opencv\\opencv-master\\samples\\dnn\\face_detector\\deploy.prototxt'
    model_path = 'E:\\facial recognition\\facial_recognition-master\\res10_300x300_ssd_iter_140000.caffemodel'
    net = cv2.dnn.readNetFromCaffe(prototxt_path, model_path)
    sp = dlib.shape_predictor(predictor_path)
    facerec = dlib.face_recognition_model_v1(face_rec_model_path)

    image_bgr = cv2.imread(dataset_path)
    if image_bgr is None:
        raise FileNotFoundError(f"Could not read image: {dataset_path}")
    (h, w) = image_bgr.shape[:2]

    # The res10 SSD Caffe model was trained on BGR input; the mean values
    # (104, 177, 123) are in BGR order, so feed the detector the raw BGR image.
    blob = cv2.dnn.blobFromImage(cv2.resize(image_bgr, (300, 300)), 1.0,
                                 (300, 300), (104.0, 177.0, 123.0))
    net.setInput(blob)
    detections = net.forward()

    # Detections are sorted by confidence; take the first one above threshold.
    box = None
    for i in range(detections.shape[2]):
        confidence = detections[0, 0, i, 2]
        if confidence > confidence_threshold:
            box = (detections[0, 0, i, 3:7] * np.array([w, h, w, h])).astype("int")
            break
    if box is None:
        raise ValueError(f"No face detected above confidence "
                         f"{confidence_threshold} in {dataset_path}")

    # dlib's shape predictor and recognition model expect RGB images.
    image_rgb = cv2.cvtColor(image_bgr, cv2.COLOR_BGR2RGB)
    rect = dlib.rectangle(int(box[0]), int(box[1]), int(box[2]), int(box[3]))
    shape = sp(image_rgb, rect)
    face_descriptor = facerec.compute_face_descriptor(image_rgb, shape)
    return np.array(face_descriptor)

def euclidean_distance(a, b):
    """Return the Euclidean (L2) distance between vectors *a* and *b*."""
    difference = np.subtract(a, b)
    return np.linalg.norm(difference)

def compute_similarity(query_vector, database_vector):
    """Similarity score between two feature vectors.

    Lower is more similar: the score is the Euclidean (L2) distance
    between the two vectors.
    """
    return np.linalg.norm(query_vector - database_vector)

if __name__ == '__main__':
    conn = psycopg2.connect(
        host="YOUR_HOST",
        port="YOUR_PORT",
        user="YOUR_USER",
        password="YOUR_PASSWORD",
        database="YOUR_DATABASE"
    )
    try:
        cursor = conn.cursor()

        # Descriptor of the query image to search for.
        query_image_path = "E:\\IMDB\\search\\bl16.jpg"
        query_vector = extract_face_features(query_image_path)

        # Folder containing the source images the stored vectors came from.
        dataset_path = "E:\\IMDB\\wiki1"

        cursor.execute("SELECT id, vector FROM feature_vectors;")
        id_vector_list = cursor.fetchall()

        # Each stored vector is a bracketed, comma-separated text value like
        # "[0.1,0.2,...]" (presumed from the original [1:-1] strip — confirm
        # against the table schema). Strip the brackets and parse the floats.
        # NOTE: fixed from the original `vector[1][1:-1]`, which indexed the
        # second *character* of the already-unpacked string.
        update_query = "UPDATE feature_vectors SET distance = %s WHERE id = %s;"
        for id_, vector_text in id_vector_list:
            db_vector = np.array(vector_text[1:-1].split(','), dtype=float)
            similarity = compute_similarity(query_vector, db_vector)
            cursor.execute(update_query, (similarity, id_))
        # One commit for the whole batch: atomic, and far fewer round trips
        # than the original per-row commit.
        conn.commit()

        destination_folder = "E:\\IMDB\\search_DLIB_20"
        os.makedirs(destination_folder, exist_ok=True)

        # Copy the 20 closest matches into the result folder.
        cursor.execute(
            "SELECT filename FROM feature_vectors ORDER BY distance ASC LIMIT 20;")
        for (filename,) in cursor.fetchall():
            image_path = os.path.join(dataset_path, filename)
            destination_path = os.path.join(destination_folder, filename)
            shutil.copy(image_path, destination_path)
            print(f"Copied {filename} to {destination_folder}")

        cursor.close()
    finally:
        # Always release the connection, even if extraction or a query fails.
        conn.close()