//
// Created by buk on 8/29/24.
//
#include <chrono>
#include <cstring>
#include <fstream>
#include <future>
#include <stdexcept>
#include <string>
#include <thread>
#include <vector>

#include <opencv2/opencv.hpp>
#include "spdlog/spdlog.h"

#include "../../inc/arcsoft_face_sdk.h"
#include "../../inc/amcomdef.h"
#include "../../inc/asvloffscreen.h"
#include "../../inc/merror.h"

#include "ai_service.h"
#include "../base/system_config.h"
#include "../base/utils.h"

#define APPID "6v2wZSPMmzbEYQNHqooHFS7iuByHgyAxPoSK7KS9Maz9"
#define SDKKEY "CSL6ULxtXfn6Nu3kaFDwf4F93t4Djfg8NciWXHZAkhTv"

#define NSCALE 16
#define FACENUM 5

// Null-safe release macros. Wrapped in do { } while (0) so each macro
// expands to a single statement: the original bare-brace form broke when
// used in an un-braced if/else ("if (x) SafeFree(p); else ...") because
// the trailing semicolon terminated the if before the else.
#define SafeFree(p)      \
    do                   \
    {                    \
        if ((p))         \
            free(p);     \
        (p) = NULL;      \
    } while (0)
#define SafeArrayDelete(p) \
    do                     \
    {                      \
        if ((p))           \
            delete[] (p);  \
        (p) = NULL;        \
    } while (0)
#define SafeDelete(p)   \
    do                  \
    {                   \
        if ((p))        \
            delete (p); \
        (p) = NULL;     \
    } while (0)

namespace ai::facial_recognition
{

    /**
     * Resize a BGR frame to width x height and convert it to NV21.
     *
     * OpenCV has no direct BGR->NV21 conversion code, and the previous
     * implementation returned planar I420 (Y, then U plane, then V plane)
     * while the caller tagged the buffer ASVL_PAF_NV21 — giving the SDK
     * swapped/planar chroma. We now convert via I420 and interleave the
     * chroma planes into NV21 order (full Y plane, then interleaved V/U).
     *
     * @param width   target width; must be positive and even (NV21 chroma is 2x2 subsampled)
     * @param height  target height; must be positive and even
     * @param frame1  source BGR image
     * @return        CV_8UC1 Mat of size (height * 3 / 2) x width holding NV21 data
     * @throws std::invalid_argument on non-positive or odd dimensions
     */
    cv::Mat prepare_nv21_image(int width, int height, cv::Mat &frame1)
    {
        // Validate input dimensions.
        if (width <= 0 || height <= 0)
        {
            throw std::invalid_argument("Width and height must be positive.");
        }
        if ((width % 2) != 0 || (height % 2) != 0)
        {
            throw std::invalid_argument("Width and height must be even for NV21.");
        }

        // Resize the image to the requested dimensions.
        cv::Mat resizedFrame;
        cv::resize(frame1, resizedFrame, cv::Size(width, height));

        // BGR -> planar I420 (Y plane, U plane, V plane).
        cv::Mat i420;
        cv::cvtColor(resizedFrame, i420, cv::ColorConversionCodes::COLOR_BGR2YUV_I420);

        // Repack as NV21: copy the Y plane, then interleave V and U.
        cv::Mat nv21Image(height + height / 2, width, CV_8UC1);
        const int ySize = width * height;
        const int chromaSize = ySize / 4; // 2x2 subsampling: one U and one V per 4 pixels

        std::memcpy(nv21Image.data, i420.data, ySize);
        const unsigned char *uPlane = i420.data + ySize;
        const unsigned char *vPlane = uPlane + chromaSize;
        unsigned char *vuInterleaved = nv21Image.data + ySize;
        for (int i = 0; i < chromaSize; ++i)
        {
            vuInterleaved[2 * i] = vPlane[i];     // NV21 stores V first...
            vuInterleaved[2 * i + 1] = uPlane[i]; // ...then U
        }

        return nv21Image;
    }

    /**
     * Compare the feature extracted from the current image against every
     * feature stored in the database; when the confidence exceeds 0.75,
     * record the matched member's id/name/confidence on msg.
     *
     * Side effect: the incoming feature is also persisted (as hex text)
     * via the service's data store before comparison.
     */
    void compare_face_feature(ai::facial_recognition::AIService *service, MHandle handle, ASF_FaceFeature feature1, RequestMessagePtr msg)
    {
        // Persist the freshly extracted feature as a hex string.
        std::string str_feature = convert_unsigned_char_pointer_to_hex(feature1.feature,
                                                                       feature1.featureSize);
        service->_data_store->save_face_feature(str_feature, feature1.featureSize);

        // Copy the feature out of the SDK-owned buffer: a later extraction
        // would overwrite it and make every comparison return 1.
        // std::vector owns the memory, so no leak on exceptions (the old
        // malloc/free pairs leaked if anything threw in between).
        std::vector<MByte> featureCopy(feature1.feature, feature1.feature + feature1.featureSize);
        ASF_FaceFeature copyfeature1 = {nullptr};
        copyfeature1.featureSize = feature1.featureSize;
        copyfeature1.feature = featureCopy.data();

        // Fetch every face feature saved in sqlite and compare one by one.
        std::vector<MByte> existingBuf; // reused across iterations
        auto face_feature_infos = service->_data_store->get_all_face_features();
        for (const auto &info : face_feature_infos)
        {
            existingBuf.assign(info.face_feature_size, 0);
            convert_hex_str_to_char(info.face_feature, existingBuf.data());

            ASF_FaceFeature existed_feature = {nullptr};
            existed_feature.featureSize = info.face_feature_size;
            existed_feature.feature = existingBuf.data();

            // Single-face feature comparison. Initialize the output: the SDK
            // only writes it on success.
            MFloat confidenceLevel = 0.0f;
            auto res = ASFFaceFeatureCompare(handle, &copyfeature1, &existed_feature, &confidenceLevel);
            if (res != MOK)
                spdlog::error("ASFFaceFeatureCompare fail: {0:d}.", res);
            else if (confidenceLevel > 0.75)
            {
                spdlog::info("ASFFaceFeatureCompare success: {0:d}, {1:s}, {2:f}.", info.member_id,
                             info.member_name, confidenceLevel);
                // Record the recognized member so the caller can save the snapshot.
                msg->member_id = info.member_id;
                msg->member_name = info.member_name;
                msg->confidence_level = confidenceLevel;
            }
        }
    }

    /**
     * Run face detection + feature extraction on one BGR frame, then match
     * the feature against the database via compare_face_feature().
     * Errors are reported through msg->context["errmsg"]; the engine handle
     * is always returned to the pool.
     */
    void exec_image_facial_recognition(ai::facial_recognition::AIService *service, cv::Mat &frame1, RequestMessagePtr msg)
    {
        auto handle = service->engine_pool.getEngineHandle();
        try
        {
            // The SDK needs width % 4 == 0 and height % 2 == 0. Round DOWN to
            // the nearest aligned size; the previous "cols - (4 - cols % 4)"
            // shaved a full step off images that were already aligned.
            const int width = frame1.cols & ~3;
            const int height = frame1.rows & ~1;

            // prepare_nv21_image returns a prvalue — no std::move needed.
            cv::Mat nv21Image = prepare_nv21_image(width, height, frame1);

            // Copy the pixel data into a buffer whose lifetime we control.
            // std::vector frees it even if an exception is thrown below
            // (the previous raw new[] leaked on that path).
            const size_t img_length = nv21Image.total() * nv21Image.channels();
            std::vector<unsigned char> imageData1(nv21Image.data, nv21Image.data + img_length);

            ASVLOFFSCREEN offscreen1 = {0};
            ColorSpaceConversion(width, height, ASVL_PAF_NV21, imageData1.data(), offscreen1);

            ASF_MultiFaceInfo detectedFaces1 = {nullptr};
            ASF_SingleFaceInfo SingleDetectedFaces = {0};
            ASF_FaceFeature feature1 = {nullptr};

            auto res = ASFDetectFacesEx(handle, &offscreen1, &detectedFaces1);

            // A non-MOK result is a failure regardless of faceNum; the old
            // "res != MOK && faceNum > 0" let failed calls fall through to
            // the "no face" branch.
            if (res != MOK)
            {
                spdlog::debug("ASFDetectFaces 1 fail: {0:d}.", res);
                msg->context.put("errmsg", std::format("检测人脸失败: {}.", res));
            }
            else if (detectedFaces1.faceNum < 1)
            {
                spdlog::debug("ASFDetectFaces 1 faces number: {0:d}.", detectedFaces1.faceNum);
                msg->context.put("errmsg", std::format("未检测到人脸: {}, {}.", res, detectedFaces1.faceNum));
            }
            else
            {
                // Use the first detected face only.
                SingleDetectedFaces.faceRect.left = detectedFaces1.faceRect[0].left;
                SingleDetectedFaces.faceRect.top = detectedFaces1.faceRect[0].top;
                SingleDetectedFaces.faceRect.right = detectedFaces1.faceRect[0].right;
                SingleDetectedFaces.faceRect.bottom = detectedFaces1.faceRect[0].bottom;
                SingleDetectedFaces.faceOrient = detectedFaces1.faceOrient[0];

                // Single-face feature extraction.
                res = ASFFaceFeatureExtractEx(handle, &offscreen1, &SingleDetectedFaces, &feature1);
                if (res != MOK)
                {
                    spdlog::debug("ASFFaceFeatureExtractEx 1 fail: {0:d}.", res);
                    msg->context.put("errmsg", std::format("人脸特征提取失败: {}.", res));
                }
                else
                {
                    spdlog::debug("ASFFaceFeatureExtractEx 1 success: {0:d}.", res);
                    spdlog::debug(" featureSize: {0:d}.", feature1.featureSize);

                    // Expose the extracted feature (hex) to the caller.
                    std::string str_feature = convert_unsigned_char_pointer_to_hex(feature1.feature,
                                                                                   feature1.featureSize);
                    msg->context.put("face_feature", str_feature);
                    msg->context.put<int>("face_feature_size", feature1.featureSize);

                    compare_face_feature(service, handle, feature1, msg);
                }
            }
        }
        catch (std::exception &ex)
        {
            spdlog::error(ex.what());
        }

        service->engine_pool.releaseEngineHandle(handle);
    }

    /**
     * Worker loop: drain frames from the service queue and run facial
     * recognition on each until rtsp_connected is cleared. When a member is
     * recognized (member_id > 0) the frame is written to the configured
     * logs directory as <uuid>.jpg.
     */
    void exec_video_facial_recognition(ai::facial_recognition::AIService *service)
    {
        spdlog::debug("entering exec_video_facial_recognition");

        cv::Mat frame1;
        auto msg = std::make_shared<RequestMessage>();

        SystemConfig system_config;
        auto logs_path = system_config.readKey<std::string>("log", "facial_recognition_logs_path", "");
        msg->facial_regcognition_logs_path = logs_path;

        while (service->rtsp_connected)
        {
            msg->member_id = 0; // reset match marker for this frame
            try
            {
                if (service->queue.try_pop(frame1))
                {
                    exec_image_facial_recognition(service, frame1, msg);
                    if (msg->member_id > 0)
                    {
                        // Save the snapshot of the recognized member.
                        msg->image_file_name = std::format("{}/{}.jpg", msg->facial_regcognition_logs_path, generate_uuid());
                        cv::imwrite(msg->image_file_name, frame1);

                        // TODO: write the recognition result back to the database.
                    }
                    frame1.release();
                }
                else
                {
                    // Queue empty: yield briefly instead of busy-spinning at
                    // 100% CPU (the original loop had no backoff).
                    std::this_thread::sleep_for(std::chrono::milliseconds(10));
                }
            }
            catch (std::exception &ex)
            {
                spdlog::error("exec_video_facial_recognition: {0:s}.", ex.what());
            }
            catch (...)
            {
                spdlog::error("exec_video_facial_recognition exception");
            }
        }

        spdlog::debug("exit exec_video_facial_recognition");
    }

    /**
     * Reader loop for one RTSP device: pull frames and push roughly one
     * every 200 ms into the service queue until rtsp_connected is cleared.
     * Reconnects with back-off when the stream cannot be opened or read.
     */
    void exec_connect_rtsp_server(ai::facial_recognition::AIService *service, const std::string device_id, const std::string rtsp_url)
    {
        // CAP_FFMPEG: decode via ffmpeg.
        auto stream1 = cv::VideoCapture(rtsp_url, cv::CAP_FFMPEG);

        spdlog::debug("entering exec_connect_rtsp_server");

        cv::Mat frame1;

        // Timestamp of the last frame handed to the queue. steady_clock is
        // monotonic, so the interval cannot go backwards on clock adjustments
        // (high_resolution_clock gives no such guarantee).
        auto start = std::chrono::steady_clock::now();

        while (service->rtsp_connected)
        {
            std::this_thread::sleep_for(std::chrono::milliseconds(50));
            if (!stream1.isOpened())
            {
                std::cout << "有视频流未打开, waiting for 30s" << std::endl;
                std::this_thread::sleep_for(std::chrono::seconds(30));
                stream1.release();
                std::this_thread::sleep_for(std::chrono::seconds(2));
                // Keep the ffmpeg backend on reconnect; the original reopen
                // omitted it and could pick a different backend.
                stream1.open(rtsp_url, cv::CAP_FFMPEG);
                continue;
            }

            if (!stream1.read(frame1))
            {
                std::cout << "有视频流未读取, waiting for 10s" << std::endl;

                std::this_thread::sleep_for(std::chrono::seconds(10));
                stream1.release();
                std::this_thread::sleep_for(std::chrono::seconds(2));
                stream1.open(rtsp_url, cv::CAP_FFMPEG);
                continue;
            }

            // NOTE(review): waitKey only works with a HighGUI window; without
            // one it returns -1 immediately, so this ESC check is inert in
            // headless runs — confirm whether it can be removed.
            if (cv::waitKey(1) == 27)
            {
                break;
            }

            // Throttle: enqueue at most ~5 frames per second.
            auto end = std::chrono::steady_clock::now();
            std::chrono::duration<double, std::milli> elapsed = end - start;
            if (elapsed.count() > 200)
            {
                // spdlog::debug("rstp_device: {0:s}, {1:s}.", device_id, rtsp_url);
                service->queue.try_push(frame1);

                start = end;
            }

            frame1.release();
        }

        if (stream1.isOpened())
        {
            stream1.release();
        }

        spdlog::debug("exit exec_connect_rtsp_server: {0:s},  {1:s}.", device_id, rtsp_url);
    }

    // Default constructor: collaborators (data store, engine pool contents)
    // are wired up separately, e.g. via set_data_store().
    AIService::AIService()
    {
    }

    // Destructor: nothing to release here; _data_store is non-owning and
    // threads are joined via disconnect_rtsp().
    AIService::~AIService()
    {
    }

    // Lifecycle hook; currently a no-op placeholder.
    void AIService::start()
    {
    }

    // Lifecycle hook; currently a no-op placeholder.
    void AIService::stop()
    {
    }

    // Not yet implemented; feature extraction currently happens inside
    // exec_image_facial_recognition().
    void AIService::extract_face_feature()
    {
    }

    // Not yet implemented; detection currently happens inside
    // exec_image_facial_recognition().
    void AIService::detect_faces()
    {
    }

    /**
     * Handle a web request: load the uploaded JPG named by msg and run
     * facial recognition on it. Results and errors are reported through
     * msg->context ("errcode" 0 = success, 10000 = failure; "errmsg" holds
     * the failure description).
     */
    void AIService::onWebRequest(RequestMessagePtr msg)
    {
        // Reset the response context; errcode 10000 marks "not succeeded yet".
        msg->context.clear();
        msg->context.put<int>("errcode", 10000);
        msg->context.put("errmsg", "");

        auto handle = engine_pool.getEngineHandle();
        bool succeeded = false;
        try
        {
            // Load the JPG image from the web root.
            auto file_name = msg->web_root + "/" + msg->image_file_name;
            cv::Mat frame1 = cv::imread(file_name, cv::IMREAD_COLOR);
            if (frame1.empty())
            {
                msg->context.put("errmsg", "图片读取失败");
                spdlog::error("图片{0}读取失败", file_name);
                // Fall through: the original early return here leaked the
                // engine handle by skipping releaseEngineHandle().
            }
            else
            {
                spdlog::debug("图片{0}读取完成", file_name);

                exec_image_facial_recognition(this, frame1, msg);
                succeeded = true;
            }

            // NOTE: optional SDK features were prototyped here and removed as
            // dead commented-out code: ASFSetLivenessParam (liveness
            // thresholds, SDK defaults IR 0.7 / RGB 0.5) and ASFProcessEx with
            // ASF_AGE | ASF_GENDER | ASF_FACE3DANGLE | ASF_LIVENESS followed by
            // ASFGetAge / ASFGetGender / ASFGetFace3DAngle. See the ArcSoft
            // samples if they need to be re-enabled.
        }
        catch (std::exception &ex)
        {
            spdlog::error(ex.what());
        }

        // Always return the handle to the pool, on every path.
        engine_pool.releaseEngineHandle(handle);

        // Only report success when the pipeline actually ran (the original
        // unconditionally wrote errcode 0, even after an exception).
        if (succeeded)
            msg->context.put<int>("errcode", 0);
    }

    /**
     * Start the recognition pipeline: one worker thread that drains the
     * frame queue, plus one RTSP reader thread per device configured in
     * sqlite. Threads run until disconnect_rtsp() clears rtsp_connected.
     */
    void AIService::connect_rtsp()
    {
        this->rtsp_connected = true;

        // Recognition worker thread. emplace_back constructs the std::thread
        // in place; the previous std::move(std::thread(...)) wrapping was
        // redundant (a prvalue already moves).
        device_thread_manager.threads.emplace_back(
            [this]()
            { exec_video_facial_recognition(this); });

        // Get the device list from sqlite.
        auto device_infos = _data_store->get_all_devices();

        // One RTSP reader thread per device; multiple streams are supported.
        for (const auto &device_info : device_infos)
        {
            auto device_id = std::to_string(device_info.device_id);
            auto rtsp_url = device_info.rtsp_url;

            // Capture by value: the thread outlives this loop iteration.
            device_thread_manager.threads.emplace_back(
                [this, device_id, rtsp_url]()
                { exec_connect_rtsp_server(this, device_id, rtsp_url); });
        }
    }

    // Signal all reader/worker loops to exit (they poll rtsp_connected) and
    // block until every thread has finished.
    void AIService::disconnect_rtsp()
    {
        this->rtsp_connected = false;
        device_thread_manager.waitForAll();
    }

    // Inject the persistence backend. The pointer is non-owning: the caller
    // retains ownership and must keep the store alive while the service runs.
    void AIService::set_data_store(DataStore *data_store)
    {
        this->_data_store = data_store;
    }
}
