﻿
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Runtime.InteropServices;
using YangpuSign.Application.Service.Face;
namespace YangpuSign.Application.Service.Face
{
    /// <summary>
    /// Wrapper around the ArcSoft face SDK: activation, engine initialization,
    /// face detection, feature extraction, RGB liveness checking and a simple
    /// engine-handle pool (<see cref="IEnginePoor"/>).
    /// All native memory handed to / received from the SDK is allocated with
    /// <see cref="Marshal.AllocHGlobal(int)"/> and must be released with
    /// <see cref="Marshal.FreeHGlobal(IntPtr)"/>.
    /// </summary>
    public class Arcsoft_Face_Action : ASFFunctions, IEnginePoor
    {
        //********** Parameters **********

        /// <summary>ArcSoft application id used for online activation.</summary>
        public string AppID { get; }

        /// <summary>ArcSoft application key used for online activation.</summary>
        public string AppKey { get; }

        /// <summary>Number of full-featured (liveness + 3D angle) engines to pool.</summary>
        public int FaceEngineNums { get; set; }

        /// <summary>Number of basic detect/recognize engines to pool.</summary>
        public int IDEngineNums { get; set; }

        /// <summary>Number of AI (age/gender) engines to pool.</summary>
        public int AIEngineNums { get; set; }

        /// <summary>Pool of full-featured engine handles.</summary>
        public ConcurrentQueue<IntPtr> FaceEnginePoor { get; set; }

        /// <summary>Pool of basic engine handles.</summary>
        public ConcurrentQueue<IntPtr> IDEnginePoor { get; set; }

        /// <summary>Pool of AI engine handles.</summary>
        public ConcurrentQueue<IntPtr> AIEnginePoor { get; set; }

        //********** Common operations **********

        /// <summary>
        /// Activates the ArcSoft SDK online with the given credentials.
        /// Return code 0 means freshly activated; 90114 means already activated
        /// (both are treated as success).
        /// </summary>
        /// <param name="appId">ArcSoft application id.</param>
        /// <param name="appKey">ArcSoft application key.</param>
        /// <exception cref="Exception">Thrown when activation fails.</exception>
        public Arcsoft_Face_Action(string appId, string appKey)
        {
            try
            {
                int retCode = ASFOnlineActivation(appId, appKey);
                // 0 = newly activated, 90114 = already activated; anything else is fatal.
                if (retCode != 0 && retCode != 90114)
                {
                    throw new Exception("SDK激活失败，错误码：" + retCode);
                }
                AppID = appId;
                AppKey = appKey;
            }
            catch (Exception ex)
            {
                // FIX: keep the original exception as InnerException instead of
                // discarding the stack trace.
                throw new Exception($"Arcsoft_Face_Action 初始化失败，异常：{ex.Message}", ex);
            }
        }

        /// <summary>
        /// Initializes a native engine with the requested feature mask.
        /// </summary>
        /// <param name="faceMask">Bitmask of engine capabilities (see <see cref="ParmsBestPractice"/>).</param>
        /// <param name="isImageMode">True for still-image mode, false for video mode.</param>
        /// <returns>The native engine handle.</returns>
        /// <exception cref="Exception">Thrown when engine initialization fails.</exception>
        public static IntPtr InitASFEnginePtr(int faceMask, bool isImageMode = true)
        {
            IntPtr pEngines = IntPtr.Zero;
            try
            {
                int retCode;
                if (isImageMode)
                {
                    // NOTE(review): the image branch uses DetectionMode while the video
                    // branch uses ASF_DetectMode — presumably two SDK enum aliases;
                    // kept as-is to avoid breaking the existing bindings.
                    retCode = ASFInitEngine(DetectionMode.ASF_DETECT_MODE_IMAGE, ASF_OrientPriority.ASF_OP_ALL_OUT, ParmsBestPractice.detectFaceScaleVal_Image, ParmsBestPractice.detectFaceMaxNum, faceMask, ref pEngines);
                }
                else
                {
                    retCode = ASFInitEngine(ASF_DetectMode.ASF_DETECT_MODE_VIDEO, ASF_OrientPriority.ASF_OP_ALL_OUT, ParmsBestPractice.detectFaceScaleVal_Video, ParmsBestPractice.detectFaceMaxNum, faceMask, ref pEngines);
                }
                if (retCode != 0)
                {
                    throw new Exception("SDK初始化失败，错误码：" + retCode);
                }
                return pEngines;
            }
            catch (Exception ex)
            {
                throw new Exception("ASFFunctions->ASFFunctions, generate exception as: " + ex, ex);
            }
        }

        /// <summary>
        /// Copies a managed feature byte array into a native ASF_FaceFeature struct.
        /// </summary>
        /// <param name="data">Feature bytes; null or empty yields <see cref="IntPtr.Zero"/>.</param>
        /// <returns>
        /// Pointer to a native ASF_FaceFeature (caller must eventually free both the
        /// struct pointer and the inner feature buffer), or <see cref="IntPtr.Zero"/> on failure.
        /// </returns>
        public static IntPtr PutFeatureByteIntoFeatureIntPtr(byte[] data)
        {
            if (data == null || data.Length == 0)
            {
                return IntPtr.Zero;
            }
            IntPtr featureBuffer = IntPtr.Zero;
            IntPtr intPtrFeature = IntPtr.Zero;
            try
            {
                ASF_FaceFeature localFeature = new ASF_FaceFeature();
                localFeature.featureSize = data.Length;
                featureBuffer = Marshal.AllocHGlobal(localFeature.featureSize);
                Marshal.Copy(data, 0, featureBuffer, data.Length);
                localFeature.feature = featureBuffer;
                intPtrFeature = Marshal.AllocHGlobal(Marshal.SizeOf<ASF_FaceFeature>());
                Marshal.StructureToPtr(localFeature, intPtrFeature, false);
                return intPtrFeature;
            }
            catch
            {
                // FIX: free partially-allocated native memory instead of leaking it
                // when a later allocation/marshal step throws.
                if (intPtrFeature != IntPtr.Zero)
                {
                    Marshal.FreeHGlobal(intPtrFeature);
                }
                if (featureBuffer != IntPtr.Zero)
                {
                    Marshal.FreeHGlobal(featureBuffer);
                }
                return IntPtr.Zero;
            }
        }

        /// <summary>
        /// Extracts the largest face's feature from an image stream and returns it
        /// as a native ASF_FaceFeature pointer.
        /// </summary>
        /// <param name="stream">Image data (BMP-readable by ImageHelper).</param>
        /// <param name="limitSize">Maximum allowed stream size in megabytes.</param>
        /// <param name="engine">Initialized engine handle.</param>
        /// <returns>(success, native feature pointer or Zero, error message or null).</returns>
        public static Tuple<bool, IntPtr, string> TryExtractSingleFaceFeature(MemoryStream stream, int limitSize, IntPtr engine)
        {
            // FIX: widen before multiplying; limitSize * 1024 * 1024 overflowed int
            // for limits >= 2048 MB.
            long fileLimit = (long)limitSize * 1024 * 1024;
            IntPtr facePtr = IntPtr.Zero;
            try
            {
                if (stream.Length > fileLimit)
                {
                    throw new Exception($"文件超过{limitSize}M，请压缩后重试！");
                }
                bool detectStatus = ExtractFeaturesFromMemoryStream(stream, engine, out List<byte[]> facesFeature, out string faceDetectError);
                if (!detectStatus)
                {
                    throw new Exception(faceDetectError);
                }
                // FIX: guard against an empty result list before indexing [0].
                if (facesFeature.Count == 0)
                {
                    throw new Exception("需将人脸靠近镜头");
                }
                facePtr = PutFeatureByteIntoFeatureIntPtr(facesFeature[0]);
                return new Tuple<bool, IntPtr, string>(true, facePtr, null);
            }
            catch (Exception ex)
            {
                if (facePtr != IntPtr.Zero)
                {
                    Marshal.FreeHGlobal(facePtr);
                }
                return new Tuple<bool, IntPtr, string>(false, IntPtr.Zero, ex.Message);
            }
        }

        /// <summary>
        /// Extracts the largest face's feature from an image stream as raw bytes.
        /// </summary>
        /// <param name="stream">Image data (BMP-readable by ImageHelper).</param>
        /// <param name="engine">Initialized engine handle.</param>
        /// <returns>Feature bytes of the largest detected face.</returns>
        /// <exception cref="Exception">Thrown when detection or extraction fails.</exception>
        public static byte[] TryExtractSingleFaceFeature(MemoryStream stream, IntPtr engine)
        {
            bool detectStatus = ExtractFeaturesFromMemoryStream(stream, engine, out List<byte[]> facesFeature, out string faceDetectError);
            if (!detectStatus)
            {
                throw new Exception(faceDetectError);
            }
            // FIX: guard against an empty result list before indexing [0]; the
            // original also rewrapped exceptions as new Exception(ex.Message),
            // destroying the stack trace — now errors propagate directly.
            if (facesFeature.Count == 0)
            {
                throw new Exception("需将人脸靠近镜头");
            }
            return facesFeature[0];
        }

        //********** Face recognition operations **********

        /// <summary>
        /// Runs native face detection on an image and returns the marshaled result.
        /// Returns a default (faceNum == 0) struct when detection fails.
        /// </summary>
        /// <param name="pEngine">Initialized engine handle.</param>
        /// <param name="imageInfo">Decoded image (caller retains ownership of imgData).</param>
        public static ASF_MultiFaceInfo DetectMultipleFace(IntPtr pEngine, ImageInfo imageInfo)
        {
            ASF_MultiFaceInfo multiFaceInfo = new ASF_MultiFaceInfo();
            IntPtr pMultiFaceInfo = Marshal.AllocHGlobal(Marshal.SizeOf<ASF_MultiFaceInfo>());
            try
            {
                int detectRes = ASFDetectFaces(pEngine, imageInfo.width, imageInfo.height, imageInfo.format, imageInfo.imgData, pMultiFaceInfo, ASF_DetectModel.ASF_DETECT_MODEL_RGB);

                // FIX: marshal the native result BEFORE inspecting it (the original
                // read faceNum from a still-default struct, so the completeness check
                // never ran), and only marshal on success (the original unconditionally
                // marshaled the buffer, reading uninitialized memory on failure).
                if (detectRes == 0)
                {
                    multiFaceInfo = Marshal.PtrToStructure<ASF_MultiFaceInfo>(pMultiFaceInfo);
                    if (multiFaceInfo.faceNum > 0)
                    {
                        // Check whether the first detected face is fully inside the frame.
                        // FIX: marshal the first MRECT out of the faceRects pointer and
                        // pass (height, width) in the order IsCompleteFace declares.
                        MRECT firstRect = Marshal.PtrToStructure<MRECT>(multiFaceInfo.faceRects);
                        bool isComplete = IsCompleteFace(firstRect, imageInfo.height, imageInfo.width);
                        if (!isComplete)
                        {
                            // Incomplete face: no handling implemented yet (kept from original).
                        }
                    }
                }
                return multiFaceInfo;
            }
            catch
            {
                // Best-effort: on any marshal/native failure, report "no faces".
                return multiFaceInfo;
            }
            finally
            {
                Marshal.FreeHGlobal(pMultiFaceInfo);
            }
        }

        /// <summary>
        /// Decodes an image stream, detects faces and extracts the largest face's feature.
        /// </summary>
        /// <param name="ms">Image data (BMP-readable by ImageHelper).</param>
        /// <param name="engine">Initialized engine handle.</param>
        /// <param name="facesFeature">Extracted feature(s); at most one (the largest face).</param>
        /// <param name="errorString">Human-readable failure reason, null on success.</param>
        /// <returns>True when at least one face was detected and processed.</returns>
        public static bool ExtractFeaturesFromMemoryStream(Stream ms, IntPtr engine, out List<byte[]> facesFeature, out string errorString)
        {
            facesFeature = new List<byte[]>();
            errorString = null;
            try
            {
                ImageInfo imageInfo = ImageHelper.ReadBMPFormStream(ms);
                ASF_MultiFaceInfo facesInfo = DetectMultipleFace(engine, imageInfo);
                if (facesInfo.faceNum == 0)
                {
                    errorString = "需将人脸靠近镜头";
                    return false;
                }
                // Multiple faces are allowed; ExtractAllFeatures keeps only the largest.
                facesFeature = ExtractAllFeatures(engine, imageInfo, facesInfo);
                return true;
            }
            catch
            {
                errorString = "算法错误，请检查输入后重试！";
                return false;
            }
            finally
            {
                // NOTE(review): explicit GC.Collect kept from the original; presumably
                // intended to promptly reclaim large image buffers — consider removing.
                GC.Collect();
            }
        }

        /// <summary>
        /// Extracts the feature of the LARGEST face in <paramref name="multiFaceInfo"/>.
        /// Frees <paramref name="imageInfo"/>.imgData before returning (ownership is taken).
        /// </summary>
        /// <param name="pEngine">Initialized engine handle.</param>
        /// <param name="imageInfo">Decoded image; its imgData is freed here.</param>
        /// <param name="multiFaceInfo">Detection result from <see cref="DetectMultipleFace"/>.</param>
        /// <returns>List with zero or one feature byte arrays.</returns>
        public static List<byte[]> ExtractAllFeatures(IntPtr pEngine, ImageInfo imageInfo, ASF_MultiFaceInfo multiFaceInfo)
        {
            try
            {
                List<byte[]> results = new List<byte[]>();
                int maxArea = 0;
                int largestIndex = -1;
                // Find the face with the largest bounding-box area.
                for (int index = 0; index < multiFaceInfo.faceNum; index++)
                {
                    try
                    {
                        MRECT rect = Marshal.PtrToStructure<MRECT>(multiFaceInfo.faceRects + Marshal.SizeOf<MRECT>() * index);
                        int area = (rect.right - rect.left) * (rect.bottom - rect.top);
                        if (maxArea <= area)
                        {
                            maxArea = area;
                            largestIndex = index;
                        }
                    }
                    catch (Exception ex)
                    {
                        // Skip a rect that fails to marshal; keep scanning the rest.
                        Console.WriteLine(ex.Message);
                    }
                }
                if (largestIndex != -1)
                {
                    MRECT faceRect = Marshal.PtrToStructure<MRECT>(multiFaceInfo.faceRects + Marshal.SizeOf<MRECT>() * largestIndex);
                    int faceOrient = Marshal.PtrToStructure<int>(multiFaceInfo.faceOrients + Marshal.SizeOf<int>() * largestIndex);
                    byte[] singleFaceFeature = ExtractSingleFaceFeature(pEngine, imageInfo, faceRect, faceOrient);
                    if (singleFaceFeature != null)
                    {
                        results.Add(singleFaceFeature);
                    }
                }
                return results;
            }
            catch (Exception ex)
            {
                throw new Exception("Arcsoft_Face_Action-->ExtractAllFeatures exception " + ex, ex);
            }
            finally
            {
                // Image pixel buffer is consumed here and must not be reused by callers.
                Marshal.FreeHGlobal(imageInfo.imgData);
            }
        }

        /// <summary>
        /// Extracts the feature for one face rect/orientation pair.
        /// </summary>
        /// <returns>Feature bytes, or null when the SDK returns a non-fatal failure.</returns>
        /// <exception cref="Exception">Thrown on low-confidence result (code 81925) or marshal failure.</exception>
        private static byte[] ExtractSingleFaceFeature(IntPtr pEngine, ImageInfo imageInfo, MRECT rect, int faceOrient)
        {
            var singleFaceInfo = new ASF_SingleFaceInfo();
            singleFaceInfo.faceRect = rect;
            singleFaceInfo.faceOrient = faceOrient;
            IntPtr pSingleFaceInfo = Marshal.AllocHGlobal(Marshal.SizeOf<ASF_SingleFaceInfo>());
            Marshal.StructureToPtr(singleFaceInfo, pSingleFaceInfo, false);
            IntPtr pFaceFeature = Marshal.AllocHGlobal(Marshal.SizeOf<ASF_FaceFeature>());
            try
            {
                int retCode = ASFFaceFeatureExtract(pEngine, imageInfo.width, imageInfo.height, imageInfo.format, imageInfo.imgData, pSingleFaceInfo, pFaceFeature);
                if (retCode == 0)
                {
                    // Copy the SDK-owned feature buffer into managed memory.
                    ASF_FaceFeature faceFeature = Marshal.PtrToStructure<ASF_FaceFeature>(pFaceFeature);
                    byte[] feature = new byte[faceFeature.featureSize];
                    Marshal.Copy(faceFeature.feature, feature, 0, faceFeature.featureSize);
                    return feature;
                }
                if (retCode == 81925)
                {
                    // Low-confidence detection is surfaced as an error rather than null.
                    throw new Exception("人脸特征检测结果置信度低!");
                }
                return null;
            }
            catch (Exception ex)
            {
                throw new Exception($"Arcsoft_Face_Action-->ExtractSingleFaceFeature exception: {ex.Message}", ex);
            }
            finally
            {
                Marshal.FreeHGlobal(pSingleFaceInfo);
                Marshal.FreeHGlobal(pFaceFeature);
            }
        }

        //********** Liveness detection operations **********

        /// <summary>
        /// Runs single-face RGB liveness detection on an image stream.
        /// </summary>
        /// <param name="stream">Image data (BMP-readable by ImageHelper).</param>
        /// <param name="limitSize">Maximum allowed stream size in megabytes.</param>
        /// <param name="engine">Engine handle initialized with the liveness mask.</param>
        /// <returns>(isAlive, verdict or error message).</returns>
        public static Tuple<bool, string> IsAliveFace(MemoryStream stream, int limitSize, IntPtr engine)
        {
            // FIX: widen before multiplying to avoid int overflow (see TryExtractSingleFaceFeature).
            long fileLimit = (long)limitSize * 1024 * 1024;
            int aliveMask = FaceEngineMask.ASF_LIVENESS;
            IntPtr singleFaceInfo = Marshal.AllocHGlobal(Marshal.SizeOf<ASF_MultiFaceInfo>());
            IntPtr pLivenessInfo = Marshal.AllocHGlobal(Marshal.SizeOf<ASF_LivenessInfo>());
            ImageInfo imageInfo = new ImageInfo();

            try
            {
                if (stream.Length > fileLimit)
                {
                    throw new Exception($"文件超过{limitSize}M，请压缩后重试！");
                }
                imageInfo = ImageHelper.ReadBMPFormStream(stream);
                ASF_MultiFaceInfo multiFaceInfo = DetectMultipleFace(engine, imageInfo);
                if (multiFaceInfo.faceNum != 1)
                {
                    throw new Exception($"识别人数不为1，此功能仅支持单人模式！");
                }
                Marshal.StructureToPtr(multiFaceInfo, singleFaceInfo, false);
                int retCode_Process = ASFProcess(engine, imageInfo.width, imageInfo.height, imageInfo.format, imageInfo.imgData, singleFaceInfo, aliveMask);
                if (retCode_Process != 0)
                {
                    throw new Exception($"活体检验失败，错误码 {retCode_Process}");
                }
                int retCode_Score = ASFGetLivenessScore(engine, pLivenessInfo);
                if (retCode_Score != 0)
                {
                    throw new Exception($"获取活体检测结果失败，错误码 {retCode_Score}");
                }
                ASF_LivenessInfo livenessInfo = Marshal.PtrToStructure<ASF_LivenessInfo>(pLivenessInfo);
                if (livenessInfo.num != 1)
                {
                    throw new Exception($"活体检测为非单人结果，实际检测到人数{livenessInfo.num}");
                }
                // isLive points at per-face int results: 1 live, 0 spoof, -1 undecided.
                int result = Marshal.PtrToStructure<int>(livenessInfo.isLive);
                if (result == 1)
                {
                    return new Tuple<bool, string>(true, "活人");
                }
                if (result == 0)
                {
                    return new Tuple<bool, string>(false, "假人");
                }
                if (result == -1)
                {
                    return new Tuple<bool, string>(false, "不确定，算法拒判");
                }
                return new Tuple<bool, string>(false, $"需将人脸靠近镜头！");
            }
            catch (Exception ex)
            {
                return new Tuple<bool, string>(false, ex.Message);
            }
            finally
            {
                Marshal.FreeHGlobal(singleFaceInfo);
                Marshal.FreeHGlobal(pLivenessInfo);
                Marshal.FreeHGlobal(imageInfo.imgData);
                // NOTE(review): explicit GC.Collect kept from the original.
                GC.Collect();
            }
        }

        //********** Engine pool operations **********

        /// <summary>
        /// Fills the three engine pools according to the configured counts.
        /// </summary>
        /// <returns>0 on success.</returns>
        /// <exception cref="Exception">Thrown when any engine fails to initialize.</exception>
        private int InitEnginePool()
        {
            try
            {
                // FIX: the original constructed a throwaway Arcsoft_Face_Action per
                // engine purely to re-activate the SDK; this instance's constructor
                // has already performed activation, so those calls were redundant.
                for (int index = 0; index < FaceEngineNums; index++)
                {
                    IntPtr enginePtr = InitASFEnginePtr(ParmsBestPractice.faceEnhencementMask);
                    PutEngine(FaceEnginePoor, enginePtr);
                    Console.WriteLine($"FaceEnginePoor add {enginePtr}");
                }
                for (int index = 0; index < IDEngineNums; index++)
                {
                    IntPtr enginePtr = InitASFEnginePtr(ParmsBestPractice.faceBaseMask);
                    PutEngine(IDEnginePoor, enginePtr);
                    Console.WriteLine($"IDEnginePoor add {enginePtr}");
                }
                for (int index = 0; index < AIEngineNums; index++)
                {
                    IntPtr enginePtr = InitASFEnginePtr(ParmsBestPractice.aiMask);
                    PutEngine(AIEnginePoor, enginePtr);
                    Console.WriteLine($"AIEnginePoor add {enginePtr}");
                }
                return 0;
            }
            catch (Exception ex)
            {
                throw new Exception($"InitEnginePool--> exception {ex}", ex);
            }
        }

        /// <summary>
        /// Takes an engine handle from a pool, or <see cref="IntPtr.Zero"/> when empty.
        /// </summary>
        public IntPtr GetEngine(ConcurrentQueue<IntPtr> queue)
        {
            return queue.TryDequeue(out IntPtr item) ? item : IntPtr.Zero;
        }

        /// <summary>
        /// Returns a non-zero engine handle to a pool; Zero handles are discarded.
        /// </summary>
        public void PutEngine(ConcurrentQueue<IntPtr> queue, IntPtr item)
        {
            if (item != IntPtr.Zero)
            {
                queue.Enqueue(item);
            }
        }

        /// <summary>
        /// (Re)creates the three engine pools with the requested sizes.
        /// </summary>
        /// <param name="faceEngineNums">Full-featured engines to create.</param>
        /// <param name="idEngineNums">Basic engines to create.</param>
        /// <param name="aiEngineNums">AI engines to create.</param>
        /// <exception cref="Exception">Thrown when pool initialization fails.</exception>
        public void Arcsoft_EnginePool(int faceEngineNums = 1, int idEngineNums = 0, int aiEngineNums = 0)
        {
            FaceEnginePoor = new ConcurrentQueue<IntPtr>();
            IDEnginePoor = new ConcurrentQueue<IntPtr>();
            AIEnginePoor = new ConcurrentQueue<IntPtr>();
            try
            {
                FaceEngineNums = faceEngineNums;
                IDEngineNums = idEngineNums;
                AIEngineNums = aiEngineNums;
                int status = InitEnginePool();
                if (status != 0)
                {
                    throw new Exception("引擎池初始化失败！");
                }
            }
            catch (Exception ex)
            {
                throw new Exception($"ArcSoft_EnginePool-->ArcSoft_EnginePool exception as: {ex}", ex);
            }
        }

        /// <summary>
        /// Heuristically checks whether a face rect is a "complete" face: sufficient
        /// margin to every image edge (extra margin on top for the forehead) and a
        /// roughly square aspect ratio.
        /// </summary>
        /// <param name="faceRect">Face bounding box.</param>
        /// <param name="imageHeight">Image height in pixels. Note: height comes FIRST.</param>
        /// <param name="imageWidth">Image width in pixels.</param>
        /// <returns>True when the face appears fully inside the frame.</returns>
        public static bool IsCompleteFace(MRECT faceRect, int imageHeight, int imageWidth)
        {
            // Distance from each face edge to the image border, as a fraction of the image.
            float topMarginRatio = (float)faceRect.top / imageHeight;
            float bottomMarginRatio = (float)(imageHeight - faceRect.bottom) / imageHeight;
            float leftMarginRatio = (float)faceRect.left / imageWidth;
            float rightMarginRatio = (float)(imageWidth - faceRect.right) / imageWidth;

            // Width / height of the face box.
            float faceAspectRatio = (float)(faceRect.right - faceRect.left) / (faceRect.bottom - faceRect.top);

            // Thresholds.
            const float MIN_TOP_MARGIN_RATIO = 0.05f;    // larger top margin so the forehead is included
            const float MIN_MARGIN_RATIO = 0.02f;        // minimum margin on the other edges
            const float MIN_ASPECT_RATIO = 0.7f;         // narrowest acceptable face box
            const float MAX_ASPECT_RATIO = 1.3f;         // widest acceptable face box

            bool hasEnoughMargin = topMarginRatio >= MIN_TOP_MARGIN_RATIO &&
                                  bottomMarginRatio >= MIN_MARGIN_RATIO &&
                                  leftMarginRatio >= MIN_MARGIN_RATIO &&
                                  rightMarginRatio >= MIN_MARGIN_RATIO;

            bool hasValidAspectRatio = faceAspectRatio >= MIN_ASPECT_RATIO &&
                                      faceAspectRatio <= MAX_ASPECT_RATIO;

            return hasEnoughMargin && hasValidAspectRatio;
        }
    }

    /// <summary>
    /// Recommended engine-initialization parameters and capability masks for the
    /// ArcSoft SDK, as used by <see cref="Arcsoft_Face_Action"/>.
    /// </summary>
    public struct ParmsBestPractice
    {
        // VIDEO mode: valid range [2,32], recommended value 16.
        public const int detectFaceScaleVal_Video = 16;

        // IMAGE mode: valid range [2,32]; the original comment recommends 30,
        // but the value in use is 32 — NOTE(review): confirm which is intended.
        public const int detectFaceScaleVal_Image = 32;

        // Maximum number of faces to detect, valid range [1,50].
        public const int detectFaceMaxNum = 50;

        // Basic face recognition capabilities (detect + recognize).
        public const int faceBaseMask = FaceEngineMask.ASF_FACE_DETECT | FaceEngineMask.ASF_FACERECOGNITION;

        // Enhanced capabilities: basic + RGB liveness + 3D face-angle detection.
        public const int faceEnhencementMask = faceBaseMask | FaceEngineMask.ASF_LIVENESS | FaceEngineMask.ASF_FACE3DANGLE;

        // AI capabilities: basic + age + gender estimation.
        public const int aiMask = faceBaseMask | FaceEngineMask.ASF_AGE | FaceEngineMask.ASF_GENDER;
    }
}
