﻿using BaseDll;
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using Emgu.CV;
using Emgu.Util;
using Emgu.Util.TypeEnum;
using Emgu.CV.Structure;
using Emgu.CV.UI;
using System.Drawing;
namespace UserData
{
    /// <summary>
    /// Wraps a 2x3 affine transform (estimated from point correspondences via
    /// Emgu.CV) and applies it to pixel coordinates to obtain machine coordinates.
    /// </summary>
    class AffineTrans2D
    {
        // 2x3 affine matrix [a b c; d e f] produced by CvInvoke.EstimateAffine2D;
        // null until SetVectorToHom2D succeeds (or a caller assigns one).
        public Mat hv_homMat2D = null;

        /// <summary>
        /// Estimates the 2D affine transform mapping source points (VX, VY) onto
        /// destination points (MX, MY) and stores it in <see cref="hv_homMat2D"/>
        /// and the <paramref name="homMat2D"/> ref parameter.
        /// All four arrays must have equal length.
        /// On any failure the previous matrix is left in place and the error is
        /// swallowed (preserves the original best-effort contract).
        /// </summary>
        public void SetVectorToHom2D(double[] VX, double[] VY, double[] MX, double[] MY, ref Mat homMat2D)
        {
            try
            {
                PointF[] srcPoints = new PointF[VX.Length];
                PointF[] dstPoints = new PointF[VX.Length];
                for (int i = 0; i < VX.Length; i++)
                {
                    srcPoints[i] = new PointF((float)VX[i], (float)VY[i]);
                    dstPoints[i] = new PointF((float)MX[i], (float)MY[i]);
                }
                hv_homMat2D = CvInvoke.EstimateAffine2D(srcPoints, dstPoints);
                homMat2D = hv_homMat2D;
            }
            catch (Exception)
            {
                // Deliberately swallowed, as in the original code.
                // NOTE(review): consider logging or surfacing this to the caller.
            }
        }

        /// <summary>
        /// Applies the stored affine transform to a pixel coordinate.
        /// Returns the mapped (x, y); u is left at its default value.
        /// Returns a default XYUPoint when no valid matrix is available.
        /// </summary>
        public XYUPoint GetAffineTransResult(XYUPoint CCDPix)
        {
            try
            {
                XYUPoint targetPos = new XYUPoint();
                // The image view is IDisposable; the original leaked it on every call.
                using (Image<Gray, float> img = hv_homMat2D.ToImage<Gray, float>())
                {
                    float a = img.Data[0, 0, 0];
                    float b = img.Data[0, 1, 0];
                    float c = img.Data[0, 2, 0];
                    float d = img.Data[1, 0, 0];
                    float e = img.Data[1, 1, 0];
                    float f = img.Data[1, 2, 0];

                    targetPos.x = Convert.ToSingle(a * CCDPix.x + b * CCDPix.y + c);
                    targetPos.y = Convert.ToSingle(d * CCDPix.x + e * CCDPix.y + f);
                }
                return targetPos;
            }
            catch (Exception)
            {
                // Matrix missing or invalid: fall back to the origin, as the original did.
                return new XYUPoint();
            }
        }
    }
    /// <summary>
    /// Calibration and offset data for the dispensing (点胶) alignment of one
    /// coordinate system: the camera's affine calibration plus stored vectors.
    /// </summary>
    class 点胶对位_VisionData
    {
        // Calibration matrix; shared with XY_Disp_CCD_Clib so that updating it
        // via SetVectorToHom2D keeps both references consistent.
        public Mat hv_homMat2D_Disp_CCD_Clib = new Mat();

        // Pixel -> machine affine transform for the dispensing CCD.
        public AffineTrans2D XY_Disp_CCD_Clib { get; set; }

        // Vector from the dispensing pin to the dispensing CCD.
        public XYUPoint 点胶对位_DispPin_DispCCD { get; set; }

        // Reference (基准) vector from the dispensing CCD to the sensor center.
        public XYUPoint 点胶对位_DispCCD_SenserCenter基准 { get; set; }

        // Current vector from the dispensing CCD to the sensor center.
        public XYUPoint 点胶对位_DispCCD_SenserCenter { get; set; }

        public 点胶对位_VisionData()
        {
            XY_Disp_CCD_Clib = new AffineTrans2D { hv_homMat2D = hv_homMat2D_Disp_CCD_Clib };
        }
    }

    /// <summary>
    /// Calibration and offset data for the assembly-positioning (组装定位) flow:
    /// affine calibrations for the upper and lower cameras, reference angles,
    /// the fitted rotation center and the stored relative-position vectors.
    /// </summary>
    class 组装定位_VisionData
    {
        // Calibration matrices; shared with the AffineTrans2D helpers below.
        public Mat hv_homMat2D_上CCD_Clib = new Mat();
        public Mat hv_homMat2D_下CCD_Clib = new Mat();

        // Reference angles captured at calibration time.
        public double d_上CCD_Angle = 0;
        public double d_下CCD_Angle = 0;

        // Pixel -> machine affine transforms for upper (上) and lower (下) cameras.
        public AffineTrans2D XY_上CCD_Clib { get; set; }
        public AffineTrans2D XY_下CCD_Clib { get; set; }

        // Rotation center in machine coordinates (fitted by setRotateCenter).
        public XYUPoint Point_旋转中心 { get; set; }

        // Stored relative-position vectors.
        public XYUPoint 组装定位_CCD上相机_CCD下相机 { get; set; }
        public XYUPoint 组装定位_CCD物料_下相机 { get; set; }
        public XYUPoint 组装定位_CCD上相机_后壳 { get; set; }

        public 组装定位_VisionData()
        {
            XY_上CCD_Clib = new AffineTrans2D { hv_homMat2D = hv_homMat2D_上CCD_Clib };
            XY_下CCD_Clib = new AffineTrans2D { hv_homMat2D = hv_homMat2D_下CCD_Clib };
        }
    }
    /// <summary>
    /// Singleton manager for dispensing (点胶) vision calibration data, keyed by
    /// coordinate-system name, with JSON persistence under the config folder.
    /// </summary>
    public class VisionDataMgr
    {
        private VisionDataMgr()
        {
        }

        // Calibration data keyed by coordinate-system name.
        private Dictionary<string, 点胶对位_VisionData> visionData = new Dictionary<string, 点胶对位_VisionData>();
        private static readonly object obj = new object();
        private static VisionDataMgr visionDataMgr;

        // Persisted calibration file under the application directory.
        // Path.Combine avoids the doubled separator the original concatenation produced.
        private static string ConfigFilePath
        {
            get { return Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "config", "VisionData.json"); }
        }

        /// <summary>
        /// 获取视觉数据管理句柄 (thread-safe singleton accessor).
        /// </summary>
        /// <returns>The single shared instance.</returns>
        public static VisionDataMgr GetInstance()
        {
            if (visionDataMgr == null)
            {
                lock (obj)
                {
                    // Double-checked locking: the original omitted this inner check,
                    // so two racing threads could each create (and overwrite) the instance.
                    if (visionDataMgr == null)
                        visionDataMgr = new VisionDataMgr();
                }
            }
            return visionDataMgr;
        }

        /// <summary>
        /// 读取视觉标定数据. Creates the file first if it does not exist.
        /// </summary>
        /// <returns>Always 0.</returns>
        public int Read()
        {
            string currentFile = ConfigFilePath;
            if (!File.Exists(currentFile))
            {
                // First run: persist the (empty) in-memory data so the file exists.
                Save();
            }
            object obs = AccessJosnSerializer.JsonToObject(currentFile, typeof(Dictionary<string, 点胶对位_VisionData>));
            if (obs != null)
            {
                visionData = (Dictionary<string, 点胶对位_VisionData>)obs;
            }
            return 0;
        }

        /// <summary>
        /// 保存视觉标定数据.
        /// </summary>
        /// <returns>Always 0; serialization failure is ignored (original behavior).</returns>
        public int Save()
        {
            AccessJosnSerializer.ObjectToJson(ConfigFilePath, visionData);
            return 0;
        }

        /// <summary>Registers a new coordinate system; no-op if it already exists.</summary>
        public void AddVisionCoordinate(string str_Name)
        {
            if (!visionData.ContainsKey(str_Name))
                visionData.Add(str_Name, new 点胶对位_VisionData());
        }

        /// <summary>
        /// Runs the nine-point calibration for the named coordinate system.
        /// </summary>
        /// <returns>0 on success, -1 when the name is unknown or calibration throws.</returns>
        public int setCameraCoordinate(string str_坐标系名称, double[] VX, double[] VY, double[] MX, double[] MY)
        {
            try
            {
                // TryGetValue avoids the original ContainsKey + indexer double lookup.
                点胶对位_VisionData data;
                if (!visionData.TryGetValue(str_坐标系名称, out data))
                    return -1;
                data.XY_Disp_CCD_Clib.SetVectorToHom2D(VX, VY, MX, MY, ref data.hv_homMat2D_Disp_CCD_Clib);
                return 0;
            }
            catch (Exception)
            {
                return -1;
            }
        }

        #region 点胶对位

        /// <summary>
        /// 计算右上相机和下相机的相对位置 --- DressPos是吐胶位置，胶针与胶水的相对位置.
        /// Derives the pin-to-camera vector from the snap position, the dress
        /// (glue-dispense) position and the glue dot's pixel coordinates.
        /// Throws KeyNotFoundException for an unknown coordinate-system name.
        /// </summary>
        public void setVector_DispPin_DispCCD(string str_坐标系名称, XYUPoint snapMachinePos, XYUPoint dressMachinePos, XYUPoint VisionPixCCD)
        {
            XYUPoint CCDDiff = visionData[str_坐标系名称].XY_Disp_CCD_Clib.GetAffineTransResult(VisionPixCCD);
            // Camera nine-point calibration center relative to the glue dot
            // (相机九点标定中心与胶水的相对位置).
            XYUPoint vector_DispCCD_DressPoint = new XYUPoint();
            vector_DispCCD_DressPoint.x = snapMachinePos.x - CCDDiff.x;
            vector_DispCCD_DressPoint.y = snapMachinePos.y - CCDDiff.y;
            // Dress and snap positions must share the same Y axis: the calibration is
            // not on the transfer Y axis, so CCDDiff is not compensated when Y moves.
            // This yields the pin relative to the camera calibration center.
            XYUPoint result = new XYUPoint();
            result.x = dressMachinePos.x - vector_DispCCD_DressPoint.x;
            result.y = dressMachinePos.y - vector_DispCCD_DressPoint.y;
            result.u = VisionPixCCD.u;
            visionData[str_坐标系名称].点胶对位_DispPin_DispCCD = result;
        }

        /// <summary>
        /// Computes the dispensing target position for a product seen by the camera.
        /// </summary>
        /// <returns>Always 0; result in <paramref name="DispTagertPos"/>.</returns>
        public int getDispPin_Tagert(string str_坐标系名称, XYUPoint snapMachinePos, XYUPoint VisionPixCCD, out XYUPoint DispTagertPos)
        {
            XYUPoint CCDDiff = visionData[str_坐标系名称].XY_Disp_CCD_Clib.GetAffineTransResult(VisionPixCCD);
            // Calibration center relative to the product (拍摄产品，可得相机九点标定中心与产品的相对位置).
            XYUPoint vector_DispCCD_TagertPoint = new XYUPoint();
            vector_DispCCD_TagertPoint.x = snapMachinePos.x - CCDDiff.x;
            vector_DispCCD_TagertPoint.y = snapMachinePos.y - CCDDiff.y;

            DispTagertPos = new XYUPoint();
            // Combine with the stored pin-to-camera vector to get pin relative to product.
            DispTagertPos.x = visionData[str_坐标系名称].点胶对位_DispPin_DispCCD.x + vector_DispCCD_TagertPoint.x;
            DispTagertPos.y = visionData[str_坐标系名称].点胶对位_DispPin_DispCCD.y + vector_DispCCD_TagertPoint.y;
            DispTagertPos.u = VisionPixCCD.u;
            return 0;
        }
        #endregion

        #region Senser定位

        /// <summary>
        /// Stores the reference (基准) camera-to-sensor-center vector.
        /// </summary>
        public void setVector_DispCCD_Senser基准(string str_坐标系名称, XYUPoint snapMachinePos, XYUPoint VisionPixCCD)
        {
            XYUPoint CCDDiff = visionData[str_坐标系名称].XY_Disp_CCD_Clib.GetAffineTransResult(VisionPixCCD);
            XYUPoint vector_DispCCD_Senser = new XYUPoint();
            vector_DispCCD_Senser.x = snapMachinePos.x - CCDDiff.x;
            vector_DispCCD_Senser.y = snapMachinePos.y - CCDDiff.y;
            vector_DispCCD_Senser.u = CCDDiff.u;
            visionData[str_坐标系名称].点胶对位_DispCCD_SenserCenter基准 = vector_DispCCD_Senser;
        }

        /// <summary>
        /// Computes the current camera-to-sensor-center vector and stores its
        /// deviation from the reference vector.
        /// </summary>
        public void setVector_DispCCD_Senser(string str_坐标系名称, XYUPoint snapMachinePos, XYUPoint VisionPixCCD)
        {
            XYUPoint CCDDiff = visionData[str_坐标系名称].XY_Disp_CCD_Clib.GetAffineTransResult(VisionPixCCD);
            XYUPoint vector_DispCCD_Senser = new XYUPoint();
            vector_DispCCD_Senser.x = snapMachinePos.x - CCDDiff.x;
            vector_DispCCD_Senser.y = snapMachinePos.y - CCDDiff.y;
            vector_DispCCD_Senser.u = CCDDiff.u;
            XYUPoint reference = visionData[str_坐标系名称].点胶对位_DispCCD_SenserCenter基准;
            // Deviation from the reference measurement.
            XYUPoint result = new XYUPoint();
            result.x = vector_DispCCD_Senser.x - reference.x;
            result.y = vector_DispCCD_Senser.y - reference.y;
            result.u = vector_DispCCD_Senser.u - reference.u;
            visionData[str_坐标系名称].点胶对位_DispCCD_SenserCenter = result;
        }

        /// <summary>
        /// Returns the stored sensor-center deviation.
        /// </summary>
        /// <returns>Always 0; result in <paramref name="tagertPoint"/>.</returns>
        public int getSenserCenterDiff(string str_坐标系名称, out XYUPoint tagertPoint)
        {
            tagertPoint = visionData[str_坐标系名称].点胶对位_DispCCD_SenserCenter;
            return 0;
        }

        #endregion

        #region 胶宽和胶型计算

        // Machine-units-per-pixel scale derived from the calibration: distance
        // between the images of pixel (0,0) and pixel (1,0). Shared by the two
        // conversion methods below (the original duplicated this computation).
        private double GetPixelScale(string str_坐标系名称)
        {
            AffineTrans2D clib = visionData[str_坐标系名称].XY_Disp_CCD_Clib;
            XYUPoint origin = clib.GetAffineTransResult(new XYUPoint(0, 0));
            XYUPoint unitX = clib.GetAffineTransResult(new XYUPoint(1, 0));
            return Math.Sqrt(Math.Pow(unitX.x - origin.x, 2) + Math.Pow(unitX.y - origin.y, 2));
        }

        /// <summary>换算像素至机械值 — converts a pixel length to machine units.</summary>
        public double Funciton_换算像素至机械值(string str_坐标系名称, double d_PixValue)
        {
            return d_PixValue * GetPixelScale(str_坐标系名称);
        }

        /// <summary>换算像素至面积 — converts a pixel area to machine-unit area (scale squared).</summary>
        public double Funciton_换算像素至面积(string str_坐标系名称, double d_PixValue)
        {
            double scale = GetPixelScale(str_坐标系名称);
            return d_PixValue * scale * scale;
        }
        #endregion

    }
    /// <summary>
    /// Selects which camera's calibration a setCameraCoordinate call targets.
    /// </summary>
    public enum CoordinateType
    {
        上CCD,  // upper camera
        下CCD   // lower camera
    }
    /// <summary>
    /// Singleton manager for assembly-positioning (组装定位) vision calibration
    /// data, keyed by coordinate-system name, with JSON persistence.
    /// </summary>
    public class Vision_组装定位Mgr
    {
        private Vision_组装定位Mgr()
        {
        }

        // Calibration data keyed by coordinate-system name.
        private Dictionary<string, 组装定位_VisionData> visionData = new Dictionary<string, 组装定位_VisionData>();
        private static readonly object obj = new object();
        private static Vision_组装定位Mgr visionDataMgr;

        // Persisted calibration file under the application directory.
        // Path.Combine avoids the doubled separator the original concatenation produced.
        private static string ConfigFilePath
        {
            get { return Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "config", "Vision_组装定位Mgr.json"); }
        }

        /// <summary>
        /// 获取视觉数据管理句柄 (thread-safe singleton accessor).
        /// </summary>
        /// <returns>The single shared instance.</returns>
        public static Vision_组装定位Mgr GetInstance()
        {
            if (visionDataMgr == null)
            {
                lock (obj)
                {
                    // Double-checked locking: the original omitted this inner check,
                    // so two racing threads could each create (and overwrite) the instance.
                    if (visionDataMgr == null)
                        visionDataMgr = new Vision_组装定位Mgr();
                }
            }
            return visionDataMgr;
        }

        /// <summary>
        /// 读取视觉标定数据. Creates the file first if it does not exist.
        /// </summary>
        /// <returns>Always 0.</returns>
        public int Read()
        {
            string currentFile = ConfigFilePath;
            if (!File.Exists(currentFile))
            {
                // First run: persist the (empty) in-memory data so the file exists.
                Save();
            }
            object obs = AccessJosnSerializer.JsonToObject(currentFile, typeof(Dictionary<string, 组装定位_VisionData>));
            if (obs != null)
            {
                visionData = (Dictionary<string, 组装定位_VisionData>)obs;
            }
            return 0;
        }

        /// <summary>
        /// 保存视觉标定数据.
        /// </summary>
        /// <returns>Always 0; serialization failure is ignored (original behavior).</returns>
        public int Save()
        {
            AccessJosnSerializer.ObjectToJson(ConfigFilePath, visionData);
            return 0;
        }

        /// <summary>Registers a new coordinate system; no-op if it already exists.</summary>
        public void AddVisionCoordinate(string str_Name)
        {
            if (!visionData.ContainsKey(str_Name))
                visionData.Add(str_Name, new 组装定位_VisionData());
        }

        /// <summary>
        /// Runs the nine-point calibration for the selected camera of the named
        /// coordinate system.
        /// </summary>
        /// <returns>0 on success, -1 when the name is unknown or calibration throws.</returns>
        public int setCameraCoordinate(string str_坐标系名称, CoordinateType type, double[] VX, double[] VY, double[] MX, double[] MY)
        {
            try
            {
                // TryGetValue avoids the original ContainsKey + indexer double lookup.
                组装定位_VisionData data;
                if (!visionData.TryGetValue(str_坐标系名称, out data))
                    return -1;
                if (type == CoordinateType.上CCD)
                    data.XY_上CCD_Clib.SetVectorToHom2D(VX, VY, MX, MY, ref data.hv_homMat2D_上CCD_Clib);
                else if (type == CoordinateType.下CCD)
                    data.XY_下CCD_Clib.SetVectorToHom2D(VX, VY, MX, MY, ref data.hv_homMat2D_下CCD_Clib);
                return 0;
            }
            catch (Exception)
            {
                return -1;
            }
        }

        #region 组装对位
        /// <summary>
        /// 计算上相机和下相机的相对位置. The upper and lower camera views must be
        /// left-right mirrored, i.e. the same line yields supplementary angles.
        /// Also records the calibration angles of both cameras.
        /// </summary>
        public void setVector_上CCD_下CCD(string str_坐标系名称, XYUPoint snapMachinePos, XYUPoint VisionPix上CCD, XYUPoint VisionPix下CCD)
        {
            组装定位_VisionData data = visionData[str_坐标系名称];
            XYUPoint upperDiff = data.XY_上CCD_Clib.GetAffineTransResult(VisionPix上CCD);
            XYUPoint lowerDiff = data.XY_下CCD_Clib.GetAffineTransResult(VisionPix下CCD);
            // Snap position minus both camera offsets gives the camera-to-camera vector.
            XYUPoint result = new XYUPoint();
            result.x = snapMachinePos.x - upperDiff.x - lowerDiff.x;
            result.y = snapMachinePos.y - upperDiff.y - lowerDiff.y;
            data.d_上CCD_Angle = VisionPix上CCD.u;
            data.d_下CCD_Angle = VisionPix下CCD.u;
            data.组装定位_CCD上相机_CCD下相机 = result;
        }

        /// <summary>
        /// 计算上相机和槽位的相对位置 — stores the upper-camera-to-slot (后壳) vector.
        /// </summary>
        public void setVector_上CCD_槽位(string str_坐标系名称, XYUPoint snapMachinePos, XYUPoint VisionPix上CCD)
        {
            XYUPoint upperDiff = visionData[str_坐标系名称].XY_上CCD_Clib.GetAffineTransResult(VisionPix上CCD);
            XYUPoint result = new XYUPoint();
            result.x = snapMachinePos.x - upperDiff.x;
            result.y = snapMachinePos.y - upperDiff.y;
            result.u = VisionPix上CCD.u;
            visionData[str_坐标系名称].组装定位_CCD上相机_后壳 = result;
        }

        /// <summary>
        /// 计算下相机和物料的相对位置 — stores the material-to-lower-camera vector.
        /// </summary>
        public void setVector_下CCD_物料(string str_坐标系名称, XYUPoint snapMachinePos, XYUPoint VisionPix下CCD)
        {
            XYUPoint lowerDiff = visionData[str_坐标系名称].XY_下CCD_Clib.GetAffineTransResult(VisionPix下CCD);
            XYUPoint result = new XYUPoint();
            result.x = snapMachinePos.x - lowerDiff.x;
            result.y = snapMachinePos.y - lowerDiff.y;
            result.u = VisionPix下CCD.u;
            visionData[str_坐标系名称].组装定位_CCD物料_下相机 = result;
        }

        /// <summary>
        /// Fits a circle (algebraic least-squares) through the machine positions of
        /// the same feature seen at several rotation angles, and stores its center
        /// as the rotation center (Point_旋转中心).
        /// </summary>
        /// <returns>0 on success, -1 when the input arrays differ in length.</returns>
        public int setRotateCenter(string str_坐标系名称, XYUPoint[] snapMachinePos, XYUPoint[] VisionPixCCD)
        {
            if (snapMachinePos.Length != VisionPixCCD.Length)
                return -1;
            int N = snapMachinePos.Length;
            double[] posX = new double[N];
            double[] posY = new double[N];
            AffineTrans2D clib = visionData[str_坐标系名称].XY_下CCD_Clib;
            for (int i = 0; i < N; i++)
            {
                // Pixel -> relative machine offset, then to absolute machine coordinates.
                // One transform per point (the original invoked it twice per point).
                XYUPoint diff = clib.GetAffineTransResult(VisionPixCCD[i]);
                posX[i] = snapMachinePos[i].x - diff.x;
                posY[i] = snapMachinePos[i].y - diff.y;
            }
            // Accumulate the moment sums for the normal equations of
            // x^2 + y^2 + a*x + b*y + c = 0.
            double sum_X = 0, sum_Y = 0, sum_XX = 0, sum_YY = 0, sum_XY = 0;
            double sum_XXX = 0, sum_YYY = 0, sum_XXY = 0, sum_XYY = 0, sum_XXAndYY = 0;
            for (int i = 0; i < N; i++)
            {
                double x = posX[i], y = posY[i];
                sum_X += x; sum_Y += y;
                sum_XX += x * x; sum_YY += y * y; sum_XY += x * y;
                sum_XXX += x * x * x; sum_YYY += y * y * y;
                sum_XXY += x * x * y; sum_XYY += x * y * y;
                sum_XXAndYY += x * x + y * y;
            }
            double C = N * sum_XX - sum_X * sum_X;
            double D = N * sum_XY - sum_X * sum_Y;
            double E = N * sum_XXX + N * sum_XYY - sum_XXAndYY * sum_X;
            double G = N * sum_YY - sum_Y * sum_Y;
            double H = N * sum_XXY + N * sum_YYY - sum_XXAndYY * sum_Y;

            double a = (H * D - E * G) / (C * G - D * D);
            double b = (H * C - E * D) / (D * D - G * C);
            // The constant term c (and hence the radius) was computed but unused — dropped.

            // Circle center is (-a/2, -b/2).
            XYUPoint center = new XYUPoint();
            center.x = -a / 2;
            center.y = -b / 2;

            visionData[str_坐标系名称].Point_旋转中心 = center;
            return 0;
        }

        /// <summary>
        /// Converts a measured angle into the assembly rotation command,
        /// wrapped once into (-180, 180].
        /// </summary>
        public double Get_组装角度(string str_坐标系名称, double d_Angle)
        {
            组装定位_VisionData data = visionData[str_坐标系名称];
            // Angle change of the shell seen by the upper camera since calibration,
            // applied to the lower camera's reference angle.
            double d_AngleChange = data.组装定位_CCD上相机_后壳.u - data.d_上CCD_Angle;
            double d_下CCD_Result = data.d_下CCD_Angle + d_AngleChange;
            double d_Result = d_下CCD_Result - d_Angle;
            if (d_Result < -180)
                d_Result += 360;
            else if (d_Result > 180)
                d_Result -= 360;
            return d_Result;
        }

        /// <summary>
        /// Computes the assembly target position from the stored vectors, optionally
        /// rotating the material point around the fitted rotation center first.
        /// </summary>
        /// <returns>Always 0; result in <paramref name="TagertPos"/>.</returns>
        public int get组装目标位置(string str_坐标系名称, out XYUPoint TagertPos, bool b_Rotate)
        {
            组装定位_VisionData data = visionData[str_坐标系名称];
            XYUPoint point_物料_下相机 = data.组装定位_CCD物料_下相机;
            XYUPoint point_上相机_下相机 = data.组装定位_CCD上相机_CCD下相机;
            XYUPoint point_上相机_槽位 = data.组装定位_CCD上相机_后壳;
            XYUPoint point_Rotate = data.Point_旋转中心;

            TagertPos = new XYUPoint();
            // NOTE(review): 角度换算有待确认 — the angle conversion still needs confirming.
            TagertPos.u = Get_组装角度(str_坐标系名称, point_物料_下相机.u);

            if (b_Rotate)
            {
                // Rotate the material point around the rotation center by the target angle,
                // then translate by the camera-to-camera and camera-to-slot vectors.
                double rad = TagertPos.u / 180.0 * Math.PI;
                double dx = point_物料_下相机.x - point_Rotate.x;
                double dy = point_物料_下相机.y - point_Rotate.y;
                double rotatedX = point_Rotate.x + dx * Math.Cos(rad) - dy * Math.Sin(rad);
                double rotatedY = point_Rotate.y + dx * Math.Sin(rad) + dy * Math.Cos(rad);
                TagertPos.x = rotatedX - point_上相机_下相机.x + point_上相机_槽位.x;
                TagertPos.y = rotatedY - point_上相机_下相机.y + point_上相机_槽位.y;
            }
            else
            {
                TagertPos.x = point_物料_下相机.x - point_上相机_下相机.x + point_上相机_槽位.x;
                TagertPos.y = point_物料_下相机.y - point_上相机_下相机.y + point_上相机_槽位.y;
            }
            return 0;
        }
        #endregion

    }

}