﻿using BaseTool;
using OpenCvSharp;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace Airthmetic
{
    /// <summary>
    /// Structured-light reconstruction tool: copies captured images, decodes them,
    /// triangulates point clouds, computes normals and saves the results (images + PLY).
    /// </summary>
    public class ReconstructTool
    {
        /// <summary> Reconstruction parameters (persisted to/loaded from XML via Save/Read). </summary>
        public ReconstructParam Param = new ReconstructParam();
        /// <summary> Reconstruction data/results (reset by Data.Init() at the start of each run). </summary>
        public ReconstructData Data = new ReconstructData();

        /// <summary> Timestamp of the current run; refreshed on every trigger and used to name the output folder. </summary>
        private DateTime ReconstructTime = DateTime.Now;

        #region 参数读取&保存
        /// <summary>
        /// Loads the reconstruction parameters from XML.
        /// When the file does not exist yet, default parameters are created and
        /// written to that same path first, so the subsequent read succeeds.
        /// </summary>
        /// <param name="ReconstructXml">Path to read from; null/whitespace uses the configured default (PathTool.ReconstructParamXml).</param>
        /// <returns>true when parameters were loaded; false on error or invalid file content.</returns>
        public bool Read(string ReconstructXml = null)
        {
            try
            {
                string path = string.IsNullOrWhiteSpace(ReconstructXml) ? PathTool.ReconstructParamXml : ReconstructXml;
                if (!File.Exists(path))
                {
                    // No parameter file yet: start from defaults and persist them.
                    // BUGFIX: save to 'path' (the original called Save() with no
                    // argument, which wrote the default location even when a custom
                    // path was requested, so the ReadXML below would still fail).
                    Param = new ReconstructParam();
                    Save(path);
                }

                // Guard against a null deserialization result so Param never becomes null.
                ReconstructParam loaded = FileTool.ReadXML(path, typeof(ReconstructParam)) as ReconstructParam;
                if (loaded == null)
                {
                    Log.SaveLog($"读取重建工具参数失败：文件内容无效，{path}", LogType.Run, Color.Red);
                    return false;
                }

                Param = loaded;
                Log.SaveLog($"读取重建工具参数成功", LogType.Run, Color.Black);
                return true;
            }
            catch (Exception ex)
            {
                Log.SaveError(ex, $"读取重建工具参数异常，{ex.Message}");
                return false;
            }
        }

        /// <summary>
        /// Persists the current reconstruction parameters to XML.
        /// </summary>
        /// <param name="ReconstructXml">Target path; null/whitespace writes to the configured default (PathTool.ReconstructParamXml).</param>
        /// <returns>true when the file was written successfully; false on failure or exception.</returns>
        public bool Save(string ReconstructXml = null)
        {
            try
            {
                // Fall back to the configured default path when none is supplied.
                string path = PathTool.ReconstructParamXml;
                if (!string.IsNullOrWhiteSpace(ReconstructXml))
                    path = ReconstructXml;

                bool ok = FileTool.WriteXML(path, Param);
                Color logColor = ok ? Color.Black : Color.Red;
                Log.SaveLog($"保存重建工具参数{(ok ? "成功" : "失败")}", LogType.Run, logColor);
                return ok;
            }
            catch (Exception ex)
            {
                Log.SaveError(ex, $"保存重建工具参数异常，{ex.Message}");
                return false;
            }
        }
        #endregion

        /// <summary>
        /// Entry point: runs the full reconstruction pipeline for one trigger:
        /// initialization, image copy, per-group reconstruction, and parameter saving.
        /// Progress is reported through Machine.StatusBarAction (fraction 0..1; -1 at
        /// the start, presumably a reset — confirm in Machine). Exceptions are logged
        /// and swallowed; this method never throws.
        /// </summary>
        /// <param name="input">Reconstruction input: image counts/paths and calibration results.</param>
        public void Reconstruct(ReconstructInput input)
        {
            string msg = "";
            try
            {
                // Stopwatch for the whole pipeline; spLastms marks the end of the
                // previous step so each step can log its own elapsed time.
                Stopwatch spTotal = Stopwatch.StartNew();
                long spLastms = 0;

                #region 初始化
                // Timestamp of this run; also names the output folder below.
                ReconstructTime = DateTime.Now;

                // Output folder: Debug\Data\Reconstruct\yyyyMMdd_HHmmss\
                string SavePath = $"{PathTool.DataReconstructPath}{ReconstructTime.ToString("yyyyMMdd_HHmmss")}\\";

                // Reset the status/progress bar.
                Machine.StatusBarAction?.Invoke(-1, "", Color.Black);
                // Reset previous reconstruction data/results.
                Data.Init();
                #endregion

                #region 数据加载 + 预处理
                // 1. Load images + preprocessing (currently disabled; input is
                // expected to arrive already preprocessed).
                //Machine.StatusBarAction?.Invoke(0, "数据预处理中...", Color.Black);
                //LoadImage(input);
                //Log.SaveLog($"重建：预处理图像耗时：{spTotal.ElapsedMilliseconds - spLastms}ms", LogType.Run, Color.Black);
                //spLastms = spTotal.ElapsedMilliseconds;

                // Abort early when there is no input data at all.
                if (input.capImageCount == 0)
                {
                    msg = $"重建失败：预处理图像数组为空";
                    Log.SaveLog(msg, LogType.Run, Color.Red);
                    Machine.StatusBarAction?.Invoke(1, msg, Color.Red);
                    return;
                }

                // 2. Create the reconstruction folders and copy the preprocessed
                // images into the reconstruction image folder.
                Machine.StatusBarAction?.Invoke(0.05, $"数据预处理中...", Color.Black);
                CopyImage(input, SavePath);
                Log.SaveLog($"重建：复制图像耗时：{spTotal.ElapsedMilliseconds - spLastms}ms", LogType.Run, Color.Black);
                spLastms = spTotal.ElapsedMilliseconds;
                #endregion

                #region 重建
                // 3. Reconstruct each image group; progress runs from 0.1 to 0.95.
                int Count = input.capImageCount;
                Machine.StatusBarAction?.Invoke(0.1, $"重建数据中...0/{Count}", Color.Black);
                for (int i = 0; i < Count; i++)
                {
                    string timeMsg = "";
                    ReconstructMeasure(input, i, out timeMsg);

                    Machine.StatusBarAction?.Invoke((double)(i + 1) / (double)Count * 0.85 + 0.1, $"重建数据中...{i + 1}/{Count}", Color.Black);
                    Log.SaveLog($"重建：重建数据(第{i + 1}组)耗时：{spTotal.ElapsedMilliseconds - spLastms}ms{timeMsg}", LogType.Run, Color.Black);
                    spLastms = spTotal.ElapsedMilliseconds;
                }
                #endregion

                // 4. Persist the calibration results and the reconstruction
                // parameters alongside the output images.
                Machine.StatusBarAction?.Invoke(0.95, $"保存参数中...", Color.Black);

                string XmlPath = $"{SavePath}Xml\\";
                if (!Directory.Exists(XmlPath))
                    Directory.CreateDirectory(XmlPath);
                FileTool.WriteXML($"{XmlPath}CalibrationResult.xml", input.caliResults);
                FileTool.WriteXML($"{XmlPath}ReconstructParam.xml", Param);
                //FileTool.WriteXML($"{XmlPath}PatternParam.xml", patternData);

                // Index-file bookkeeping kept for reference (currently disabled).
                //string IniPath = XmlPath + "Index.ini";
                //FileTool.WriteIni(IniPath, "Reconstruct", "CaptureCount", input.capImageCount);
                //FileTool.WriteIni(IniPath, "Reconstruct", "PretreatmentCount", input.preImageCount);
                //FileTool.WriteIni(IniPath, "Reconstruct", "FinishCount", input.finImageCount);
                //string CapturePath = "";
                //foreach (var file in input.preImage1Path) { CapturePath += $"\n{file}"; }
                //FileTool.WriteIni(IniPath, "Reconstruct", "CapturePath[]", CapturePath);
                //FileTool.WriteIni(IniPath, "Reconstruct", "CalibrationTime", PathTool.GetDateTimeFolderName(ReconstructTime));
                //FileTool.WriteIni(IniPath, "Reconstruct", "CalibrationTicks", ReconstructTime.Ticks.ToString());
                //FileTool.WriteIni(IniPath, "Reconstruct", "CalibrationPath", SavePath);

                Machine.StatusBarAction?.Invoke(1, $"重建数据完成", Color.Black);
                Log.SaveLog($"重建：重建数据总耗时：{spTotal.ElapsedMilliseconds}ms", LogType.Run, Color.Black);
            }
            catch (Exception ex)
            {
                Log.SaveError(ex, $"重建异常：{ex.Message}");
                Machine.StatusBarAction?.Invoke(1, "重建异常", Color.Red);
            }
        }

        #region 辅助方法
        /// <summary>
        /// Step 2: creates the reconstruction image folders and copies the
        /// preprocessed images of every group into them. For each group it also
        /// copies one representative image (the first image file whose name contains
        /// "01") next to the group's folder, and records the per-group folder in
        /// input.preImage2Path.
        /// </summary>
        /// <param name="input">Reconstruction input; preImage2Path receives one copied folder path per group.</param>
        /// <param name="SavePath">Root folder of this reconstruction run.</param>
        private void CopyImage(ReconstructInput input, string SavePath)
        {
            string ImageSavePath = SavePath + "Image\\";
            // CreateDirectory is a no-op when the folder already exists.
            Directory.CreateDirectory(ImageSavePath);

            int Count = input.capImageCount;
            for (int i = 0; i < Count; i++)
            {
                // Two-digit group index: "01", "02", ...
                string indexName = (i + 1).ToString().PadLeft(2, '0');

                // Copy the representative image next to the per-group folder,
                // renamed to the group index.
                DirectoryInfo OriDir = new DirectoryInfo(input.capImagePath[i]);
                foreach (FileInfo OriFile in OriDir.GetFiles())
                {
                    if (!IsImageFile(OriFile.Extension))
                        continue;
                    if (!OriFile.Name.Contains("01"))
                        continue;

                    // overwrite: true keeps a re-run from crashing on an existing copy.
                    OriFile.CopyTo(ImageSavePath + indexName + OriFile.Extension, true);
                    break;
                }

                // Copy every image of this group into its own sub-folder.
                string ticksPath = ImageSavePath + indexName + "\\";
                Directory.CreateDirectory(ticksPath);
                foreach (string pf in Directory.GetFiles(input.capImagePath[i]))
                {
                    FileInfo PreFile = new FileInfo(pf);
                    if (!IsImageFile(PreFile.Extension))
                        continue;
                    PreFile.CopyTo(ticksPath + PreFile.Name, true);
                }
                input.preImage2Path.Add(ticksPath);

                Log.SaveLog($"复制图像[{i + 1}/{Count}]完成", LogType.Run, Color.Black);
            }
        }

        /// <summary>
        /// Returns true when the extension is a supported image format
        /// (.bmp / .jpg / .png). Case-insensitive — the original exact comparison
        /// silently skipped files with upper-case extensions (e.g. ".BMP").
        /// </summary>
        private static bool IsImageFile(string extension)
        {
            return extension.Equals(".bmp", StringComparison.OrdinalIgnoreCase)
                || extension.Equals(".jpg", StringComparison.OrdinalIgnoreCase)
                || extension.Equals(".png", StringComparison.OrdinalIgnoreCase);
        }

        /// <summary>
        /// Step 3: reconstructs one image group — decodes the gray-code set, loads
        /// the matching color image, triangulates the point cloud, computes normals,
        /// and saves the results (color images + two PLY variants). Failures are
        /// logged and recorded in input.finImageFlag[index]; nothing is thrown.
        /// </summary>
        /// <param name="input">Reconstruction input (paths, calibration, completion flags).</param>
        /// <param name="index">Zero-based index of the image group.</param>
        /// <param name="timeMsg">Receives a per-phase timing summary wrapped in parentheses (empty when decoding fails early).</param>
        private void ReconstructMeasure(ReconstructInput input, int index, out string timeMsg)
        {
            timeMsg = "";

            try
            {
                Stopwatch sw = Stopwatch.StartNew();
                // 3.1 Decode the gray-code pattern set into per-pixel projector
                // coordinates (pattern_image) and min/max intensities (min_max_image).
                Mat pattern_image;
                Mat min_max_image;
                CalibrationTool.DecodeGraySet(input.preImage2Path[index],
                    input.PatternCount, input.HorizontalDepth, input.VerticalDepth,
                    Param.Threshold, Param.BlackLightPowerRatio, Param.MinDirectLightComponent,
                    input.caliResults.Param.CameraSize, input.caliResults.Param.ProjectorSize,
                    out pattern_image, out min_max_image);

                // 3.2 Load the color image: the first file in the parent folder whose
                // name contains this group's folder name, resized to the camera size.
                DirectoryInfo di = new DirectoryInfo(input.preImage2Path[index]);
                FileInfo[] fis = di.Parent.GetFiles();
                Mat color_image = null;
                foreach (FileInfo fi in fis)
                {
                    if (fi.Name.Contains(di.Name))
                    {
                        color_image = Cv2.ImRead(fi.FullName).Resize(input.caliResults.Param.CameraSize);
                        break;
                    }
                }
                if (color_image == null)
                {
                    // No color image found: flag the group as failed and bail out.
                    timeMsg = "";
                    Log.SaveLog($"重建失败：(第{index + 1}组)，缺少彩色图像，{di.Parent.FullName}\\{di.Name}(.bmp\\.png\\.jpg)", LogType.Run, Color.Red);
                    input.finImageFlag[index] = false;
                    return;
                }

                timeMsg += $"解码耗时{sw.ElapsedMilliseconds}ms";
                sw.Restart();

                // 3.3 Triangulate the point cloud from the decoded correspondences.
                PointCloud pointCloud = new PointCloud();
                ReconstructModelPatchCenter(
                    input,
                    Param.Threshold, Param.MaxRayDistance,
                    pointCloud, pattern_image, min_max_image, color_image);

                // Save the cloud's color images (point images kept for reference).
                DirectoryInfo mdir = new DirectoryInfo(input.preImage2Path[index]);
                string _SavePath1 = mdir.Parent.Parent.FullName + "\\ReconstructImage\\" + mdir.Name + "\\";
                //ImageTool.SaveMat(_SavePath1, "1_PointsImage", pointCloud.points);
                ImageTool.SaveMat(_SavePath1, "2_ColorImage", pointCloud.colors);
                //ImageTool.SaveMat(_SavePath1, "Tiff_1_PointsImage", pointCloud.points, true);
                ImageTool.SaveMat(_SavePath1, "Tiff_2_ColorImage", pointCloud.colors, true);

                timeMsg += $"、重建耗时{sw.ElapsedMilliseconds}ms";
                sw.Restart();

                // 3.4 Compute per-point normals on the organized cloud.
                compute_normals(pointCloud);

                //ImageTool.SaveMat(_SavePath1, "3_NormalsImage", pointCloud.normals);
                //ImageTool.SaveMat(_SavePath1, "Tiff_3_NormalsImage", pointCloud.normals, true);

                // 3.5 Save the PLY files (9-property with normals, 6-property without).
                string _SavePath2 = mdir.Parent.Parent.FullName + "\\ReconstructImage\\" + mdir.Name + "_";
                SavePly(pointCloud, _SavePath2);
                SavePly2(pointCloud, _SavePath2);
                //Data.PlyPath.Add($"{_SavePath2}PointCloud_6.ply");

                // NOTE(review): this timing span also includes the PLY saving above,
                // not just the normal computation.
                timeMsg += $"、计算法线耗时{sw.ElapsedMilliseconds}ms";
                sw.Restart();
            }
            catch (Exception ex)
            {
                Log.SaveError(ex, $"重建异常：(第{index + 1}组)，{ex.Message}", LogType.Run);
                input.finImageFlag[index] = false;
            }
            finally
            {
                // Wrap the timing summary in parentheses for the caller's log line.
                if (!string.IsNullOrWhiteSpace(timeMsg))
                    timeMsg = $"({timeMsg})";
            }
        }

        /// <summary>
        /// Step 3.3: reconstructs a point cloud by triangulating camera/projector
        /// correspondences. Camera pixels that decode to the same projector pixel are
        /// averaged (patch center) before triangulation; accepted points are written
        /// into pointCloud.points/colors at the projector-grid position.
        /// (The commented-out background-plane removal from the original C++ port
        /// was dead code and has been removed.)
        /// </summary>
        /// <param name="input">Reconstruction input (calibration matrices, projector size).</param>
        /// <param name="Threshold">Minimum (max - min) intensity contrast for a camera pixel to count as decoded.</param>
        /// <param name="MaxRayDistance">Maximum allowed camera-ray/projector-ray gap for a valid point.</param>
        /// <param name="pointCloud">Receives the reconstructed points/colors on a projector-resolution grid.</param>
        /// <param name="pattern_image">Per-camera-pixel decoded projector coordinates (2-channel float).</param>
        /// <param name="min_max_image">Per-camera-pixel min/max intensities (2-channel byte).</param>
        /// <param name="color_image">Color image used to color the reconstructed points.</param>
        public static void ReconstructModelPatchCenter(ReconstructInput input,
            int Threshold, double MaxRayDistance,
            PointCloud pointCloud,
            Mat pattern_image, Mat min_max_image, Mat color_image)
        {
            OpenCvSharp.Size projectorSize = input.caliResults.Param.ProjectorSize;

            // Output grid size. XXX HACK (ported behavior): halve the height for
            // portrait projectors to keep a conventional aspect ratio.
            int scale_factor_x = 1;
            int scale_factor_y = projectorSize.Width > projectorSize.Height ? 1 : 2;
            int out_cols = projectorSize.Width / scale_factor_x;
            int out_rows = projectorSize.Height / scale_factor_y;
            pointCloud.init_points(out_rows, out_cols);

            // Candidate collection: group camera pixels by the projector pixel they
            // decode to. A dictionary maps projector index -> list position; the
            // original List.IndexOf linear scan made this loop O(n^2) in the number
            // of candidate points.
            List<Point2f> proj_points = new List<Point2f>();
            List<List<Point2f>> cam_points = new List<List<Point2f>>();
            Dictionary<int, int> index_lookup = new Dictionary<int, int>();

            for (int h = 0; h < pattern_image.Rows; h++)
            {
                for (int w = 0; w < pattern_image.Cols; w++)
                {
                    Vec2f pattern = pattern_image.At<Vec2f>(h, w);
                    Vec2b min_max = min_max_image.At<Vec2b>(h, w);

                    // Skip undecoded, out-of-projector-range, or low-contrast pixels.
                    if (ImageTool.INVALID(pattern) ||
                        pattern[0] < 0 || pattern[0] > projectorSize.Width ||
                        pattern[1] < 0 || pattern[1] > projectorSize.Height ||
                        (min_max[1] - min_max[0]) < Threshold)
                        continue;

                    Point2f proj_point = new Point2f(pattern[0] / scale_factor_x, pattern[1] / scale_factor_y);
                    int index = (int)proj_point.Y * out_cols + (int)proj_point.X;

                    int ii;
                    if (index_lookup.TryGetValue(index, out ii))
                    {
                        cam_points[ii].Add(new Point2f(w, h));
                    }
                    else
                    {
                        index_lookup.Add(index, proj_points.Count);
                        proj_points.Add(proj_point);
                        cam_points.Add(new List<Point2f> { new Point2f(w, h) });
                    }
                }
            }

            Mat cam_K = input.caliResults.Get_cam_k;
            Mat cam_kc = input.caliResults.Get_cam_kc;
            Mat proj_K = input.caliResults.Get_proj_k;
            Mat proj_kc = input.caliResults.Get_proj_kc;
            Mat T = input.caliResults.Get_T;
            Mat Rt = input.caliResults.Get_R.T();

            int good = 0;
            int bad = 0;
            int error = 0;
            int outside = 0;

            for (int n = 0; n < proj_points.Count; n++)
            {
                try
                {
                    Point2f proj_point = proj_points[n];
                    List<Point2f> cam_point_list = cam_points[n];
                    int count = cam_point_list.Count;

                    if (count == 0)
                        continue;

                    // Patch center: average every camera pixel mapped to this
                    // projector pixel. (The original also accumulated the sum of
                    // squares, which was never used and has been removed.)
                    Point2d sum = new Point2d(0.0, 0.0);
                    foreach (Point2f iter in cam_point_list)
                    {
                        sum.X += iter.X;
                        sum.Y += iter.Y;
                    }
                    Point2d cam = new Point2d(sum.X / count, sum.Y / count);
                    Point2d proj = new Point2d(proj_point.X * scale_factor_x, proj_point.Y * scale_factor_y);

                    // Triangulate; 'distance' receives the ray-ray miss distance
                    // (quality measure).
                    double distance = MaxRayDistance;
                    Point3d p;
                    triangulate_stereo(cam_K, cam_kc, proj_K, proj_kc, Rt, T, cam, proj, out p, ref distance);

                    if (distance < MaxRayDistance)
                    {
                        // Good point: store it at its projector-grid position.
                        int _y = (int)proj_point.Y;
                        int _x = (int)proj_point.X;
                        if (_y > 0 && _y < pointCloud.points.Rows &&
                            _x > 0 && _x < pointCloud.points.Cols)
                        {
                            good++;

                            pointCloud.points.Set<Vec3f>(_y, _x, new Vec3f((float)p.X, (float)p.Y, (float)p.Z));
                            pointCloud.colors.Set<Vec3b>(_y, _x, color_image.At<Vec3b>((int)cam.Y, (int)cam.X));
                            pointCloud.indexList.Add(new int[] { _y, _x });
                        }
                        else
                        {
                            outside++;
                        }
                    }
                    else
                        bad++;
                }
                catch (Exception ex)
                {
                    Log.SaveError(ex);
                    error++;
                }
            }
            Log.SaveLog($"点重建数量 {good}/{proj_points.Count}，异常：{error}，坏点：{bad}，区域外：{outside}", LogType.Run, Color.Black);
        }

        /// <summary>
        /// Step 3.3.1: stereo triangulation. Undistorts a camera point and a
        /// projector point to normalized coordinates, transforms the projector ray
        /// into the camera frame, and intersects the two rays.
        /// </summary>
        /// <param name="k1">Camera intrinsic matrix.</param>
        /// <param name="kc1">Camera distortion coefficients.</param>
        /// <param name="k2">Projector intrinsic matrix.</param>
        /// <param name="kc2">Projector distortion coefficients.</param>
        /// <param name="Rt">Transposed rotation matrix (R.T() as passed by the caller).</param>
        /// <param name="T">Translation vector (3x1).</param>
        /// <param name="p1">Camera image point.</param>
        /// <param name="p2">Projector image point.</param>
        /// <param name="p3d">Reconstructed 3D point (midpoint of the two rays' closest segment).</param>
        /// <param name="distance">Receives the closest distance between the two rays.</param>
        /// <exception cref="Exception">Thrown when UndistortPoints returns an unexpected layout.</exception>
        private static void triangulate_stereo(
            Mat k1, Mat kc1, Mat k2, Mat kc2,
            Mat Rt, Mat T, Point2d p1, Point2d p2, out Point3d p3d, ref double distance)
        {
            // Normalize both points to ideal (undistorted) image coordinates.
            Mat inp1 = new Mat(1, 1, MatType.CV_64FC2);
            Mat inp2 = new Mat(1, 1, MatType.CV_64FC2);
            inp1.Set<Vec2d>(0, 0, new Vec2d(p1.X, p1.Y));
            inp2.Set<Vec2d>(0, 0, new Vec2d(p2.X, p2.Y));
            Mat outp1 = new Mat();
            Mat outp2 = new Mat();
            Cv2.UndistortPoints(inp1, outp1, k1, kc1);
            Cv2.UndistortPoints(inp2, outp2, k2, kc2);
            if (outp1.Type() != MatType.CV_64FC2 || outp1.Rows != 1 || outp1.Cols != 1 ||
                outp2.Type() != MatType.CV_64FC2 || outp2.Rows != 1 || outp2.Cols != 1)
                throw new Exception($"triangulate_stereo:outp1、outp2 Error");
            Vec2d outvec1 = outp1.At<Vec2d>(0, 0);
            Vec2d outvec2 = outp2.At<Vec2d>(0, 0);
            Point3d u1 = new Point3d(outvec1[0], outvec1[1], 1.0);
            Point3d u2 = new Point3d(outvec2[0], outvec2[1], 1.0);

            // To world coordinates: w2 = R^T * (u2 - T).
            Point3d w1 = u1;
            Mat u2_temp = new Mat(3, 1, MatType.CV_64FC1);
            u2_temp.Set<double>(0, 0, u2.X - T.At<double>(0, 0));
            u2_temp.Set<double>(1, 0, u2.Y - T.At<double>(1, 0));
            u2_temp.Set<double>(2, 0, u2.Z - T.At<double>(2, 0));
            Mat w2_temp = Rt * u2_temp;
            // BUGFIX: w2_temp is a 3x1 column vector. The original read
            // At(0,1)/At(0,2) — out-of-range column indices that only produced the
            // right values by accident of contiguous row-major memory layout.
            Point3d w2 = new Point3d(w2_temp.At<double>(0, 0), w2_temp.At<double>(1, 0), w2_temp.At<double>(2, 0));

            // World-frame ray directions: v2 = R^T * u2.
            Point3d v1 = w1;
            Mat u3_temp = new Mat(3, 1, MatType.CV_64FC1);
            u3_temp.Set<double>(0, 0, u2.X);
            u3_temp.Set<double>(1, 0, u2.Y);
            u3_temp.Set<double>(2, 0, u2.Z);
            Mat w3_temp = Rt * u3_temp;
            // Same 3x1 indexing fix as for w2_temp above.
            Point3d v2 = new Point3d(w3_temp.At<double>(0, 0), w3_temp.At<double>(1, 0), w3_temp.At<double>(2, 0));

            // Approximate ray-ray intersection (midpoint of the closest segment).
            double lambda1, lambda2;
            p3d = approximate_ray_intersection(v1, w1, v2, w2, ref distance, out lambda1, out lambda2);
        }

        /// <summary>
        /// Step 3.3.1.1: approximate intersection of two rays p(t) = q + t*v.
        /// Solves the 2x2 normal equations for the parameters of the closest points
        /// on each ray and returns the midpoint of the closest segment.
        /// </summary>
        /// <param name="v1">Direction of ray 1.</param>
        /// <param name="q1">Origin of ray 1.</param>
        /// <param name="v2">Direction of ray 2.</param>
        /// <param name="q2">Origin of ray 2.</param>
        /// <param name="distance">Receives the gap between the two closest points (quality measure).</param>
        /// <param name="lambda1">Ray-1 parameter of its closest point.</param>
        /// <param name="lambda2">Ray-2 parameter of its closest point.</param>
        /// <returns>Midpoint of the closest segment between the rays.</returns>
        private static Point3d approximate_ray_intersection(Point3d v1, Point3d q1, Point3d v2, Point3d q2, ref double distance, out double lambda1, out double lambda2)
        {
            // Dot products of the direction vectors. The original built 3x1 Mats and
            // multiplied them (and also filled a 2x2 inverse matrix 'Vinv' that was
            // never read); plain arithmetic is equivalent and allocation-free.
            double v1tv1 = v1.X * v1.X + v1.Y * v1.Y + v1.Z * v1.Z;
            double v2tv2 = v2.X * v2.X + v2.Y * v2.Y + v2.Z * v2.Z;
            double v1tv2 = v1.X * v2.X + v1.Y * v2.Y + v1.Z * v2.Z;
            double v2tv1 = v1tv2; // the dot product is symmetric

            // Determinant of [[v1tv1, -v1tv2], [-v2tv1, v2tv2]].
            // NOTE(review): detV == 0 for parallel rays, giving NaN/Inf lambdas —
            // same as the original; callers filter the result via MaxRayDistance.
            double detV = v1tv1 * v2tv2 - v1tv2 * v2tv1;

            // Right-hand side: projections of (q2 - q1) onto the two directions.
            Point3d q2_q1 = q2 - q1;
            double Q1 = v1.X * q2_q1.X + v1.Y * q2_q1.Y + v1.Z * q2_q1.Z;
            double Q2 = -(v2.X * q2_q1.X + v2.Y * q2_q1.Y + v2.Z * q2_q1.Z);

            // Solve the 2x2 system by Cramer's rule.
            lambda1 = (v2tv2 * Q1 + v1tv2 * Q2) / detV;
            lambda2 = (v2tv1 * Q1 + v1tv1 * Q2) / detV;

            // Closest point on each ray.
            Point3d p1 = new Point3d(lambda1 * v1.X + q1.X, lambda1 * v1.Y + q1.Y, lambda1 * v1.Z + q1.Z);
            Point3d p2 = new Point3d(lambda2 * v2.X + q2.X, lambda2 * v2.Y + q2.Y, lambda2 * v2.Z + q2.Z);

            // The reconstructed point is the midpoint of the closest segment.
            Point3d p = new Point3d(0.5 * (p1.X + p2.X), 0.5 * (p1.Y + p2.Y), 0.5 * (p1.Z + p2.Z));

            // Gap between the rays (Euclidean distance between the closest points;
            // replaces the original Cv2.Norm on a temporary Mat).
            double dx = p2.X - p1.X;
            double dy = p2.Y - p1.Y;
            double dz = p2.Z - p1.Z;
            distance = Math.Sqrt(dx * dx + dy * dy + dz * dz);

            return p;
        }

        /// <summary>
        /// Step 3.4: computes per-point normals on the organized point cloud using
        /// central differences of the left/right and up/down neighbors. Points with
        /// any invalid neighbor (or a zero-length cross product) are left untouched.
        /// </summary>
        /// <param name="pointCloud">Organized point cloud whose 'normals' image is filled in place.</param>
        public static void compute_normals(PointCloud pointCloud)
        {
            int rows = pointCloud.points.Rows;
            int cols = pointCloud.points.Cols;

            // Interior points only: each needs all four direct neighbors.
            for (int row = 1; row + 1 < rows; row++)
            {
                for (int col = 1; col + 1 < cols; col++)
                {
                    Vec3f left = pointCloud.points.At<Vec3f>(row, col - 1);
                    Vec3f right = pointCloud.points.At<Vec3f>(row, col + 1);
                    Vec3f up = pointCloud.points.At<Vec3f>(row - 1, col);
                    Vec3f down = pointCloud.points.At<Vec3f>(row + 1, col);

                    bool anyInvalid = ImageTool.INVALID(left[0])
                        || ImageTool.INVALID(right[0])
                        || ImageTool.INVALID(up[0])
                        || ImageTool.INVALID(down[0]);
                    if (anyInvalid)
                        continue;

                    // Central-difference tangents along the row and the column.
                    Point3d du = new Point3d(right[0] - left[0], right[1] - left[1], right[2] - left[2]);
                    Point3d dv = new Point3d(down[0] - up[0], down[1] - up[1], down[2] - up[2]);

                    // Cross product of the tangents, then normalized.
                    Point3d normal = new Point3d(
                        -dv.Z * du.Y + dv.Y * du.Z,
                        dv.Z * du.X - dv.X * du.Z,
                        -dv.Y * du.X + dv.X * du.Y);
                    double length = Math.Sqrt(normal.X * normal.X + normal.Y * normal.Y + normal.Z * normal.Z);

                    if (length > 0)
                        pointCloud.normals.Set<Vec3f>(row, col, new Vec3f((float)(normal.X / length), (float)(normal.Y / length), (float)(normal.Z / length)));
                }
            }
        }



        /// <summary>
        /// Step 3.5: writes the point cloud as an ASCII PLY file ("PointCloud_9.ply")
        /// with nine properties per vertex: position, normal, and RGB color.
        /// </summary>
        /// <param name="pointCloud">Point cloud providing the vertex count and data.</param>
        /// <param name="dirPath">Path prefix the file name is appended to.</param>
        private static void SavePly(PointCloud pointCloud, string dirPath)
        {
            StringBuilder sb = new StringBuilder();

            // ASCII PLY header.
            string[] header =
            {
                "ply",
                "format ascii 1.0",
                $"element vertex {pointCloud.indexList.Count}",
                "property float x",
                "property float y",
                "property float z",
                "property float nx",
                "property float ny",
                "property float nz",
                "property uchar red",
                "property uchar green",
                "property uchar blue",
                "end_header",
            };
            foreach (string line in header)
                sb.AppendLine(line);

            // Vertex data is appended by the point cloud itself.
            pointCloud.GetPly(ref sb);

            FileTool.WriteTxt($"{dirPath}PointCloud_9.ply", sb.ToString());
        }
        /// <summary>
        /// Step 3.5: writes the point cloud as an ASCII PLY file ("PointCloud_6.ply")
        /// with six properties per vertex: position and RGB color (no normals).
        /// </summary>
        /// <param name="pointCloud">Point cloud providing the vertex count and data.</param>
        /// <param name="dirPath">Path prefix the file name is appended to.</param>
        private static void SavePly2(PointCloud pointCloud, string dirPath)
        {
            StringBuilder sb = new StringBuilder();

            // ASCII PLY header (normal properties intentionally omitted).
            string[] header =
            {
                "ply",
                "format ascii 1.0",
                $"element vertex {pointCloud.indexList.Count}",
                "property float x",
                "property float y",
                "property float z",
                "property uchar red",
                "property uchar green",
                "property uchar blue",
                "end_header",
            };
            foreach (string line in header)
                sb.AppendLine(line);

            // Vertex data is appended by the point cloud itself.
            pointCloud.GetPly2(ref sb);

            FileTool.WriteTxt($"{dirPath}PointCloud_6.ply", sb.ToString());
        }
        #endregion
    }
}
