#pragma once
#include "opencv2/opencv.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "slStruct.h"
#include "fstream"

#if USECUDA
#include "reconstruct.cuh"
#endif

#ifndef slLog
#define slLog Log::getInstance()
#endif
#ifndef PI
#define PI 3.14159265359
#endif

using namespace cv;

namespace SL
{
    namespace reconstruct
    {
        // Epipolar line in the second view for `point` (which lies in image 1
        // of the pair), computed via OpenCV's computeCorrespondEpilines.
        Vec3f getEpipolarlineCv(Vec2f point, Mat fundamentalMatrix)
        {
            std::vector<Vec2f> inputPoints{point};
            Mat lines;
            computeCorrespondEpilines(inputPoints, 1, fundamentalMatrix, lines);
            const Vec3f &coeffs = lines.at<Vec3f>(0, 0);
            return Vec3f((float)coeffs[0], (float)coeffs[1], (float)coeffs[2]);
        }

        // Epipolar line a*x + b*y + c = 0 for `point`, computed directly as
        // F * [x, y, 1]^T, then normalized so that a^2 + b^2 == 1 (unless the
        // leading coefficients are both zero, in which case it is left as-is).
        Vec3f getEpipolarline(Vec2f point, Mat fundamentalMatrix)
        {
            Vec3f line;
            for (int row = 0; row < 3; ++row)
            {
                line[row] = fundamentalMatrix.at<float>(row, 0) * point[0] +
                            fundamentalMatrix.at<float>(row, 1) * point[1] +
                            fundamentalMatrix.at<float>(row, 2) * 1.0f;
            }
            float norm2 = line[0] * line[0] + line[1] * line[1];
            float scale = norm2 ? 1. / sqrt(norm2) : 1.;
            line[0] *= scale;
            line[1] *= scale;
            line[2] *= scale;
            return line;
        }

        // Intersection of two homogeneous 2D lines via their cross product,
        // dehomogenized to Cartesian coordinates.  NOTE: if the lines are
        // parallel the homogeneous w component is 0 and the result is
        // non-finite (matches the historical behavior).
        Vec2f lineIntersect(Vec3f line1, Vec3f line2)
        {
            float hx = line1[1] * line2[2] - line1[2] * line2[1];
            float hy = line1[2] * line2[0] - line1[0] * line2[2];
            float hw = line1[0] * line2[1] - line1[1] * line2[0];
            return Vec2f(hx / hw, hy / hw);
        }

        // Undistorts a single pixel with the Brown-Conrady model, mirroring
        // OpenCV's cvUndistortPoints fixed-point iteration (5 iterations).
        //
        // p          pixel coordinate; consumed as (p[1], p[0]) and returned
        //            in the same swapped component order -- this matches the
        //            convention used by undistortPointCv.
        // intrinsic  focal lengths fx/fy, principal point tx/ty, distortion
        //            coefficients kc[0..4] and the 3x3 camera matrix.
        //
        // Fix: removed the unused locals `u` and `v` (assigned, never read).
        Vec2f undistortPoint(Vec2f p, Intrinsic &intrinsic)
        {
            int iters = 5;
            float x = p[1];
            float y = p[0];
            float fx = intrinsic.fx;
            float fy = intrinsic.fy;
            float ifx = 1. / fx;
            float ify = 1. / fy;
            float cx = intrinsic.tx;
            float cy = intrinsic.ty;

            // Normalized (distorted) coordinates; x0/y0 stay fixed while x/y
            // are iteratively refined towards the undistorted solution.
            float x0 = (x - cx) * ifx;
            float y0 = (y - cy) * ify;
            x = x0;
            y = y0;

            // k[5..7] stay zero: only a 5-coefficient model is supplied.
            float k[8] = {0};
            for (int i = 0; i < 5; i++)
            {
                k[i] = intrinsic.kc[i];
            }

            // Fixed-point iteration inverting the radial/tangential model.
            for (int j = 0; j < iters; j++)
            {
                double r2 = x * x + y * y;
                double icdist = (1 + ((k[7] * r2 + k[6]) * r2 + k[5]) * r2) / (1 + ((k[4] * r2 + k[1]) * r2 + k[0]) * r2);
                double deltaX = 2 * k[2] * x * y + k[3] * (r2 + 2 * x * x);
                double deltaY = k[2] * (r2 + 2 * y * y) + 2 * k[3] * x * y;
                x = (x0 - deltaX) * icdist;
                y = (y0 - deltaY) * icdist;
            }

            // Re-project the normalized coordinate through the camera matrix
            // back to pixel coordinates.
            double xx = intrinsic.matrix.at<float>(0, 0) * x + intrinsic.matrix.at<float>(0, 1) * y + intrinsic.matrix.at<float>(0, 2);
            double yy = intrinsic.matrix.at<float>(1, 0) * x + intrinsic.matrix.at<float>(1, 1) * y + intrinsic.matrix.at<float>(1, 2);
            double ww = 1. / (intrinsic.matrix.at<float>(2, 0) * x + intrinsic.matrix.at<float>(2, 1) * y + intrinsic.matrix.at<float>(2, 2));
            x = xx * ww;
            y = yy * ww;
            return Vec2f(y, x);
        }

        // Same contract as undistortPoint, but delegating to OpenCV's
        // undistortPoints; components are swapped on the way in and out.
        Vec2f undistortPointCv(Vec2f p, Intrinsic &instrinsic)
        {
            Mat distorted(1, 1, CV_64FC2);
            distorted.at<cv::Vec2d>(0, 0) = cv::Vec2d(p[1], p[0]);
            Mat undistorted;
            undistortPoints(distorted, undistorted, instrinsic.matrix, instrinsic.kc, noArray(), instrinsic.matrix);
            const Vec2d &result = undistorted.at<Vec2d>(0, 0);
            return Vec2f((float)result[1], (float)result[0]);
        }

        // Euclidean norm of a 3-vector.
        float length(Vec3f p)
        {
            float squared = p[0] * p[0] + p[1] * p[1] + p[2] * p[2];
            return sqrt(squared);
        }

        // Returns p scaled to unit length.  NOTE: a zero vector divides by
        // zero and yields non-finite components (historical behavior kept).
        Vec3f normalize(Vec3f p)
        {
            float norm = sqrt(p[0] * p[0] + p[1] * p[1] + p[2] * p[2]);
            Vec3f unit = p / norm;
            return unit;
        }

        // Cosine of the angle between p1 and p2: dot product of the two
        // normalized vectors.
        float getAngleCos(Vec3f p1, Vec3f p2)
        {
            Vec3f u1 = normalize(p1);
            Vec3f u2 = normalize(p2);
            return u1.dot(u2);
        }

        // Angle in radians between p1 and p2.
        //
        // Fix: the dot product of two normalized vectors can drift marginally
        // outside [-1, 1] due to floating-point rounding (e.g. for parallel
        // vectors), which made acos return NaN; clamp before calling acos.
        float getAngle(Vec3f p1, Vec3f p2)
        {
            float cosine = normalize(p1).dot(normalize(p2));
            if (cosine > 1.0f)
                cosine = 1.0f;
            else if (cosine < -1.0f)
                cosine = -1.0f;
            return acos(cosine);
        }

        // r = mat * p for a row-major 4x4 matrix and a 4x1 vector.
        // r must not alias p.
        void multiply_44_41(float *mat, float *p, float *r)
        {
            for (int row = 0; row < 4; ++row)
            {
                float acc = 0.0f;
                for (int col = 0; col < 4; ++col)
                {
                    acc += mat[row * 4 + col] * p[col];
                }
                r[row] = acc;
            }
        }

        // r = mat * p for a row-major 3x3 matrix and a 3x1 vector.
        // r must not alias p.
        void multiply_33_31(float *mat, float *p, float *r)
        {
            for (int row = 0; row < 3; ++row)
            {
                float acc = 0.0f;
                for (int col = 0; col < 3; ++col)
                {
                    acc += mat[row * 3 + col] * p[col];
                }
                r[row] = acc;
            }
        }

        // Triangulates one camera/projector pixel correspondence geometrically:
        // both pixels are back-projected to viewing rays (the projector ray
        // starts at the origin of this frame, the camera position/ray are
        // mapped through the extrinsics), two triangle-based estimates of the
        // ray intersection are computed, and their midpoint is returned.
        //
        // cameraPoint / projectorPoint  pixel coordinates (expected to be
        //                               undistorted by the caller).
        Point3f triangulatePoint(Vec2f cameraPoint, Vec2f projectorPoint, ProjectorCalibrateTrinsics &projectorCalibrateTrinics)
        {
            Intrinsic cameraIntrinsic = projectorCalibrateTrinics.cameraIntrinsic;
            Intrinsic projectorIntrinsic = projectorCalibrateTrinics.projectorIntrinsic;
            Extrinsic extrinsic = projectorCalibrateTrinics.extrinsic;

            // The projector sits at the origin of the working frame.
            Vec3f projectorPoint3d = {0, 0, 0};

            // Back-project the projector pixel to a unit viewing direction.
            float projectorDirectionX = (projectorPoint[0] - projectorIntrinsic.tx) / projectorIntrinsic.fx;
            float projectorDirectionY = (projectorPoint[1] - projectorIntrinsic.ty) / projectorIntrinsic.fy;
            Vec3f projectorDirection = {projectorDirectionX, projectorDirectionY, 1};
            projectorDirection = normalize(projectorDirection);

            // Camera center (homogeneous origin) mapped through the extrinsic
            // 4x4 transform gives the camera position in this frame.
            Vec4f cameraPoint4d = {0, 0, 0, 1};
            // float cameraPoint4dMatPtr[4];
            // float *matrix = (float *)extrinsic.matrix.data;
            // float cameraPointPtr[4];
            // cameraPointPtr[0] = cameraPoint4d[0];
            // cameraPointPtr[1] = cameraPoint4d[1];
            // cameraPointPtr[2] = cameraPoint4d[2];
            // cameraPointPtr[3] = cameraPoint4d[3];
            // multiply_44_41(matrix, cameraPointPtr, cameraPoint4dMatPtr);
            // Mat cameraPoint4dMat(4, 1, CV_32FC1, cameraPoint4dMatPtr);
            Mat cameraPoint4dMat = extrinsic.matrix * Mat(cameraPoint4d);
            Vec3f cameraPoint3d = {cameraPoint4dMat.at<float>(0, 0), cameraPoint4dMat.at<float>(1, 0), cameraPoint4dMat.at<float>(2, 0)};

            // Back-project the camera pixel, then rotate the direction into
            // the working frame with the extrinsic rotation.
            float cameraDirectionX = (cameraPoint[0] - cameraIntrinsic.tx) / cameraIntrinsic.fx;
            float cameraDirectionY = (cameraPoint[1] - cameraIntrinsic.ty) / cameraIntrinsic.fy;
            Vec3f cameraDirection3d = {cameraDirectionX, cameraDirectionY, 1};
            // float cameraDirection4dMatPtr[3];
            // float *rotation = (float *)extrinsic.rotation.data;
            // Vec3f normalizedCameraDirection3d = normalize(cameraDirection3d);
            // float normalizedCameraDirection3dPtr[3];
            // normalizedCameraDirection3dPtr[0] = normalizedCameraDirection3d[0];
            // normalizedCameraDirection3dPtr[1] = normalizedCameraDirection3d[1];
            // normalizedCameraDirection3dPtr[2] = normalizedCameraDirection3d[2];
            // multiply_33_31(rotation, normalizedCameraDirection3dPtr, cameraDirection4dMatPtr);
            // Mat cameraDirection4dMat(3, 1, CV_32FC1, cameraDirection4dMatPtr);
            Mat cameraDirection4dMat = extrinsic.rotation * Mat(normalize(cameraDirection3d));
            Vec3f cameraDirection = {cameraDirection4dMat.at<float>(0, 0), cameraDirection4dMat.at<float>(1, 0), cameraDirection4dMat.at<float>(2, 0)};
            cameraDirection = normalize(cameraDirection);

            // Common perpendicular of the two rays, and helper points obtained
            // by shifting each ray origin towards the other ray's plane.
            // NOTE(review): `abs` here is unqualified; if it binds to the C
            // integer abs the fractional part of the cosine is truncated --
            // confirm std::fabs was intended.
            Vec3f normal = normalize(cameraDirection.cross(projectorDirection));
            float originDistance = length(projectorPoint3d - cameraPoint3d);
            Vec3f originVec = cameraPoint3d - projectorPoint3d;
            Vec3f originVecNormalized = originVec / originDistance;
            Vec3f projectorPointInCameraPlane = cameraPoint3d - originVec - originDistance * abs(getAngleCos(originVec, normal)) * normal;
            Vec3f cameraPointInProjectorPlane = projectorPoint3d + originVec + originDistance * abs(getAngleCos(originVec, normal)) * normal;

            // Law-of-sines triangle solve: distance x along the camera ray to
            // the estimated intersection, first estimate p1.
            float theta1 = getAngle(projectorPointInCameraPlane - cameraPoint3d, cameraDirection);
            float theta2 = PI - getAngle(projectorPointInCameraPlane - cameraPoint3d, projectorDirection);
            float theta3 = PI - theta1 - theta2;
            float x = length(projectorPointInCameraPlane - cameraPoint3d) / sin(theta3) * sin(theta2);
            Vec3f p1 = cameraPoint3d + x * cameraDirection;

            // Second estimate p2 from the projector side.
            // NOTE(review): symmetry with p1 suggests this should advance
            // along projectorDirection rather than cameraDirection -- verify
            // against the derivation before changing anything.
            theta1 = getAngle(projectorPoint3d - cameraPointInProjectorPlane, cameraDirection);
            theta2 = PI - getAngle(projectorPoint3d - cameraPointInProjectorPlane, projectorDirection);
            theta3 = PI - theta1 - theta2;
            x = length(cameraPointInProjectorPlane - projectorPoint3d) / sin(theta3) * sin(theta2);
            Vec3f p2 = cameraPointInProjectorPlane + x * cameraDirection;

            // Midpoint of the two estimates is the reconstructed point.
            Point3f point = {(p1[0] + p2[0]) / 2, (p1[1] + p2[1]) / 2, (p1[2] + p2[2]) / 2};
            return point;
        }

        // Triangulates one camera/projector correspondence with OpenCV's
        // triangulatePoints (given both 3x4 projection matrices) and
        // dehomogenizes the resulting 4D point.
        Point3f triangulatePointCv(Vec2f cameraPoint, Vec2f projectorPoint, Mat cameraProjectionMatrix, Mat projectorProjectionMatrix)
        {
            Mat camObservation = Mat::zeros(2, 1, CV_32FC1);
            camObservation.at<float>(0, 0) = cameraPoint[0];
            camObservation.at<float>(1, 0) = cameraPoint[1];
            Mat projObservation = Mat::zeros(2, 1, CV_32FC1);
            projObservation.at<float>(0, 0) = projectorPoint[0];
            projObservation.at<float>(1, 0) = projectorPoint[1];

            Mat homogeneous;
            triangulatePoints(cameraProjectionMatrix, projectorProjectionMatrix, camObservation, projObservation, homogeneous);
            float w = homogeneous.at<float>(3, 0);
            Point3f point = {homogeneous.at<float>(0, 0) / w,
                             homogeneous.at<float>(1, 0) / w,
                             homogeneous.at<float>(2, 0) / w};
            return point;
        }

        // Builds the two 3x4 projection matrices used for triangulation: the
        // camera is placed at the world origin (intrinsics with zero
        // translation), while the projector's intrinsics are composed with the
        // 4x4 extrinsic transform.
        void generateProjectionMatrix(ProjectorCalibrateTrinsics &projectorCalibrateTrinics, Mat &cameraProjectionMatrix, Mat &projectorProjectionMatrix)
        {
            Intrinsic &cameraIntrinsic = projectorCalibrateTrinics.cameraIntrinsic;
            Intrinsic &projectorIntrinsic = projectorCalibrateTrinics.projectorIntrinsic;

            cameraProjectionMatrix = Mat::zeros(3, 4, CV_32FC1);
            cameraIntrinsic.matrix(Range(0, 3), Range(0, 3)).copyTo(cameraProjectionMatrix(Range(0, 3), Range(0, 3)));

            projectorProjectionMatrix = Mat::zeros(3, 4, CV_32FC1);
            projectorIntrinsic.matrix(Range(0, 3), Range(0, 3)).copyTo(projectorProjectionMatrix(Range(0, 3), Range(0, 3)));
            projectorProjectionMatrix = projectorProjectionMatrix * projectorCalibrateTrinics.extrinsic.matrix;
        }

        // Reconstructs a point cloud from a monocular (camera + projector)
        // phase-shift decode image.  decodeImage stores, per camera pixel, the
        // decoded projector column x (NaN marks undecoded pixels).  For each
        // decoded pixel the matching projector pixel is found by intersecting
        // the vertical projector line x = const with the pixel's epipolar
        // line, both points are undistorted, triangulated, and kept only when
        // z lies in [minDistance, maxDistance].  rowGap/colGap subsample the
        // image grid.  Colors take the gray value of `image`, replicated to
        // R/G/B.
        void reconstructPhaseShiftMonocular(Mat &image, Mat &decodeImage, Size imageSize, Size patternSize, ProjectorCalibrateTrinsics &projectorCalibrateTrinics, float minDistance, float maxDistance, Points &points, Colors &colors, int rowGap = 1, int colGap = 1)
        {
            Mat cameraProjectionMatrix, projectorProjectionMatrix;
            generateProjectionMatrix(projectorCalibrateTrinics, cameraProjectionMatrix,
                                     projectorProjectionMatrix);
            Mat &fundamental = projectorCalibrateTrinics.extrinsic.fundamental;

            Size size = decodeImage.size();

            for (int h = 0; h < size.height; h += rowGap)
            {
                // std::cout << h << std::endl;
                float *decodeImageRow = decodeImage.ptr<float>(h);
                for (int w = 0; w < size.width; w += colGap)
                {
                    float x = decodeImageRow[w];
                    if (isnan(x))
                    {
                        continue;
                    }
                    // Homogeneous line x = const in projector coordinates.
                    Vec3f xLine = {1, 0, -x};
                    Vec2f cameraPoint = Vec2f((float)w, (float)h);
                    Vec3f epipolarLine = getEpipolarline(cameraPoint, fundamental);
                    // std::cout << fundamental << std::endl;
                    // std::cout << fundamental.at<float>(0, 0) << "," << fundamental.at<float>(0, 1) << std::endl;
                    Vec2f projectorPoint = lineIntersect(xLine, epipolarLine);

                    Vec2f cameraPointUndistorted = undistortPoint(cameraPoint, projectorCalibrateTrinics.cameraIntrinsic);
                    Vec2f projectorPointUndistorted = undistortPoint(projectorPoint, projectorCalibrateTrinics.projectorIntrinsic);
                    Point3f reconstructPoint = triangulatePoint(cameraPointUndistorted, projectorPointUndistorted, projectorCalibrateTrinics);

                    // Vec2f cameraPointUndistorted = undistortPointCv(cameraPoint, projectorCalibrateTrinics.cameraIntrinsic);
                    // Vec2f projectorPointUndistorted = undistortPointCv(projectorPoint, projectorCalibrateTrinics.projectorIntrinsic);
                    // Point3f reconstructPoint2 = triangulatePointCv(cameraPoint, projectorPoint, cameraProjectionMatrix, projectorProjectionMatrix);
                    // Depth-range filter: drop out-of-range or failed points.
                    if (reconstructPoint.z > maxDistance || reconstructPoint.z < minDistance || isnan(reconstructPoint.z))
                        continue;
                    points.push_back(reconstructPoint);

                    // Vec3b color = image.at<Vec3b>(h, w);
                    // Point3f colorPoint = {(float)color[0], (float)color[0], (float)color[0]};
                    // Gray intensity replicated into the RGB color entry.
                    uchar color = image.at<uchar>(h, w);
                    Color colorPoint = {color, color, color};
                    colors.push_back(colorPoint);
                }
            }
        }

#if USECUDA
        // Thin forwarding wrapper: delegates the monocular phase-shift
        // reconstruction to the CUDA implementation in reconstruct.cuh.
        // Only compiled when USECUDA is set; same contract as
        // reconstructPhaseShiftMonocular above (minus the gap parameters).
        void reconstructPhaseShiftMonocularCuda(Mat &image, Mat &decodeImage, Size imageSize, Size patternSize, ProjectorCalibrateTrinsics &projectorCalibrateTrinics, float minDistance, float maxDistance, Points &points, Colors &colors)
        {
            SLCuda::reconstructPhaseShiftMonocularCuda(image, decodeImage, imageSize, patternSize, projectorCalibrateTrinics, minDistance, maxDistance, points, colors);
        }
#endif
        // Reconstructs a point cloud from a "Brown" decode image in which each
        // camera pixel stores the projector (x, y) coordinate it observed
        // (CV_32FC2, NaN = undecoded).  Camera pixels voting for the same
        // projector pixel are accumulated and averaged, then each projector
        // pixel with at least one vote is undistorted and triangulated.
        // Colors sample the gray camera image at the averaged pixel
        // (black when out of bounds).
        //
        // Fixes: removed the unused local references to the intrinsics, the
        // no-op `if (true)` wrapper, and the leftover per-row debug print
        // (`std::cout << h << std::endl`), which flushed stdout on every
        // projector row.  The final point-count summary is kept.
        void reconstructBrown(Mat &image, Mat &decodeImage, Size imageSize, Size patternSize, ProjectorCalibrateTrinsics &projectorCalibrateTrinics, Points &points, Colors &colors)
        {
            Mat cameraProjectionMatrix, projectorProjectionMatrix;
            generateProjectionMatrix(projectorCalibrateTrinics, cameraProjectionMatrix,
                                     projectorProjectionMatrix);

            Size size = imageSize;
            // Per projector pixel: sum of camera x, sum of camera y, vote count.
            Mat decodeImageCache = Mat::zeros(patternSize, CV_32FC3);
            // #pragma omp parallel for
            for (int h = 0; h < size.height; h++)
            {
                const Vec2f *decodeImageRow = decodeImage.ptr<Vec2f>(h);
                for (int w = 0; w < size.width; w++)
                {
                    float _x = decodeImageRow[w][0];
                    float _y = decodeImageRow[w][1];
                    if (_x != _x || _y != _y) // NaN check without isnan
                    {
                        continue;
                    }
                    int x = int(_x);
                    int y = int(_y);
                    if (x < 0 || x > patternSize.width - 1 || y < 0 || y > patternSize.height - 1)
                        continue;
                    decodeImageCache.at<Vec3f>(y, x)[0] += w;
                    decodeImageCache.at<Vec3f>(y, x)[1] += h;
                    decodeImageCache.at<Vec3f>(y, x)[2] += 1.0f;
                }
            }

            for (int h = 0; h < patternSize.height; h++)
            {
                const Vec3f *decodeImageCacheRow = decodeImageCache.ptr<Vec3f>(h);
                for (int w = 0; w < patternSize.width; w++)
                {
                    float x = decodeImageCacheRow[w][0];
                    float y = decodeImageCacheRow[w][1];
                    float n = decodeImageCacheRow[w][2];
                    if (n < 1)
                        continue;
                    if (isnan(x) || isnan(y))
                        continue;
                    // Average camera coordinate over all pixels that voted.
                    x = x / n;
                    y = y / n;

                    Vec2f cameraPoint = Vec2f((float)x, (float)y);
                    Vec2f projectorPoint = Vec2f((float)w, (float)h);
                    Vec2f cameraPointUndistorted = undistortPoint(cameraPoint, projectorCalibrateTrinics.cameraIntrinsic);
                    Vec2f projectorPointUndistorted = undistortPoint(projectorPoint, projectorCalibrateTrinics.projectorIntrinsic);
                    Point3f reconstructPoint = triangulatePointCv(cameraPointUndistorted, projectorPointUndistorted, cameraProjectionMatrix, projectorProjectionMatrix);
                    // Point3f reconstructPoint = triangulatePointCv(cameraPoint, projectorPoint, cameraProjectionMatrix, projectorProjectionMatrix);
                    points.push_back(reconstructPoint);

                    // Sample color at the averaged camera pixel; out-of-bounds
                    // samples get black so points/colors stay in lockstep.
                    int colorX = (int)x;
                    int colorY = (int)y;
                    if (colorX < 0 || colorX > image.size().width - 1 ||
                        colorY < 0 || colorY > image.size().height - 1)
                    {
                        Color colorPoint = {0, 0, 0};
                        colors.push_back(colorPoint);
                        continue;
                    }
                    uchar color = image.at<uchar>(colorY, colorX);
                    Color colorPoint = {color, color, color};
                    colors.push_back(colorPoint);
                }
            }
            std::cout << "Number of Points: " << points.size() << std::endl;
        }

        // Reconstructs a point cloud from a rectified stereo pair of phase
        // decode images.  Pass 1 matches phases along each rectified row (for
        // every left pixel, scan leftwards in the right image for a pair of
        // neighboring pixels whose phases bracket the left phase); the column
        // difference becomes the disparity.  Pass 2 reprojects disparities to
        // 3D using projectionMatrix (the CV_64F Q matrix from stereoRectify).
        // Pass 3 collects valid depth pixels into points/colors.
        //
        // Fix: the depth-row pointer in pass 2 previously aliased `disparity`
        // (a single-channel CV_32F Mat) as Vec3f -- a wrong-type,
        // out-of-bounds view; it now points into `depth` and is used for the
        // store instead of the per-pixel at<Vec3f> call.
        void reconstructPhaseShiftBiocular(Mat &rectifiedDecodeImageL, Mat &rectifiedDecodeImageR, Mat grayImageL, BiocularCalibrateTrinsics &biocularCalibrateTrinsics, Mat &projectionMatrix, Points &points, Points &colors, Mat &disparity, Mat &depth)
        {
            Size size = rectifiedDecodeImageL.size();
            disparity = Mat::zeros(size, CV_32F);
            depth = Mat::zeros(size, CV_32FC3);
            int gap = 1;

            // Pass 1: phase matching along each rectified row.
            for (int h = 0; h < size.height; h += gap)
            {
                float *decodeImageLRow = rectifiedDecodeImageL.ptr<float>(h);
                float *decodeImageRRow = rectifiedDecodeImageR.ptr<float>(h);
                float *disparityRow = disparity.ptr<float>(h);
                for (int wl = 0; wl < size.width; wl += gap)
                {
                    float x = decodeImageLRow[wl];
                    if (x <= 0 || isnan(x))
                        continue;
                    // NOTE(review): the scan keeps going after a match, so the
                    // left-most bracketing pair wins when several exist --
                    // confirm whether an early break was intended.
                    for (int wr = wl; wr > 0; wr--)
                    {
                        float r = decodeImageRRow[wr];
                        float l = decodeImageRRow[wr - 1];
                        if (isnan(l) || isnan(r))
                            continue;
                        if (l <= x && x <= r)
                        {
                            // float wrInterplotation = wr - (r - x) / (r - l); // sub-pixel refinement, disabled
                            float wrInterplotation = (float)wr;
                            disparityRow[wl] = (float)wl - wrInterplotation;
                        }
                    }
                }
            }

            // Pass 2: reproject disparities to 3D (manual equivalent of
            // reprojectImageTo3D for this Q-matrix layout).
            for (int h = 0; h < size.height; h += gap)
            {
                float *disparityRow = disparity.ptr<float>(h);
                Vec3f *depthRow = depth.ptr<Vec3f>(h);
                for (int w = 0; w < size.width; w += gap)
                {
                    float x = (float)w;
                    float y = (float)h;
                    float d = disparityRow[w];
                    if (d == 0)
                        continue;
                    float pw = -1.0f * d * (float)projectionMatrix.at<double>(3, 2) + (float)projectionMatrix.at<double>(3, 3);
                    float px = x + (float)projectionMatrix.at<double>(0, 3);
                    float py = y + (float)projectionMatrix.at<double>(1, 3);
                    float pz = (float)projectionMatrix.at<double>(2, 3);
                    px = px / pw;
                    py = py / pw;
                    pz = pz / pw;
                    depthRow[w] = Vec3f(px, py, pz);
                }
            }
            // reprojectImageTo3D(disparity, depth, projectionMatrix);

            // Pass 3: collect valid (finite, positive-z) depth pixels.
            for (int h = 0; h < size.height; h += gap)
            {
                Vec3f *depthRow = depth.ptr<Vec3f>(h);
                uchar *colorRow = grayImageL.ptr<uchar>(h);
                for (int w = 0; w < size.width; w += gap)
                {
                    Vec3f point = depthRow[w];
                    if (isnan(point[0]) || point[2] <= 0)
                        continue;
                    Point3f reconstructPoint = {point[0], point[1], point[2]};
                    points.push_back(reconstructPoint);
                    float color = (float)colorRow[w];
                    Point3f colorPoint = {color, color, color};
                    colors.push_back(colorPoint);
                }
            }
        }
        // void reconstructBrown(Mat &image, Mat &decodeImage, Size imageSize, Size patternSize, ProjectorCalibrateTrinsics &projectorCalibrateTrinics, Points &points, std::vector<uint8_t> &colors)
        // {
        //     Mat cameraProjectionMatrix = Mat::zeros(3, 4, CV_32FC1);
        //     Mat projectorProjectionMatrix = Mat::zeros(3, 4, CV_32FC1);
        //     Intrinsic &cameraIntrinsic = projectorCalibrateTrinics.cameraIntrinsic;
        //     Intrinsic &projectorIntrinsic = projectorCalibrateTrinics.projectorIntrinsic;
        //     cameraIntrinsic.matrix(Range(0, 3), Range(0, 3)).copyTo(cameraProjectionMatrix(Range(0, 3), Range(0, 3)));
        //     // cameraProjectionMatrix = imageProjectionMatrix * projectorCalibrateTrinics.extrinsic.matrix;

        //     projectorIntrinsic.matrix(Range(0, 3), Range(0, 3)).copyTo(projectorProjectionMatrix(Range(0, 3), Range(0, 3)));
        //     projectorProjectionMatrix = projectorProjectionMatrix * projectorCalibrateTrinics.extrinsic.matrix;

        //     Size size = imageSize;
        //     Mat decodeImageCache = Mat::zeros(patternSize, CV_32FC3);
        //     // #pragma omp parallel for
        //     for (int h = 0; h < size.height; h++)
        //     {
        //          const Vec2f *decodeImageRow = decodeImage.ptr<Vec2f>(h);
        //         for (int w = 0; w < size.width; w++)
        //         {
        //             float _x = decodeImageRow[w][0];
        //             float _y = decodeImageRow[w][1];
        //             if (_x != _x || _y != _y)
        //             {
        //                 continue;
        //             }
        //             int x = int(_x);
        //             int y = int(_y);
        //             if (x < 0 || x > patternSize.width - 1 || y < 0 || y > patternSize.height - 1)
        //                 continue;
        //             if (true)
        //             {
        //                 decodeImageCache.at<Vec3f>(y, x)[0] += w;
        //                 decodeImageCache.at<Vec3f>(y, x)[1] += h;
        //                 decodeImageCache.at<Vec3f>(y, x)[2] += 1.0f;
        //             }
        //         }
        //     }

        //     for (int h = 0; h < patternSize.height; h++)
        //     {
        //         std::cout << h << std::endl;
        //          const Vec3f *decodeImageCacheRow = decodeImageCache.ptr<Vec3f>(h);
        //         for (int w = 0; w < patternSize.width; w++)
        //         {
        //             float x = decodeImageCacheRow[w][0];
        //             float y = decodeImageCacheRow[w][1];
        //             float n = decodeImageCacheRow[w][2];
        //             if (n < 5)
        //                 continue;
        //             x = x / n;
        //             y = y / n;

        //             Mat cameraPoint(1, 1, CV_64FC2), projectorPoint(1, 1, CV_64FC2);
        //             cameraPoint.at<cv::Vec2d>(0, 0) = cv::Vec2d(y, x);
        //             projectorPoint.at<cv::Vec2d>(0, 0) = cv::Vec2d(h, w);
        //             Mat cameraPointUndistorted, projectorPointUndistorted, newCameraMatrix, newProjectorMatrix;
        //             undistortPoints(cameraPoint, cameraPointUndistorted, cameraIntrinsic.matrix, cameraIntrinsic.kc, noArray(), cameraIntrinsic.matrix);
        //             undistortPoints(projectorPoint, projectorPointUndistorted, projectorIntrinsic.matrix, projectorIntrinsic.kc, noArray(), projectorIntrinsic.matrix);
        //             // cameraPointUndistorted = cameraPoint;
        //             // projectorPointUndistorted = projectorPoint;

        //             Mat cameraPointForTriangulation = Mat::zeros(2, 1, CV_32FC1);
        //             cameraPointForTriangulation.at<float>(0, 0) = (float)cameraPointUndistorted.at<Vec2d>(0, 0)[1]; // * newCameraMatrix.at<float>(0, 0) + newCameraMatrix.at<float>(0, 2);
        //             cameraPointForTriangulation.at<float>(1, 0) = (float)cameraPointUndistorted.at<Vec2d>(0, 0)[0]; //* newCameraMatrix.at<float>(1, 1) + newCameraMatrix.at<float>(1, 2);
        //             Mat projectorPointForTriangulation = Mat::zeros(2, 1, CV_32FC1);
        //             projectorPointForTriangulation.at<float>(0, 0) = (float)projectorPointUndistorted.at<Vec2d>(0, 0)[1]; // * newProjectorMatrix.at<float>(0, 0) + newProjectorMatrix.at<float>(0, 2);
        //             projectorPointForTriangulation.at<float>(1, 0) = (float)projectorPointUndistorted.at<Vec2d>(0, 0)[0]; //* newProjectorMatrix.at<float>(1, 1) + newProjectorMatrix.at<float>(1, 2);

        //             // Mat cameraPointForTriangulation = Mat::zeros(2, 1, CV_32FC1);
        //             // cameraPointForTriangulation.at<float>(0, 0) = x;
        //             // cameraPointForTriangulation.at<float>(1, 0) = y;
        //             // Mat projectorPointForTriangulation = Mat::zeros(2, 1, CV_32FC1);
        //             // projectorPointForTriangulation.at<float>(0, 0) = (float)w;
        //             // projectorPointForTriangulation.at<float>(1, 0) = (float)h;
        //             Mat output;
        //             triangulatePoints(cameraProjectionMatrix, projectorProjectionMatrix, cameraPointForTriangulation, projectorPointForTriangulation, output);
        //             float _x = output.at<float>(0, 0);
        //             float _y = output.at<float>(1, 0);
        //             float _z = output.at<float>(2, 0);
        //             float _w = output.at<float>(3, 0);
        //             Point3d point = {(double)_x / _w, (double)_y / _w, (double)_z / _w};
        //             points.push_back(point);

        //             int colorX = (int)x;
        //             int colorY = (int)y;
        //             if (colorX < 0 || colorX > image.size().width - 1)
        //             {
        //                 colors.push_back(0);
        //                 continue;
        //             }
        //             if (colorY < 0 || colorY > image.size().height - 1)
        //             {
        //                 colors.push_back(0);
        //                 continue;
        //             }
        //             uint8_t color = image.at<Vec3b>(colorY, colorX)[0];
        //             colors.push_back(color);
        //         }
        //     }
        //     std::cout << "Number of Points: " << points.size() << std::endl;
        // }

        // Writes an ASCII PLY point cloud with grayscale colors.  Intensities
        // are contrast-stretched to [0, 220] based on the observed min/max and
        // written as identical R/G/B components.
        //
        // Assumes colors has at least points.size() entries (callers push one
        // color per point).
        //
        // Fix: when every color had the same value (or colors was empty),
        // max - min was 0 and the scale became inf, producing inf/NaN color
        // values in the file; the scale now falls back to 0 (black output).
        void savePly(std::string filename, Points &points, std::vector<uint8_t> &colors)
        {
            std::ofstream outfile(filename);
            outfile << "ply\n"
                    << "format ascii 1.0\n"
                    << "element vertex " << points.size() << "\n";
            outfile << "property float x\n"
                    << "property float y\n"
                    << "property float z\n"
                    << "property uchar red\n"
                    << "property uchar green\n"
                    << "property uchar blue\n"
                    << "element face 0\n";
            outfile << "property list uchar int vertex_indices\n"
                    << "end_header\n";
            double min = 255;
            double max = 0;
            for (auto color : colors)
            {
                min = min > color ? color : min;
                max = max < color ? color : max;
            }
            double scale = (max > min) ? 220 / (max - min) : 0.0;
            for (size_t i = 0; i < points.size(); i++)
            {
                Point3d point = points.at(i);
                int gray = (int)((colors[i] - min) * scale);
                outfile << point.x << " ";
                outfile << point.y << " ";
                outfile << point.z << " ";
                outfile << gray << " ";
                outfile << gray << " ";
                outfile << gray << " ";
                outfile << "\n";
            }
            outfile.close();
        }

        // Writes an ASCII PLY point cloud where each Colors entry is a Point3d
        // whose x/y/z components are cast to int and used as R/G/B.  The whole
        // file is assembled in memory first, then written in one stream
        // operation.
        void savePly(std::string filename, Points &points, Points &colors)
        {
            std::string buffer("ply\nformat ascii 1.0\nelement vertex ");
            buffer += std::to_string(points.size());
            buffer += "\n"
                      "property float x\n"
                      "property float y\n"
                      "property float z\n"
                      "property uchar red\n"
                      "property uchar green\n"
                      "property uchar blue\n"
                      "element face 0\n"
                      "property list uchar int vertex_indices\n"
                      "end_header\n";
            for (int i = 0; i < points.size(); i++)
            {
                Point3d point = points.at(i);
                Point3d color = colors[i];
                buffer += std::to_string(point.x) + " " + std::to_string(point.y) + " " + std::to_string(point.z) + " ";
                buffer += std::to_string((int)color.x) + " " + std::to_string((int)color.y) + " " + std::to_string((int)color.z) + " \n";
            }
            std::ofstream outfile(filename);
            outfile << buffer;
            outfile.close();
        }

        // Writes an ASCII PLY point cloud; each Colors entry supplies the
        // red/green/blue channels directly (indexed [0]/[1]/[2]).  The file
        // contents are built in a string buffer and flushed once.
        void savePly(std::string filename, Points &points, Colors &colors)
        {
            std::string buffer("ply\nformat ascii 1.0\nelement vertex ");
            buffer += std::to_string(points.size());
            buffer += "\n"
                      "property float x\n"
                      "property float y\n"
                      "property float z\n"
                      "property uchar red\n"
                      "property uchar green\n"
                      "property uchar blue\n"
                      "element face 0\n"
                      "property list uchar int vertex_indices\n"
                      "end_header\n";
            for (int i = 0; i < points.size(); i++)
            {
                Point3d point = points.at(i);
                auto color = colors[i];
                buffer += std::to_string(point.x) + " " + std::to_string(point.y) + " " + std::to_string(point.z) + " ";
                buffer += std::to_string((int)color[0]) + " " + std::to_string((int)color[1]) + " " + std::to_string((int)color[2]) + " \n";
            }
            std::ofstream outfile(filename);
            outfile << buffer;
            outfile.close();
        }

        void savePlyBinary(std::string filename, Points &points, Colors &colors)
        {
            slLog.log("Number of Points: %d", points.size());
            FILE *plyFile = fopen(filename.c_str(), "wb");
            fprintf(plyFile, "ply\n");
            fprintf(plyFile, "format binary_little_endian 1.0\n");
            fprintf(plyFile, "element vertex %zu\n", points.size());
            fprintf(plyFile, "property float x\n");
            fprintf(plyFile, "property float y\n");
            fprintf(plyFile, "property float z\n");
            fprintf(plyFile, "property uchar red\n");
            fprintf(plyFile, "property uchar green\n");
            fprintf(plyFile, "property uchar blue\n");
            fprintf(plyFile, "element face 0\n");
            fprintf(plyFile, "property list uchar int vertex_indices\n");
            fprintf(plyFile, "end_header\n");

            for (int i = 0; i < points.size(); i++)
            {
                auto point = points[i];
                auto color = colors[i];
                fwrite(&point, 3 * sizeof(float), 1, plyFile);
                fwrite(&color, 3 * sizeof(uchar), 1, plyFile);
            }

            fclose(plyFile);
        }
    }
}