#include "stereo_camera_ranging.h"

// ---------------------------------------------------------------------------
// Stereo calibration parameters.
// NOTE(review): these globals really belong in a configuration file or as
// members of stereo_camera_ranging, not in a translation unit (keeps the
// intent of the original TODO comment).
// ---------------------------------------------------------------------------

// Canny upper threshold and number of threshold levels used by findSquares().
// Made const: nothing in this file ever mutates them.
static const int g_thresh = 50;
static const int g_N = 5;

// Camera intrinsics in OpenCV convention: K = [fx s cx; 0 fy cy; 0 0 1].
// Fixed: the original values were laid out in MATLAB's transposed form
// ([fx 0 0; s fy 0; cx cy 1] row-by-row, i.e. the principal point ended up
// in the bottom row), so K * [R|t] projected incorrectly.  The same numbers
// are kept, transposed into the OpenCV layout.
cv::Mat g_left_camera_Intrinsic_matrix = (cv::Mat_<double>(3,3) <<
    538.7635, 0.2824,   650.2173,
    0,        539.0124, 358.6647,
    0,        0,        1);

cv::Mat g_right_camera_intrinsic_matrix = (cv::Mat_<double>(3,3) <<
    539.7048, 0.1812,   644.2358,
    0,        539.6816, 365.9476,
    0,        0,        1);

// Distortion coefficients (k1, k2, p1, p2, k3) — presumably from the same
// MATLAB calibration; verify ordering against the calibration tool.
cv::Mat g_left_camera_distCoeff = (cv::Mat_<double>(5,1) << -0.0981, 0.1230, -0.0649, 0.000012344, 0.00019051);

cv::Mat g_right_camera_distCoeff = (cv::Mat_<double>(5,1) << -0.1107, 0.1564, -0.0897, 0.00030057, 0.0004703);

// Pose of the right camera relative to the left (rotation R, translation t).
cv::Mat g_rotation_matrix = (cv::Mat_<double>(3,3) << 1.0000, -0.00030063, -0.0039, 0.00030764, 1.0000, 0.0018, 0.0039, -0.0018, 1.0000);

cv::Mat g_translation_matrix = (cv::Mat_<double>(3,1) << -117.2335, -0.0190, 0.0068);

// [R | t] as a 3x4 matrix.  Fixed: the original declared this 4x4 while
// supplying only 12 values, leaving the last row uninitialized and making
// any K(3x3) * T product dimensionally invalid.
cv::Mat g_transformation_matrix = (cv::Mat_<double>(3,4) <<
    1.0000,     -0.00030063, -0.0039, -117.2335,
    0.00030764,  1.0000,      0.0018,   -0.0190,
    0.0039,     -0.0018,      1.0000,    0.0068);

// The left camera defines the world frame: identity rotation, zero translation.
cv::Mat g_left_rotation_matrix = (cv::Mat_<double>(3,3) << 1.0000, 0, 0, 0, 1.0000, 0, 0, 0, 1.0000);

// Fixed: the original declared a 3x3 matrix but supplied only three values;
// it must be a 3x1 vector so hconcat(R, t) in uv2xyz() yields a 3x4 [R|t].
cv::Mat g_left_translation_matrix = (cv::Mat_<double>(3,1) << 0, 0, 0);

/**
 * Loads the stereo image pair from disk, locates the target square in both
 * views (ImageSegment) and triangulates its centre into a 3-D world
 * coordinate (CoordinateCalculate).
 *
 * @param left_image_path  filesystem path of the left camera image
 * @param right_image_path filesystem path of the right camera image
 */
stereo_camera_ranging::stereo_camera_ranging(std::string left_image_path, std::string right_image_path)
    :m_left_image_path(std::move(left_image_path)), m_right_image_path(std::move(right_image_path))
{
    this->m_left_image = cv::imread(m_left_image_path);
    this->m_right_image = cv::imread(m_right_image_path);

    // Fail fast on unreadable/missing files instead of crashing later
    // inside findSquares() with an opaque OpenCV error.
    CV_Assert(!this->m_left_image.empty() && !this->m_right_image.empty());

    this->m_object_camera_coordiante = ImageSegment(this->m_left_image, this->m_right_image);
    this->m_object_world_coordinate = CoordinateCalculate(this->m_object_camera_coordiante);
}

// Nothing to release explicitly; all members clean up via RAII.
stereo_camera_ranging::~stereo_camera_ranging() = default;

/**
 * Detects the target square in both views and returns the pixel coordinates
 * of its centre in each image.
 *
 * Exactly one square must be detected per image (asserted below); the centre
 * is taken as the mean of the quadrilateral's four corners, which is
 * insensitive to the corner ordering produced by approxPolyDP.
 *
 * @param left_image  left camera frame
 * @param right_image right camera frame
 * @return pixel centres of the target in both images
 */
S_OBJECT_CAMERA_COORDINATE stereo_camera_ranging::ImageSegment(cv::Mat left_image, cv::Mat right_image)
{
    S_OBJECT_CAMERA_COORDINATE result;
    findSquares(left_image, this->m_left_image_point);
    findSquares(right_image, this->m_right_image_vector);

    assert(this->m_left_image_point.size() == 1);
    // Fixed: the original asserted the LEFT vector twice and never
    // validated the right-image detections.
    assert(this->m_right_image_vector.size() == 1);

    const cv::Point left_tl = this->m_left_image_point[0][0];
    const cv::Point left_tr = this->m_left_image_point[0][1];
    const cv::Point left_bl = this->m_left_image_point[0][2];
    const cv::Point left_br = this->m_left_image_point[0][3];

    // Fixed: the right-image corners were mistakenly read from the LEFT
    // image's detections, which forced the disparity to zero and made the
    // triangulated range meaningless.
    const cv::Point right_tl = this->m_right_image_vector[0][0];
    const cv::Point right_tr = this->m_right_image_vector[0][1];
    const cv::Point right_bl = this->m_right_image_vector[0][2];
    const cv::Point right_br = this->m_right_image_vector[0][3];

    // Centre of each quadrilateral = mean of its four corners (integer division).
    result.left_x = (left_bl.x + left_br.x + left_tl.x + left_tr.x) / 4;
    result.right_x = (right_bl.x + right_br.x + right_tl.x + right_tr.x) / 4;
    result.left_y = (left_bl.y + left_br.y + left_tl.y + left_tr.y) / 4;
    result.right_y = (right_bl.y + right_br.y + right_tl.y + right_tr.y) / 4;

    return result;
}

/**
 * Converts the matched pixel pair into a 3-D world coordinate via uv2xyz().
 *
 * Note: the original also computed K * g_transformation_matrix here — a
 * 3x3-by-4x4 product that throws a cv::Exception at runtime — and built two
 * homogeneous pixel vectors; none of those results were ever used, so that
 * dead code has been removed.
 *
 * @param camera_coordiante pixel coordinates of the target in both views
 * @return triangulated point in the world (left-camera) frame
 */
cv::Point3d stereo_camera_ranging::CoordinateCalculate(S_OBJECT_CAMERA_COORDINATE camera_coordiante)
{
    cv::Point2d left_camera_point(camera_coordiante.left_x, camera_coordiante.left_y);
    cv::Point2d right_camera_point(camera_coordiante.right_x, camera_coordiante.right_y);
    return uv2xyz(left_camera_point, right_camera_point);
}

// Cosine of the angle formed at vertex pt0 by the segments pt0->pt1 and
// pt0->pt2.  The 1e-10 term guards against division by zero when either
// segment is degenerate.
static double angle( cv::Point pt1, cv::Point pt2, cv::Point pt0 )
{
    const double ux = pt1.x - pt0.x;
    const double uy = pt1.y - pt0.y;
    const double vx = pt2.x - pt0.x;
    const double vy = pt2.y - pt0.y;
    const double dot = ux*vx + uy*vy;
    const double norm_product = (ux*ux + uy*uy)*(vx*vx + vy*vy);
    return dot/sqrt(norm_product + 1e-10);
}

/**
 * Detects square-shaped contours in an image (based on the OpenCV "squares"
 * sample).  For each of the three colour planes, g_N threshold levels are
 * tried (Canny at level 0 to catch gradient-shaded squares); contours that
 * approximate a large (> 1000 px²) convex quadrilateral with near-90° corners
 * are appended to `squares` as four vertices.
 *
 * @param image   input image; NOT modified
 * @param squares output list of detected quadrilaterals (cleared first)
 */
void stereo_camera_ranging::findSquares( const cv::Mat& image, POINT_VECTOR& squares )
{
    squares.clear();

    // Median blur suppresses noise while keeping edges sharp.
    // Fixed: the original constructed `timg` as a shallow copy of `image`
    // (cv::Mat timg(image) shares the pixel buffer), so medianBlur wrote its
    // result straight into the caller's supposedly-const image.  Leaving
    // `timg` empty lets medianBlur allocate a separate destination buffer.
    cv::Mat timg;
    medianBlur(image, timg, 9);
    cv::Mat gray0(timg.size(), CV_8U), gray;

    POINT_VECTOR contours;

    // find squares in every color plane of the image
    for( int c = 0; c < 3; c++ )
    {
        // extract the c-th channel of timg into the single-channel gray0
        int ch[] = {c, 0};
        mixChannels(&timg, 1, &gray0, 1, ch, 1);

        // try several threshold levels
        for( int l = 0; l < g_N; l++ )
        {
            // hack: use Canny instead of zero threshold level.
            // Canny helps to catch squares with gradient shading
            if( l == 0 )
            {
                // apply Canny: upper threshold g_thresh, lower threshold 5
                // (a low value forces edge merging), aperture size 5
                Canny(gray0, gray, 5, g_thresh, 5);
                // dilate canny output to remove potential
                // holes between edge segments
                dilate(gray, gray, cv::Mat(), cv::Point(-1,-1));
            }
            else
            {
                // apply threshold if l!=0:
                //     tgray(x,y) = gray(x,y) < (l+1)*255/N ? 255 : 0
                gray = gray0 >= (l+1)*255/g_N;
            }

            // find contours and store them all as a flat list
            findContours(gray, contours, cv::RETR_LIST, cv::CHAIN_APPROX_SIMPLE);

            std::vector<cv::Point> approx;

            // test each contour
            for( size_t i = 0; i < contours.size(); i++ )
            {
                // approximate contour with accuracy proportional
                // to the contour perimeter
                approxPolyDP(cv::Mat(contours[i]), approx, arcLength(cv::Mat(contours[i]), true)*0.02, true);

                // square contours should have 4 vertices after approximation,
                // relatively large area (to filter out noisy contours)
                // and be convex.  contourArea is signed depending on contour
                // orientation, hence fabs().
                if( approx.size() == 4 &&
                    fabs(contourArea(cv::Mat(approx))) > 1000 &&
                    isContourConvex(cv::Mat(approx)) )
                {
                    double maxCosine = 0;

                    for( int j = 2; j < 5; j++ )
                    {
                        // find the maximum cosine of the angle between joint edges
                        double cosine = fabs(angle(approx[j%4], approx[j-2], approx[j-1]));
                        maxCosine = MAX(maxCosine, cosine);
                    }

                    // if cosines of all angles are small
                    // (all angles are ~90 degrees) then accept the
                    // quadrilateral as a square
                    if( maxCosine < 0.3 )
                        squares.push_back(approx);
                }
            }
        }
    }
}


// Overlays every detected quadrilateral on `image` as a closed green
// polyline and shows the result in a window.  Quads whose first vertex sits
// within 3 px of the top/left image border are skipped.
void stereo_camera_ranging::drawSquares( cv::Mat& image, const POINT_VECTOR& squares )
{
    for (const auto& quad : squares)
    {
        const cv::Point* vertices = quad.data();
        int vertex_count = static_cast<int>(quad.size());

        // dont detect the border
        if (vertices->x > 3 && vertices->y > 3)
            polylines(image, &vertices, &vertex_count, 1, true, cv::Scalar(0,255,0), 3, cv::LINE_AA);
    }

    imshow("draw squares window", image);
}

/**
 * Triangulates a matched pixel pair into a 3-D point.
 *
 * Each camera contributes a projection equation
 *     s * [u v 1]^T = M * [X Y Z 1]^T,   with M = K * [R | t],
 * and eliminating the unknown scale s yields two linear equations per camera
 * in (X, Y, Z).  The resulting 4x3 over-determined system A*XYZ = B is
 * solved in the least-squares sense with SVD.
 *
 * @param uvLeft  pixel coordinate of the target in the left image
 * @param uvRight pixel coordinate of the target in the right image
 * @return 3-D point in the world frame (the left-camera frame)
 */
cv::Point3d stereo_camera_ranging::uv2xyz(cv::Point2d uvLeft,cv::Point2d uvRight)
{
    //  [u1]      |X|					  [u2]      |X|
    //Z*|v1| = Ml*|Y|					Z*|v2| = Mr*|Y|
    //  [ 1]      |Z|					  [ 1]      |Z|
    //			  |1|								|1|

    // Left projection matrix Ml = K_l * [I | 0] (world frame == left camera).
    // hconcat allocates the output itself; the original pre-allocated a
    // CV_32F matrix that hconcat immediately replaced anyway.
    cv::Mat mLeftRT;
    hconcat(g_left_rotation_matrix, g_left_translation_matrix, mLeftRT);
    cv::Mat mLeftM = g_left_camera_Intrinsic_matrix * mLeftRT;
    //cout<<"left camera M matrix = "<<endl<<mLeftM<<endl;

    // Right projection matrix Mr = K_r * [R | t].
    cv::Mat mRightRT;
    hconcat(g_rotation_matrix, g_translation_matrix, mRightRT);
    cv::Mat mRightM = g_right_camera_intrinsic_matrix * mRightRT;
    //cout<<"right camera M matrix = "<<endl<<mRightM<<endl;

    // Fixed: the calibration globals are CV_64F (cv::Mat_<double>), but the
    // original read every element with at<float>(), reinterpreting the raw
    // double bytes as floats and producing garbage coordinates.  The whole
    // computation now stays in double precision (CV_64F / at<double>).

    // Least-squares A matrix (scale s substituted out of the projections).
    cv::Mat A = cv::Mat(4,3,CV_64F);
    A.at<double>(0,0) = uvLeft.x * mLeftM.at<double>(2,0) - mLeftM.at<double>(0,0);
    A.at<double>(0,1) = uvLeft.x * mLeftM.at<double>(2,1) - mLeftM.at<double>(0,1);
    A.at<double>(0,2) = uvLeft.x * mLeftM.at<double>(2,2) - mLeftM.at<double>(0,2);

    A.at<double>(1,0) = uvLeft.y * mLeftM.at<double>(2,0) - mLeftM.at<double>(1,0);
    A.at<double>(1,1) = uvLeft.y * mLeftM.at<double>(2,1) - mLeftM.at<double>(1,1);
    A.at<double>(1,2) = uvLeft.y * mLeftM.at<double>(2,2) - mLeftM.at<double>(1,2);

    A.at<double>(2,0) = uvRight.x * mRightM.at<double>(2,0) - mRightM.at<double>(0,0);
    A.at<double>(2,1) = uvRight.x * mRightM.at<double>(2,1) - mRightM.at<double>(0,1);
    A.at<double>(2,2) = uvRight.x * mRightM.at<double>(2,2) - mRightM.at<double>(0,2);

    A.at<double>(3,0) = uvRight.y * mRightM.at<double>(2,0) - mRightM.at<double>(1,0);
    A.at<double>(3,1) = uvRight.y * mRightM.at<double>(2,1) - mRightM.at<double>(1,1);
    A.at<double>(3,2) = uvRight.y * mRightM.at<double>(2,2) - mRightM.at<double>(1,2);

    // Least-squares B vector (the translation column of each projection).
    cv::Mat B = cv::Mat(4,1,CV_64F);
    B.at<double>(0,0) = mLeftM.at<double>(0,3) - uvLeft.x * mLeftM.at<double>(2,3);
    B.at<double>(1,0) = mLeftM.at<double>(1,3) - uvLeft.y * mLeftM.at<double>(2,3);
    B.at<double>(2,0) = mRightM.at<double>(0,3) - uvRight.x * mRightM.at<double>(2,3);
    B.at<double>(3,0) = mRightM.at<double>(1,3) - uvRight.y * mRightM.at<double>(2,3);

    cv::Mat XYZ = cv::Mat(3,1,CV_64F);
    // Solve for XYZ via SVD least squares.
    solve(A,B,XYZ,cv::DECOMP_SVD);

    //cout<<"world coordinate = "<<endl<<XYZ<<endl;

    // Coordinates in the world frame.
    cv::Point3d world;
    world.x = XYZ.at<double>(0,0);
    world.y = XYZ.at<double>(1,0);
    world.z = XYZ.at<double>(2,0);

    return world;
}









