#include <stdlib.h>
#include <iostream>
#include <fstream>
#include <Eigen/Core>
#include <Eigen/Geometry>
#include <Eigen/LU>
#include <Eigen/Dense>
#include <Eigen/StdVector>
#include <Eigen/Geometry>  // For AngleAxis and log


#include <opencv2/core/core.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/opencv.hpp>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/calib3d.hpp>
#include <opencv2/core/eigen.hpp>

#define PI 3.1415926


//  Hand-eye calibration yields H_Color2Robot (i.e. gHc). Given the extrinsic
//  matrix between the color and depth cameras: H_Color2Robot = H_Depth2Robot * Extrinsic,
//  hence H_Depth2Robot = H_Color2Robot * Extrinsic^(-1).
//  A depth-camera point (X, Y, Z) is mapped into the robot tool frame by
//  XYZ_TC = H_Depth * (X, Y, Z, 1)^T.  To reach the robot base frame, convert the
//  current robot pose to a transform H_pose: XYZ_base = H_pose * H_Depth * (X, Y, Z, 1)^T.

int main()
{
	// All matrices below are 4x4 homogeneous transforms; translations are in mm.
	Eigen::Matrix4d Color, Depth, Extrinsic;

	// H_Color2Robot (gHc): color camera -> robot tool flange, from hand-eye calibration.
	// (An earlier calibration result is kept commented out for reference.)
	// Color<<-0.02793506918749389,0.9993362014619749,-0.023383506089580654,-89.09270465210298,
	//     -0.9995016337106712,-0.027580377807756018, 0.015356007608789227,13.292468622057633,
	//     0.014700888380968015,0.023800823673414497, 0.9996086257497367,50.69910739006681,
	//     0.0,0.0,0.0, 1.0;

	Color << -0.0277091, 0.999368, -0.022279, -85.1186,
		-0.999526, -0.027401, 0.014017, 13.4204,
		0.0133977, 0.0226568, 0.999654, 51.5508,
		0, 0, 0, 1;

	// Independently measured depth-camera -> robot transform (unused below;
	// kept for comparison against the derived H_Depth2Robot).
	Depth << -0.013476721195340394, 0.9993342802546519, -0.03390233562104377, -88.9226319461877,
		-0.999907517195769, -0.013531173386057825, -0.0013772345013592334, 27.258822602272076,
		-0.0018350563742372816, 0.0338806393879857, 0.9994241854704762, 49.98395880140714,
		0.0, 0.0, 0.0, 1.0;

	// Extrinsic: depth camera -> color camera transform from the RGB-D sensor calibration.
	Extrinsic << 0.999759, 0.0140664, -0.0168738, 13.966,
		-0.013894, 0.999851, 0.0102917, 0.0432514,
		0.017016, -0.0100547, 0.999805, 0.739738,
		0, 0, 0, 1;

	// Derive the depth-camera -> robot transform:
	//   H_Depth2Robot = H_Color2Robot * Extrinsic^(-1)
	// NOTE: this is a plain composition with an inverse, NOT a left division
	// "solve Color * X = Depth" as a previous comment claimed.
	Eigen::Matrix4d H_Depth2Robot = Color * Extrinsic.inverse();

	// std::cout << "H_Depth2Robot = \n" << H_Depth2Robot << std::endl;

	Eigen::Matrix4d pose1, pose2, XYZ1, XYZ2, H_Depth;

	// Robot tool poses (base frame) at two measurement instants.
	pose1 << -0.995406, -3.55211e-05, -0.0957483, -312.745,
		-3.70047e-05, 1, 1.37193e-05, 0.10066,
		0.0957483, 1.71994e-05, -0.995406, -466.641,
		0, 0, 0, 1;
	pose2 << -0.991621, 0.0871204, -0.0953827, -312.745,
		0.0867184, 0.996198, 0.00835868, 0.10066,
		0.0957483, 1.71994e-05, -0.995406, -466.641,
		0, 0, 0, 1;

	// Each COLUMN of XYZ1 / XYZ2 is one homogeneous point (X, Y, Z, 1)^T
	// measured by the depth camera.
	// BUG FIX: the bottom row used to be 0,0,0,1, which made the first three
	// columns direction vectors (their translation component was dropped when
	// multiplied by the transforms). Every entry must be 1 so that all four
	// points transform as (X, Y, Z, 1), consistent with P_a below.
	XYZ1 << -115.848, 134.061, -117.142, 132.268,
		77.6036, 75.202, -96.69, -99.043,
		420, 419, 409, 408,
		1, 1, 1, 1;
	XYZ2 << -116.902, 131.85, -103.471, 145.653,
		65.447, 84.513, -108.974, -89.113,
		419, 419, 408, 408,
		1, 1, 1, 1;

	// Hand-tuned depth-camera -> tool transform actually used for the mapping
	// (same rotation as Color, translation offset adjusted empirically).
	H_Depth << -0.0277091, 0.999368, -0.022279, -92.1186,
		-0.999526, -0.027401, 0.014017, 11.4204,
		0.0133977, 0.0226568, 0.999654, 51.5508,
		0, 0, 0, 1;

	// Map all four depth points into the robot base frame for each pose:
	// column i of result = pose * H_Depth * (point i).
	Eigen::Matrix4d result1 = pose1 * H_Depth * XYZ1;
	Eigen::Matrix4d result2 = pose2 * H_Depth * XYZ2;
	std::cout << "The result1: \n" << result1 << std::endl;
	std::cout << "The result2: \n" << result2 << std::endl;

	// Single depth point (first column of XYZ2) expressed in the color camera frame.
	Eigen::Vector4d P_a(-116.902, 65.447, 419, 1); // (x, y, z, 1)
	Eigen::Vector4d PofCamera = Extrinsic * P_a;
	std::cout << "The PofCamera: \n" << PofCamera << std::endl;

	// --- Reference snippets kept from earlier experiments -------------------
	// // Target points in the camera frame (assumed):
	// Eigen::Vector4d P_c(134.621002, 73.799698, 588, 1); // (x, y, z, 1)
	// Eigen::Vector4d P_a(109.917999, 74.5485, 589, 1);   // (x, y, z, 1)

	// // Build a tool pose from roll/pitch/yaw (degrees) + translation:
	// std::vector<double> v(3, 0.0);
	// v[0]=180;
	// v[1]=-0.14;
	// v[2]=179.94;
	// 	std::cout << "RPY: " << v[0] <<  " "<< v[1] <<  " "<< v[2] <<  " " << std::endl;
	// 	Eigen::Quaterniond m = Eigen::AngleAxisd(v[2] / 180 * PI, Eigen::Vector3d::UnitZ())\
	// 	* Eigen::AngleAxisd(v[1] / 180 * PI, Eigen::Vector3d::UnitY())\
	// 	* Eigen::AngleAxisd(v[0] / 180 * PI, Eigen::Vector3d::UnitX());

	// Eigen::Vector3d trans_temp;
	// trans_temp<<267.37,-0.18,636.39;
	// 	double w, x, y, z;
	// 	x = m.x(); y= m.y(); z= m.z();  w= m.w();
	// 	Eigen::Quaterniond rot_temp(w, x, y, z);

	// 	Eigen::Matrix4d pose_temp;
	// 	pose_temp << Eigen::MatrixXd::Identity(4, 4);
	// 	// Quaternion -> rotation matrix:
	// 	pose_temp.topLeftCorner(3, 3) << rot_temp.toRotationMatrix();
	// 	pose_temp.topRightCorner(3, 1) << trans_temp;

	// // Transform the camera-frame points into the robot tool frame:
	// Eigen::Vector4d P_h = pose_temp*H_Depth2Robot * P_c;
	// Eigen::Vector4d P_h2 = pose_temp*H_Depth2Robot * P_a;

	// // Extract the 3D part (first three elements):
	// Eigen::Vector3d point1 = P_h.head<3>();   // first three elements of P_h
	// Eigen::Vector3d point2 = P_h2.head<3>();  // first three elements of P_h2

	// // Euclidean distance between the two transformed points:
	// double distance = (point1 - point2).norm();

	// std::cout << "The Euclidean distance between the two points is: " << distance << std::endl;
	// std::cout << "The point in the hand (robot) coordinate system is: \n" << P_h.head<3>() << std::endl;

	return 0;
}
