#include "targetsolver.h"
#include "../autoaim_node.hpp"

// Defaulted special members: TargetSolver holds no directly-owned
// resources, so compiler-generated construction/destruction suffice.
TargetSolver::TargetSolver() = default;

TargetSolver::~TargetSolver() = default;

/**
 * @brief PnP-solver initialisation; intended to load the camera intrinsic
 *        matrix and distortion coefficients. Currently a no-op — the
 *        globals g_camera_internal_matrix / g_distortion_coeff are
 *        evidently initialised elsewhere.
 */
void TargetSolver::init() {}

std::vector<cv::Point3f> Generate_test_data(double half_w, double half_h, double x, double y, double z, double theta,double pitch);
cv::Point2f camera_model(double f, int u, int cx, int cy, cv::Point3f point);
void draw_armor(std::vector<cv::Point2f> points2dtrue,cv::Mat &blankImage, cv::Scalar scalar);
double get_theta(cv::Point2f A, cv::Point2f B);
double get_delta(std::vector<cv::Point2f> points2d_true, std::vector<cv::Point2f> points2d_guess);
double get_distance(cv::Point2f A, cv::Point2f B);
double get_distance_delta(cv::Point2f A, cv::Point2f B, cv::Point2f C, cv::Point2f D);
int findMinIndex(const double arr[], int length);

// int main() {
// 	double half_w = 1.5;
// 	double half_h = 1;
// 	int stddev = 2;
// 	int theta = 3140;
// 	bool if_gauss = true;
// 	double delta_;

// 	//double yaw_predict[628] ;
// 	//for (int i = 0; i < 628; i++) {
// 	//	yaw_predict[i] = i;
// 	//	//cout << yaw_predict[i] << endl;
// 	//}
// 	//double delta[628];
// 	//for (int i = 0; i < 628; i++) {
// 	//	array<Point3f, 4> points3dguess = Generate_test_data(half_w, half_h, 0, 0, 5, yaw_predict [i] / 100 - 3.14);
// 	//	array<Point3f, 4> points3dtrue = Generate_test_data(half_w, half_h, 0, 0, 5, CV_PI/2);
// 	//	delta_ = get_delta(points3dguess, points3dtrue);
// 	//	//cout << delta_ << endl;
// 	//	delta[i] = delta_*50;
// 	//}
// 	//Mat blankImage(480, 640, CV_8UC3, cv::Scalar(255, 255, 255));
// 	//for (int i = 0; i < 628; i++){
// 	//	circle(blankImage, Point2f(yaw_predict[i],480-delta[i]), 5, Scalar(0, 0, 0), -1);
// 	//}

// 	//double true_theta = static_cast<double>(findMinIndex(delta,628))/100-3.14;
// 	//cout << true_theta << endl;
// 	//imshow("testimg", blankImage);
// 	//waitKey(0);



// 	cv::namedWindow("Trackbars", (640, 200));
// 	cv::createTrackbar("Theta", "Trackbars", &theta, 6280);
// 	double theta_ = theta;
// 	while (1) {
// 		cv::Mat blankImage(960, 1280, CV_8UC3, cv::Scalar(255, 255, 255));
// 		theta_ = theta;
// 		std::array<cv::Point3f, 4> points3dtrue = Generate_test_data(half_w, half_h, 0, 0, 5, theta_ / 1000 - 3.14);
// 		double yaw_predict[628];
// 		for (int i = 0; i < 628; i++) {
// 			yaw_predict[i] = i;
// 			//cout << yaw_predict[i] << endl;
// 		}
// 		double delta[628];
// 		for (int i = 0; i < 628; i++) {
// 			std::array<cv::Point3f, 4> points3dguess = Generate_test_data(half_w, half_h, 0, 0, 5, yaw_predict[i] / 100 - 3.14);
// 			delta_ = get_delta(points3dguess, points3dtrue);
// 			//cout << delta_ << endl;
// 			delta[i] = delta_ * 50;
// 		}
// 		double true_theta = static_cast<double>(findMinIndex(delta, 628)) / 100 - 3.14;
// 		draw_armor(points3dtrue, blankImage, cv::Scalar(0, 255, 0));
// 		//draw_armor(points3dguess, blankImage,Scalar(255, 0, 0));
// 		std::string text = cv::format("true_theta: %.2lf", true_theta);
// 		putText(blankImage, text, cv::Point2f(100,100), cv::FONT_HERSHEY_SIMPLEX, 3, cv::Scalar(0, 0, 255), 2);
// 		cv::imshow("testimg", blankImage);
// 		cv::waitKey(1);
// 	}

	
// }

// Ideal pinhole projection of a camera-frame 3-D point onto the image
// plane: pixel = round(f * u * coord / z + principal_point).
// f is the focal length, u a pixel-scale factor, (cx, cy) the principal
// point.  No distortion is modelled.
cv::Point2f camera_model(double f, int u, int cx, int cy, cv::Point3f point)
{
	const double px = round(f * u * point.x / point.z + cx);
	const double py = round(f * u * point.y / point.z + cy);
	return cv::Point2f(px, py);
}


// Index of the smallest element in arr[0..length-1].  Ties keep the
// earliest index.  For length <= 0 the scan never runs and 0 is
// returned, matching the original behaviour.
int findMinIndex(const double arr[], int length) {
	int best = 0;
	for (int idx = 1; idx < length; ++idx) {
		best = (arr[idx] < arr[best]) ? idx : best;
	}
	return best;
}

// Orientation of segment AB relative to the x axis, in (-pi/2, pi/2].
// A vertical segment (equal x) maps to exactly pi/2; atan (not atan2)
// is used, so direction along the segment is ignored.
double get_theta(cv::Point2f A, cv::Point2f B) {
	const bool vertical = (A.x == B.x);
	if (vertical) {
		return CV_PI / 2;
	}
	return atan((A.y - B.y) / (A.x - B.x));
}

// Euclidean distance between two image points.
double get_distance(cv::Point2f A, cv::Point2f B) {
	const float dx = A.x - B.x;
	const float dy = A.y - B.y;
	return sqrt(dx * dx + dy * dy);
}

// Normalised separation of two segments AB and CD: the distance between
// their midpoints, divided by each segment's own length, averaged.
double get_distance_delta(cv::Point2f A, cv::Point2f B, cv::Point2f C, cv::Point2f D) {
	const cv::Point2f mid_ab = (A + B) / 2;
	const cv::Point2f mid_cd = (C + D) / 2;
	const double centre_gap = get_distance(mid_ab, mid_cd);
	return (centre_gap / get_distance(A, B) + centre_gap / get_distance(C, D)) / 2;
}

/**
 * @brief Mismatch score between an observed quad and a hypothesised quad.
 *
 * For each of the four edges (0-1, 1-2, 2-3, 3-0) the score accumulates
 * the absolute difference of the edge orientations (get_theta) plus the
 * normalised midpoint separation (get_distance_delta).  Smaller means a
 * better match.
 *
 * Both vectors must contain at least four points in matching corner
 * order.  Parameters stay by-value to match the forward declaration.
 *
 * @return the summed orientation + midpoint mismatch over all edges
 */
double get_delta(std::vector<cv::Point2f> points2d_true, std::vector<cv::Point2f> points2d_guess) {
	double score = 0.0;
	for (int i = 0; i < 4; i++) {
		const int j = (i + 1) % 4;  // next corner, wrapping 3 -> 0
		// Orientation mismatch of edge i->j.
		score += abs(get_theta(points2d_true[i], points2d_true[j]) - get_theta(points2d_guess[i], points2d_guess[j]));
		// Normalised distance between the two edge midpoints.
		score += get_distance_delta(points2d_true[i], points2d_true[j], points2d_guess[i], points2d_guess[j]);
	}
	return score;
}

// Render a quadrilateral onto blankImage: a filled dot at each of the
// four corners plus an edge to the next corner (wrapping 3 -> 0), all in
// the given colour.
void draw_armor(std::vector<cv::Point2f> points2dtrue,cv::Mat &blankImage, cv::Scalar scalar) {
	for (int idx = 0; idx < 4; ++idx) {
		const cv::Point2f &corner = points2dtrue[idx];
		const cv::Point2f &next = points2dtrue[(idx + 1) % 4];
		circle(blankImage, corner, 5, scalar, -1);
		line(blankImage, corner, next, scalar, 2);
	}
}




/**
 * @brief Build the four 3-D corner points of a synthetic armour plate.
 *
 * The plate starts axis-aligned around the origin (width 2*half_w,
 * height 2*half_h), is rotated by @p theta about the vertical (yaw) axis
 * and by @p pitch about the horizontal axis, then translated by
 * (x, y, z).
 *
 * @param half_w half of the plate width
 * @param half_h half of the plate height
 * @param x,y,z  translation applied after rotation
 * @param theta  yaw angle, radians
 * @param pitch  pitch angle, degrees; converted to radians and offset by
 *               pi/12 (15 deg), the fixed armour inclination
 * @return corners ordered top-left, top-right, bottom-right, bottom-left
 */
std::vector<cv::Point3f> Generate_test_data(double half_w, double half_h, double x, double y, double z, double theta,double pitch) {
	std::vector<cv::Point3f> points3d(4);
	points3d[0] = cv::Point3f(-half_w, half_h, 0);
	points3d[1] = cv::Point3f(half_w, half_h, 0);
	points3d[2] = cv::Point3f(half_w, -half_h, 0);
	points3d[3] = cv::Point3f(-half_w, -half_h, 0);

	// Degrees -> radians, plus the fixed 15-degree armour tilt.
	pitch = CV_PI * pitch / 180 + CV_PI / 12;

	const double cos_yaw = cos(theta);
	const double sin_yaw = sin(theta);
	const double cos_pitch = cos(pitch);
	const double sin_pitch = sin(pitch);

	// Only yaw and pitch enter the transform; the unused init/roll
	// matrices from the original draft have been removed.
	Eigen::Matrix3d rotation_matrix_yaw;
	Eigen::Matrix3d rotation_matrix_pitch;
	rotation_matrix_yaw << cos_yaw, 0, sin_yaw, 0, 1, 0, -sin_yaw, 0, cos_yaw;
	rotation_matrix_pitch << 1, 0, 0, 0, cos_pitch, sin_pitch, 0, -sin_pitch, cos_pitch;

	for (int i = 0; i < 4; i++) {
		Eigen::Vector3d displacement(points3d[i].x, points3d[i].y, points3d[i].z);
		// Pitch first, then yaw: p' = R_yaw * R_pitch * p.
		Eigen::Vector3d displacement_camera = rotation_matrix_yaw * rotation_matrix_pitch * displacement;
		points3d[i] = cv::Point3f(displacement_camera[0] + x, displacement_camera[1] + y, displacement_camera[2] + z);
	}
	return points3d;
}


// New version
/**
 * @brief Estimate the 3-D pose of an armour plate from its image corners.
 *
 * Extracts the four corner points of the plate (from the two light-bar
 * rotated rects, or directly from the network output), runs a 3-point
 * P3P solve followed by a 4-point IPPE PnP solve for rotation and
 * translation, converts the translation into the gimbal (ptz) frame and
 * estimates the plate's world yaw (target.o_yaw).
 *
 * @param armor        primary armour plate (source of the corner points)
 * @param armor_deputy secondary plate — currently unused in this function
 * @param target       output: ptz-frame position, half sizes (m), o_yaw
 * @param pose         current gimbal pose, degrees (yaw/pitch/roll)
 * @param armor_num    armour id; 3 selects the inverted-pitch plate
 * @return true when the PnP solve succeeded and @p target was filled
 */
bool TargetSolver::armor_getCoord(const Armor &armor, const Armor &armor_deputy,Target &target,const Pose &pose, const int& armor_num)
{
    cv::Point2f left_up, left_down, right_up, right_down;
    if(!armor.if_use_net){
        // For the primary armour plate: recover the endpoints of the two
        // light bars from their rotated rects.
        cv::Point2f left_vertices[4];
        cv::Point2f right_vertices[4];
        std::vector<cv::Point2f>left_vertices_;
        std::vector<cv::Point2f>right_vertices_;
        armor.lbar_rect.points(left_vertices);
        armor.rbar_rect.points(right_vertices);
        left_vertices_.assign(left_vertices,left_vertices+4);
        right_vertices_.assign(right_vertices,right_vertices+4);
        /* Sort by y coordinate.  Note: the y axis points downwards.
        Order the four vertices by ascending y. */
        sort(left_vertices_.begin(), left_vertices_.end(), [](const cv::Point2f &p1, const cv::Point2f &p2) { return p1.y < p2.y; });
        // Then use the y-order to locate the top/bottom of each bar:
        // top endpoint = mean of the two highest vertices, bottom = mean
        // of the two lowest.
        left_up = (left_vertices_[0] + left_vertices_[1]) * 0.5;
        left_down = (left_vertices_[2] + left_vertices_[3]) * 0.5;
        sort(right_vertices_.begin(), right_vertices_.end(), [](const cv::Point2f &p1, const cv::Point2f &p2) { return p1.y < p2.y; });
        right_up = (right_vertices_[0] + right_vertices_[1]) * 0.5;
        right_down = (right_vertices_[2] + right_vertices_[3]) * 0.5;


    }
    else{
        // Network detection already provides the four corners directly.
        left_up =armor.coord_points[0];
        left_down =armor.coord_points[3];
        right_up =armor.coord_points[1];
        right_down =armor.coord_points[2];
    }
// #ifndef _USE_NETWORK


    // Select the physical plate dimensions (mm).
    double half_w = 0, half_h = 0;
    if (6 == autoaim_node->mode)
    {
        // Mode 6: fixed 37 mm geometry with a 15-degree tilt —
        // presumably a special target (outpost?); TODO confirm.
        half_w = 37;
        half_h = 37 * cos(15 * acos(-1.0) / 180);
    }
    else if (armor.isBigArmor)
    {
        // std::cout << "is big armor!" << std::endl;
        half_w = half_big_armor_width;
        half_h = half_big_armor_height;
    }
    else
    {
        half_w = half_small_armor_width;
        half_h = half_small_armor_height;
    }

    // Store the half sizes in metres on the target.
    target.half_w = half_w / 1000.0;
    target.half_h = half_h / 1000.0;



    /* Depending on the armour type, take the plate centre as the origin
       of the object coordinate system and its four vertices as the
       2D-3D correspondences. */
    // Points in points3d must correspond one-to-one, in order, with points2d
    std::vector<cv::Point2f> points2d;
    points2d.clear();
    std::vector<cv::Point3f> points3d;
    points3d.clear();

    bool success;

    cv::Mat P = (cv::Mat_<double>(3, 3) << 0, 0, 0, 0, 0, 0, 0, 0, 0);// scratch rotation matrix, filled by cv::Rodrigues below


    // First pass: 3-point P3P using the longer (less foreshortened)
    // light bar as two points plus the midpoint of the other bar.
    if (sqrt(pow(left_up.x - left_down.x, 2) + pow(left_up.y - left_down.y, 2)) > sqrt(pow(right_up.x - right_down.x, 2) + pow(right_up.y - right_down.y, 2)))
    {
        points2d.emplace_back(left_up);
        points2d.emplace_back(left_down);
        points2d.emplace_back((right_up + right_down) * 0.5);


        points3d.emplace_back(cv::Point3f(-half_w, half_h, 0));
        points3d.emplace_back(cv::Point3f(-half_w, -half_h, 0));
        points3d.emplace_back(cv::Point3f(half_w, 0, 0));

        // Initial guess: plate ~3 m ahead.
        rotate_mat = (cv::Mat_<double>(3, 1) << 0 , 0 , 1);
        trans_mat = (cv::Mat_<double>(3, 1) << 0 , 0 , 3000);
    }
    else
    {
        points2d.emplace_back(right_up);
        points2d.emplace_back(right_down);
        points2d.emplace_back((left_up + left_down) * 0.5);


        points3d.emplace_back(cv::Point3f(half_w, half_h, 0));
        points3d.emplace_back(cv::Point3f(half_w, -half_h, 0));
        points3d.emplace_back(cv::Point3f(-half_w, 0, 0));

        rotate_mat = (cv::Mat_<double>(3, 1) << 0 , 0 , -1);
        trans_mat = (cv::Mat_<double>(3, 1) << 0 , 0 , 3000);
    }

    // RCLCPP_INFO(autoaim_node->get_logger(), "targetsolver/637");
    success = P3P_HERO(points3d, points2d, g_camera_internal_matrix, g_distortion_coeff, rotate_mat, trans_mat);
    // RCLCPP_INFO(autoaim_node->get_logger(), "p3p: %lf, %lf, %lf", rotate_mat.at<double>(0, 0), rotate_mat.at<double>(1, 0), rotate_mat.at<double>(2, 0));
    // cv::Rodrigues(rotate_mat, P);
    // RCLCPP_INFO(autoaim_node->get_logger(), "p3p: %lf, %lf, %lf", P.at<double>(0, 2), P.at<double>(1, 2), P.at<double>(2, 2));
    // NOTE(review): the success guard below is commented out, so the
    // 4-point IPPE solve always runs and overwrites the P3P result —
    // confirm P3P is intentionally used only as an initial estimate.
    // if (0 == success)
    // {
        points2d.clear();
        points3d.clear();


        points2d.emplace_back(left_up);
        points2d.emplace_back(right_up);
        points2d.emplace_back(right_down);
        points2d.emplace_back(left_down);


        points3d.push_back(cv::Point3f(-half_w, half_h, 0));
        points3d.push_back(cv::Point3f(half_w, half_h, 0));
        points3d.push_back(cv::Point3f(half_w, -half_h, 0));
        points3d.push_back(cv::Point3f(-half_w, -half_h, 0));
        success = solvePnP(points3d, points2d, g_camera_internal_matrix, g_distortion_coeff, rotate_mat, trans_mat, false, cv::SOLVEPNP_IPPE);
        // cv::Rodrigues(rotate_mat, P);
        // RCLCPP_INFO(autoaim_node->get_logger(), "pNp: %lf, %lf, %lf", P.at<double>(0, 2), P.at<double>(1, 2), P.at<double>(2, 2));
        // RCLCPP_INFO(autoaim_node->get_logger(), "pnp: %lf, %lf, %lf", rotate_mat.at<double>(0, 0), rotate_mat.at<double>(1, 0), rotate_mat.at<double>(2, 0));
    // }

    


    
    // if (trans_mat.at<double>(2, 0) < 200)
    // {
    //     trans_mat = trans_mat_2;
    //     // RCLCPP_INFO(autoaim_node->get_logger(), "p3p error!");
    // }
    // Rotation vector -> rotation matrix; the plate yaw is read off the
    // third column of P (direction of the plate normal).
    cv::Rodrigues(rotate_mat, P);
    double yaw = -atan2(P.at<double>(2, 2), P.at<double>(0, 2)) * 180 / acos(-1.0);
    // if (yaw > 70 && yaw < 110)
    // {
    //     rotate_mat = rotate_mat_2;
    //     cv::Rodrigues(rotate_mat, P);
    //     // RCLCPP_INFO(autoaim_node->get_logger(), "middle");
    // }

    // Record the raw PnP translation (camera frame, mm) for logging.
    autoaim_node->autoaim_recorder.pnp_x = trans_mat.at<double>(0, 0);
    autoaim_node->autoaim_recorder.pnp_y = trans_mat.at<double>(1, 0);
    autoaim_node->autoaim_recorder.pnp_z = trans_mat.at<double>(2, 0);
    // autoaim_node->autoaim_recorder.pnp_z_x = yaw;
    // autoaim_node->autoaim_recorder.pnp_z_y = P.at<double>(2, 1);
    // autoaim_node->autoaim_recorder.pnp_z_z = P.at<double>(2, 2);

    

    


    if(!USE_COMM){
        if(DEBUG){
            // cv::putText(testimg, "target_x_camera: " + std::to_string(trans_mat.at<double>(0, 0)), cv::Point(20, 130), cv::FONT_HERSHEY_SIMPLEX, 1.0, cv::Scalar(0, 255, 0));
            // cv::putText(testimg, "target_y_camera: " + std::to_string(trans_mat.at<double>(1, 0)), cv::Point(20, 180), cv::FONT_HERSHEY_SIMPLEX, 1.0, cv::Scalar(0, 255, 0));
            // cv::putText(testimg, "target_z_camera: " + std::to_string(trans_mat.at<double>(2, 0)), cv::Point(20, 230), cv::FONT_HERSHEY_SIMPLEX, 1.0, cv::Scalar(0, 255, 0));
        }
    }

    // std::cout << g_camera_internal_matrix.at<double>(0, 0) << "  ";
    // std::cout<<"x:"<<trans_mat.at<double>(0, 0) << "  ";
    // std::cout<<"y:"<<trans_mat.at<double>(1, 0) << "  "; 
    // std::cout<<"z:"<<trans_mat.at<double>(2, 0) << std::endl;
    // std::cout<<"dis:"<<sqrt(pow(trans_mat.at<double>(0, 0),2)+pow(trans_mat.at<double>(2, 0),2))<<std::endl;
    // std::cout << "pnp finished!" << std::endl;
    //for(int i = 0;i<4;i++){cv::circle(drawimage, points2d[i], 2, cv::Scalar(255,0, 0), -1, 8, 0);}

    // The rotation matrix is not needed for now; uncomment if required.
    //Rodrigues(rotate_mat, rotate_mat);
    if(success)
    {
        // RCLCPP_INFO(autoaim_node->get_logger(), "01");
        // Camera frame (mm) -> gimbal/ptz frame (m).
        camera2ptz(trans_mat, target);
        autoaim_node->autoaim_recorder.target_ptz_x = target.x;
        autoaim_node->autoaim_recorder.target_ptz_y = target.y;
        autoaim_node->autoaim_recorder.target_ptz_z = target.z;


        cv::Mat offset = (cv::Mat_<double>(3, 1) << X_OFFSET / 1000.0, Y_OFFSET / 1000.0, Z_OFFSET / 1000.0);
        cv::Mat originalPoints[4];
        originalPoints[0] = (cv::Mat_<double>(2, 1) << (double)left_up.x, (double)left_up.y);
        originalPoints[1] = (cv::Mat_<double>(2, 1) << (double)right_up.x, (double)right_up.y);
        originalPoints[2] = (cv::Mat_<double>(2, 1) << (double)right_down.x, (double)right_down.y);
        originalPoints[3] = (cv::Mat_<double>(2, 1) << (double)left_down.x, (double)left_down.y);
        // NOTE(review): cv::Mat copy is shallow — tvec shares trans_mat's
        // buffer, so the /1000 loop below rescales trans_mat in place to
        // metres as well.  camera2ptz already ran above, but
        // estimate_o_yaw then receives trans_mat in metres — confirm this
        // is the intended unit.
        cv::Mat tvec = trans_mat;
        Pose pose_tmp;
        // Gimbal pose: degrees -> radians for estimate_o_yaw.
        pose_tmp.ptz_yaw = pose.ptz_yaw * M_PI / 180;
        pose_tmp.ptz_pitch = pose.ptz_pitch * M_PI / 180;
        pose_tmp.ptz_roll = pose.ptz_roll * M_PI / 180;

        for (int i = 0; i < 3; i++)
        {
            tvec.at<double>(i, 0) = tvec.at<double>(i, 0) / 1000.0;
        }
        // Armour plates are tilted +/- 15 degrees; id 3 is the inverted one.
        double armor_pitch = (3 == armor_num) ? -M_PI/12 : M_PI/12;
        // double armor_pitch = M_PI/12;
        target.o_yaw = estimate_o_yaw(trans_mat, offset, pose_tmp, half_w/1000.0, half_h/1000.0, armor_pitch, g_camera_internal_matrix, g_distortion_coeff, originalPoints);
        // RCLCPP_INFO(autoaim_node->get_logger(), "frame_width: %d", autoaim_node->frame_width);
        if (6 == autoaim_node->car_id)
        {
            cv::Point armor_center;
            armor_center = armor.armor_rect.center;
            // std::cout << autoaim_node->frame_width << std::endl;
            // RCLCPP_INFO(autoaim_node->get_logger(), "frame_width: %d", autoaim_node->frame_width);
            // std::cout << armor_center.x << std::endl;
            // target.o_yaw += (20.0/autoaim_node->frame_width*armor_center.x-10)*M_PI/180;
        }
        // double tmp_yaw = target.o_yaw;
        // tmp_yaw += M_PI;
        // tmp_yaw = atan2(cos(tmp_yaw), sin(tmp_yaw));
        autoaim_node->autoaim_recorder.pnp_z_z = target.o_yaw * 180 / M_PI;
        // std::cout << target.o_yaw * 180 / M_PI << std::endl;
        // RCLCPP_INFO(autoaim_node->get_logger(), "o_yaw: %lf", target.o_yaw * 180 / M_PI);
        return true;
    }
    else 
    {
//        LOGE("pnp failed!");
        RCLCPP_INFO(autoaim_node->get_logger(), "pnp failed!");
        return false;
    }


}


// Output is in metres; the offsets compensate for the physical distance
// between the camera and the origin defined at the gimbal (ptz).
void TargetSolver::camera2ptz(const cv::Mat &camera_position, Target &ptz_position)
{
    // Per axis: add the camera-to-gimbal offset (mm), then convert mm -> m.
    auto to_metres = [&camera_position](int row, double offset_mm) {
        return (camera_position.at<double>(row, 0) + offset_mm) / 1000.0;
    };
    ptz_position.x = to_metres(0, X_OFFSET);
    ptz_position.y = to_metres(1, Y_OFFSET);
    ptz_position.z = to_metres(2, Z_OFFSET);
}

// Rescale the target along its viewing ray so that its depth (z)
// becomes std_height; the direction from the origin is preserved.
void TargetSolver::projectPoint(Target &target, double std_height)
{
    const double scale = std_height / target.z;  // NOTE(review): assumes target.z != 0 — confirm callers guarantee this
    target.x *= scale;
    target.y *= scale;
    target.z = std_height;
}