#include "detect_aruco/detect_aruco.hpp"

ArucoDetect::ArucoDetect(ros::NodeHandle& nh)
    :nh_(nh), it_(nh_)
{
    // Resolve the image topic name and the camera-parameter YAML path from the
    // parameter server, falling back to hard-coded defaults when absent.
    std::string camera_topic;
    std::string camera_info;

    if (!nh_.getParam("camera_topic", camera_topic))
    {
        ROS_WARN("Didn't find parameter camera_topic");
        camera_topic = "/camera/color/image_raw";
    }
    else
    {
        ROS_INFO("Camera_topic is %s", camera_topic.c_str());
    }

    if (!nh_.getParam("camera_info", camera_info))
    {
        ROS_WARN("Didn't find parameter camera_info");
        camera_info = "/home/shuai/ros_astra_ws/src/boat_vision/config/camera_param.yaml";
    }
    else
    {
        ROS_INFO("Camera_info is %s", camera_info.c_str());
    }

    // Load camera_param.yaml and read the marker side length used for pose
    // estimation (landpad_det_len, in the same units the poses are reported in).
    camera_config = YAML::LoadFile(camera_info);
    landpad_det_len = camera_config["landpad_det_len"].as<double>();

    // Build intrinsic matrix and distortion coefficients from the YAML file.
    readCameraParameters(camera_config, cameraMatrix, distCoeffs);

    // Predefined ArUco dictionary used for all detections.
    dictionary = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_6X6_250);

    // Wire up I/O: camera images in; pose, annotated image, and landing switch out.
    image_sub_ = nh_.subscribe(camera_topic, 1, &ArucoDetect::imageCallback, this);
    pose_pub_ = nh_.advertise<geometry_msgs::PoseStamped>("/aruco_detect/pose", 1);
    image_pub_ = it_.advertise("/camera/color/image_raw/aruco_detect_image", 1);
    switch_pub_ = nh_.advertise<std_msgs::Bool>("/prometheus/switch/landpad_det", 10);
}

// Destructor: nothing to release explicitly; ROS handles, publishers, and
// subscribers are RAII members and clean themselves up.
ArucoDetect::~ArucoDetect(){}

// Build the 3x3 intrinsic matrix and the 5x1 distortion-coefficient vector
// (k1, k2, p1, p2, k3 — OpenCV plumb-bob order) from the loaded YAML node.
// Both outputs are CV_64F, matching what the pose-estimation path expects.
void ArucoDetect::readCameraParameters(YAML::Node &camera_config, cv::Mat &cameraMatrix, cv::Mat &distCoeffs)
{
    // Camera intrinsics (focal lengths and principal point).
    const double fx = camera_config["fx"].as<double>();
    const double fy = camera_config["fy"].as<double>();
    const double cx = camera_config["x0"].as<double>();
    const double cy = camera_config["y0"].as<double>();
    // Lens distortion coefficients.
    const double k1 = camera_config["k1"].as<double>();
    const double k2 = camera_config["k2"].as<double>();
    const double p1 = camera_config["p1"].as<double>();
    const double p2 = camera_config["p2"].as<double>();
    const double k3 = camera_config["k3"].as<double>();

    cameraMatrix = (cv::Mat_<double>(3, 3) <<
        fx,  0.0, cx,
        0.0, fy,  cy,
        0.0, 0.0, 1.0);

    distCoeffs = (cv::Mat_<double>(5, 1) << k1, k2, p1, p2, k3);
}

// Fallback overload with hard-coded calibration values (apparently for a
// specific 640x400-class sensor — principal point ~(314, 200)).
// Fixed: was CV_32F (float); now CV_64F (double) for consistency with the
// YAML-driven overload above, so downstream code sees one matrix type
// regardless of which overload populated the parameters.
void ArucoDetect::readCameraParameters(cv::Mat &cameraMatrix, cv::Mat &distCoeffs)
{
    cameraMatrix = (cv::Mat_<double>(3, 3) <<
        307.10092,   0.     , 314.4606 ,
        0.     , 307.11747, 199.90106,
        0.     ,   0.     ,   1.     );
    distCoeffs = (cv::Mat_<double>(5, 1) << -0.019218, 0.012827, -0.000142, -0.000708, 0.000000);
}

// Image subscriber callback: convert the incoming ROS image to a BGR8
// OpenCV image and run marker detection + pose estimation on it.
void ArucoDetect::imageCallback(const sensor_msgs::ImageConstPtr& msg)
{
    try
    {
        // ROS image message -> mutable OpenCV image (BGR8).
        cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8);
    }
    catch (cv_bridge::Exception& e)
    {
        ROS_ERROR("CV Bridge Exception: %s", e.what());
        return;
    }

    // Detect markers, estimate poses, and publish results.
    // maker_detect_video(image, dictionary);
    maker_pose_estimation(cv_ptr, dictionary, cameraMatrix, distCoeffs);

    // Image size, if needed for debugging:
    // ROS_INFO("Received image. Width: %d, Height: %d", image.cols, image.rows);
}

// Detection-only pass: finds markers and draws them onto a local copy of the
// frame. NOTE(review): the annotated copy is never displayed or published, and
// the only call site (in imageCallback) is commented out — this function is
// effectively debug/dead code kept for reference.
void ArucoDetect::maker_detect_video(cv_bridge::CvImagePtr cv_ptr, cv::Ptr<cv::aruco::Dictionary> &dictionary)
{
    cv::Mat annotated;
    cv_ptr->image.copyTo(annotated);

    std::vector<int> marker_ids;
    std::vector<std::vector<cv::Point2f>> marker_corners;
    cv::aruco::detectMarkers(cv_ptr->image, dictionary, marker_corners, marker_ids);

    // Draw outlines/ids only when something was found.
    if (!marker_ids.empty())
    {
        cv::aruco::drawDetectedMarkers(annotated, marker_corners, marker_ids);
    }
    // else
    //     printf("num is %ld\n",ids.size());
}

// Detect ArUco markers in the frame, estimate each marker's pose relative to
// the camera, and publish:
//   - /aruco_detect/pose                (geometry_msgs::PoseStamped per marker)
//   - /prometheus/switch/landpad_det    (std_msgs::Bool, true when a marker is
//                                        closer than 1 unit along the camera Z axis)
//   - the annotated image on image_pub_.
// The frame is annotated in place (marker outlines, axes, landing text).
//
// Fixed: the rotation-vector -> quaternion conversion previously called
// normalized() unconditionally; for a zero-length rvec (marker exactly
// parallel to the image plane) that divides by zero and publishes a NaN
// quaternion. A zero rotation now maps to the identity quaternion.
// Also fixed the signed/unsigned loop comparison.
void ArucoDetect::maker_pose_estimation(cv_bridge::CvImagePtr cv_ptr, cv::Ptr<cv::aruco::Dictionary> &dictionary, cv::Mat &cameraMatrix, cv::Mat &distCoeffs)
{
    std::vector<int> ids;
    std::vector<std::vector<cv::Point2f>> corners;
    cv::aruco::detectMarkers(cv_ptr->image, dictionary, corners, ids);

    // Only estimate poses when at least one marker was detected.
    if (!ids.empty()) {
        cv::aruco::drawDetectedMarkers(cv_ptr->image, corners, ids);

        std::vector<cv::Vec3d> rvecs, tvecs;
        // landpad_det_len is the physical marker side length (from YAML), so
        // tvecs come out in the same units.
        cv::aruco::estimatePoseSingleMarkers(corners, landpad_det_len, cameraMatrix, distCoeffs, rvecs, tvecs);

        // Draw an axis triad for each marker and publish its pose.
        for (std::size_t i = 0; i < ids.size(); i++)
        {
            cv::aruco::drawAxis(cv_ptr->image, cameraMatrix, distCoeffs, rvecs[i], tvecs[i], 0.1);
            std::cout << "rvecs " << rvecs[i] << std::endl;
            std::cout << "tvecs " << tvecs[i] << std::endl;
            std::cout << "id is " << ids[i] << std::endl;

            // Marker closer than 1 unit along Z: overlay landing text and
            // trigger the landing switch.
            if (tvecs[i][2] < 1)
            {
                // Text overlay parameters.
                cv::String text = "Drone landing";
                cv::Point textPosition(cv_ptr->image.cols * 0.7, cv_ptr->image.rows * 0.1); // text anchor
                int fontFace = cv::FONT_HERSHEY_SIMPLEX;
                double fontScale = 0.8;
                cv::Scalar color = CV_RGB(255, 0, 0);
                int thickness = 2;

                // Draw the text on the image.
                cv::putText(cv_ptr->image, text, textPosition, fontFace, fontScale, color, thickness, cv::LINE_AA);

                // Publish the landing switch signal.
                std_msgs::Bool switch_msg;
                switch_msg.data = true;
                switch_pub_.publish(switch_msg);
            }

            // Fill in the pose message.
            geometry_msgs::PoseStamped pose_msg;
            pose_msg.header.stamp = ros::Time::now();
            pose_msg.header.frame_id = "camera_link";  // Set the frame_id as needed

            // Position = tvec (marker origin in the camera frame).
            pose_msg.pose.position.x = tvecs[i][0];
            pose_msg.pose.position.y = tvecs[i][1];
            pose_msg.pose.position.z = tvecs[i][2];

            // Rotation vector (axis * angle) -> quaternion. Guard against a
            // zero-length vector: normalizing it would divide by zero.
            tf2::Quaternion quaternion;
            tf2::Vector3 tf_rotation_vector(rvecs[i][0], rvecs[i][1], rvecs[i][2]);
            const tf2Scalar angle = tf_rotation_vector.length();
            if (angle > tf2Scalar(1e-9))
            {
                quaternion.setRotation(tf_rotation_vector / angle, angle);
            }
            else
            {
                quaternion = tf2::Quaternion::getIdentity();
            }

            pose_msg.pose.orientation = tf2::toMsg(quaternion);

            // Publish the pose message.
            pose_pub_.publish(pose_msg);
        }
    }

    // Always republish the (possibly annotated) frame.
    image_pub_.publish(cv_ptr->toImageMsg());
    // cv::imshow("out", imageCopy);
    // cv::waitKey(10);
}