#include <opencv2/calib3d.hpp>
#include <rclcpp/logging.hpp>
#include <rclcpp/rclcpp.hpp>
#include <cv_bridge/cv_bridge.h>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <rclcpp_components/register_node_macro.hpp>
#include <sensor_msgs/msg/image.hpp>
#include <sensor_msgs/image_encodings.hpp>

namespace palomino {
// Plain 3-component float triple; used below for positions, velocities
// and accelerations. All components default to zero.
struct f3 {
    float x{0.0f};
    float y{0.0f};
    float z{0.0f};
};

// A 3-D position sample tagged with the timestamp (seconds) it was taken at.
struct position {
    f3 mposition{};            // sampled coordinates, zero-initialized
    double current_time{0.0};  // message timestamp in seconds
};

// A 3-D velocity sample tagged with the timestamp (seconds) it was taken at.
struct speed {
    f3 mspeed{};               // sampled velocity components, zero-initialized
    double current_time{0.0};  // message timestamp in seconds
};

// Accumulator for a running-average timing measurement.
struct useTime {
    long int times{0};       // number of samples folded in so far
    double useTimeSum{0.0};  // sum of all measured durations (seconds)
    double average{0.0};     // useTimeSum / times, maintained by the caller
};

class BoardDetection : public rclcpp::Node {
public:
    /// Node that detects a pair of light bars ("board") in incoming images,
    /// estimates the board's 6-DoF pose with PnP, and logs position, velocity
    /// and acceleration derived from consecutive frames.
    ///
    /// Subscribes: "image_raw" (sensor_msgs/msg/Image, expected bgr8).
    explicit BoardDetection(const rclcpp::NodeOptions& options)
        : Node("board_detection_node", rclcpp::NodeOptions(options).use_intra_process_comms(true)) {  // enable intra-process communication
        // Subscription QoS (keep-last/1, best-effort, volatile) must match the
        // publisher's profile or no connection is established.
        sub_ = this->create_subscription<sensor_msgs::msg::Image>(
            "image_raw",
            rclcpp::QoS(rclcpp::QoSInitialization::from_rmw(rmw_qos_profile_t{
                RMW_QOS_POLICY_HISTORY_KEEP_LAST,
                1,
                RMW_QOS_POLICY_RELIABILITY_BEST_EFFORT,
                RMW_QOS_POLICY_DURABILITY_VOLATILE,
                RMW_QOS_DEADLINE_DEFAULT,
                RMW_QOS_LIFESPAN_DEFAULT,
                RMW_QOS_POLICY_LIVELINESS_SYSTEM_DEFAULT,
                RMW_QOS_LIVELINESS_LEASE_DURATION_DEFAULT,
                false
            })),
            std::bind(&BoardDetection::imageCallback, this, std::placeholders::_1)
        );

        cv::namedWindow("view");
        cv::startWindowThread();
        RCLCPP_INFO(this->get_logger(), "Board detection node initialized and subscribed to image_raw");
    }

    ~BoardDetection() {
        cv::destroyWindow("view");
        RCLCPP_INFO(this->get_logger(), "Board detection node destroyed");
    }

    // Board corner coordinates in the board frame: a 130 x 55 mm rectangle
    // centred on the origin with z = 0 (units are metres).
    std::vector<cv::Point3d> boardPoints = std::vector<cv::Point3d>{
        cv::Point3d(-0.065, -0.0275, 0),
        cv::Point3d(-0.065,  0.0275, 0),
        cv::Point3d( 0.065,  0.0275, 0),
        cv::Point3d( 0.065, -0.0275, 0)
    };
    // Image-space corners produced by findBar() for the current frame.
    // Cleared at the start of every callback so a failed detection cannot
    // leak last frame's corners into the PnP solve.
    std::vector<cv::Point2d> quadPoints;

    // Camera intrinsic matrix (fx, fy, cx, cy) from a prior calibration.
    cv::Mat cameraMatrix = (cv::Mat_<double>(3, 3) <<
            1720.623331817985,  0,                           641.1862135251633,
            0,                  1724.193394641095,           491.969263408209,
            0,                  0,                           1);

    // Distortion coefficients (k1, k2, p1, p2, k3) from the same calibration.
    cv::Mat distortionCoefficients = (cv::Mat_<double>(5, 1) <<
    -0.0455753353769928, 0.3335555742001278, -0.00602518279942971, -0.007954908883944546, 0);

    cv::Mat rvec;  // latest PnP rotation vector (Rodrigues form)
    cv::Mat tvec;  // latest PnP translation vector

    // IMU-to-camera extrinsics; currently identity (all zero).
    std::vector<cv::Point3d> imu2camera = std::vector<cv::Point3d>{
        cv::Point3d(0.0, 0.0, 0.0),  // translation
        cv::Point3d(0.0, 0.0, 0.0),  // rotation
    };

    position position_current;  // pose sample from the current frame
    position position_last;     // pose sample from the previous frame
    speed speed_current;        // velocity sample from the current frame
    speed speed_last;           // velocity sample from the previous frame
    position maxT;  // running extrema of translation / rotation; kept for the
    position maxR;  // (currently disabled) pose-range logging
    position minT;
    position minR;


    /// Solve the board pose from 3D/2D correspondences using the AP3P solver
    /// (which requires 3 or 4 point pairs).
    /// @param objectPoints board corners in the board frame
    /// @param imagePoints  matching corners in image coordinates
    /// @param rvec,tvec    output rotation / translation vectors
    /// @return true when the solver succeeded
    bool solvePnP(const std::vector<cv::Point3d>& objectPoints,
                  const std::vector<cv::Point2d>& imagePoints,
                  cv::Mat& rvec, cv::Mat& tvec) {
        if (objectPoints.size() != imagePoints.size() || objectPoints.empty()) {
            RCLCPP_ERROR(this->get_logger(), "3D点和2D点数量不匹配或为空");
            return false;
        }

        // Solve PnP with AP3P.
        bool success = cv::solvePnP(
            objectPoints,
            imagePoints,
            cameraMatrix,
            distortionCoefficients,
            rvec,
            tvec,
            false,
            cv::SOLVEPNP_AP3P
        );

        if (!success) {
            RCLCPP_ERROR(this->get_logger(), "PnP解算失败");
        }
        return success;
    }

    /// Finite-difference velocity between two position samples.
    /// Returns zero when dt <= 0 (identical or out-of-order timestamps),
    /// which previously produced inf/NaN via division by zero.
    f3 getSpeed(position& current, position& last) {
        f3 v;
        const double dt = current.current_time - last.current_time;
        if (dt <= 0.0) {
            return v;
        }
        v.x = static_cast<float>((current.mposition.x - last.mposition.x) / dt);
        v.y = static_cast<float>((current.mposition.y - last.mposition.y) / dt);
        v.z = static_cast<float>((current.mposition.z - last.mposition.z) / dt);
        return v;
    }

    /// Finite-difference acceleration between two velocity samples.
    /// Same dt <= 0 guard as getSpeed().
    f3 getSpeedOfSpeed(speed& current, speed& last) {
        f3 a;
        const double dt = current.current_time - last.current_time;
        if (dt <= 0.0) {
            return a;
        }
        a.x = static_cast<float>((current.mspeed.x - last.mspeed.x) / dt);
        a.y = static_cast<float>((current.mspeed.y - last.mspeed.y) / dt);
        a.z = static_cast<float>((current.mspeed.z - last.mspeed.z) / dt);
        return a;
    }

    useTime calculateUseTime;  // running average of per-frame PnP solve time

private:
    /// Per-frame pipeline: wrap the image buffer, detect the board, solve
    /// PnP, and log pose / velocity / acceleration plus timing statistics.
    void imageCallback(std::unique_ptr<sensor_msgs::msg::Image> msg) {
        try {
            // Log the buffer address (msg.get(), not &msg which is the stack
            // address of the local unique_ptr) to verify zero-copy
            // intra-process delivery.
            RCLCPP_DEBUG(this->get_logger(), "通信后内存地址为%p", static_cast<const void*>(msg.get()));
            auto sec = msg->header.stamp.sec;
            auto nanosec = msg->header.stamp.nanosec;
            double timestamp = static_cast<double>(sec) + static_cast<double>(nanosec) / 1e9;
            position_last.current_time = position_current.current_time;
            position_current.current_time = timestamp;
            speed_last.current_time = speed_current.current_time;
            speed_current.current_time = timestamp;

            // The pipeline below interprets the buffer as 3-channel BGR.
            if (msg->encoding != sensor_msgs::image_encodings::BGR8) {
                RCLCPP_WARN_ONCE(this->get_logger(),
                                 "Expected 'bgr8' encoding but received '%s'; interpreting data as bgr8",
                                 msg->encoding.c_str());
            }
            // Wrap the message buffer without copying; passing msg->step keeps
            // row-padded images correct (the previous ctor assumed a stride of
            // exactly width * 3 bytes).
            cv::Mat image(msg->height, msg->width, CV_8UC3, msg->data.data(), msg->step);

            // Image preprocessing (grayscale -> blur -> threshold -> close).
            cv::Mat processedImage = preprocessImage(image);

            // Drop last frame's corners so a failed detection yields an empty
            // quad (and a clean solvePnP failure) instead of a stale pose.
            quadPoints.clear();

            // Detect the light-bar pair and draw the result.
            cv::Mat afterFindImage = findBar(processedImage, image, quadPoints);

            // Log the various clocks for latency inspection.
            auto now = this->get_clock()->now();
            auto now_a = this->now();
            RCLCPP_INFO(this->get_logger(), "this->now()节点时间: %f", now_a.seconds());
            RCLCPP_INFO(this->get_logger(), "this->get_clock()->now()时间：%f", now.seconds());
            RCLCPP_INFO(this->get_logger(), "信息时间戳时间: %f", timestamp);

            bool pnp_success = solvePnP(boardPoints, quadPoints, rvec, tvec);

            // PnP timing: per-frame duration and running average.
            auto now_b = this->now();
            RCLCPP_INFO(this->get_logger(), "this->now()节点时间: %f", now_b.seconds());
            RCLCPP_INFO(this->get_logger(), "PnP解算时间: %f 秒", now_b.seconds() - now_a.seconds());
            double useTime = now_b.seconds() - now_a.seconds();
            calculateUseTime.times++;
            calculateUseTime.useTimeSum += useTime;
            calculateUseTime.average = calculateUseTime.useTimeSum / calculateUseTime.times;
            RCLCPP_INFO(this->get_logger(), "PnP平均解算时间: %f 秒", calculateUseTime.average);

            if (pnp_success) {
                RCLCPP_INFO(this->get_logger(), "三维位置：(%.2f, %.2f, %.2f)",
                tvec.at<double>(0, 0), tvec.at<double>(1, 0), tvec.at<double>(2, 0));
                position_last.mposition = position_current.mposition;
                position_current.mposition.x = static_cast<float>(tvec.at<double>(0, 0));
                position_current.mposition.y = static_cast<float>(tvec.at<double>(1, 0));
                position_current.mposition.z = static_cast<float>(tvec.at<double>(2, 0));

                // NOTE(review): a commented-out min/max pose-range tracker
                // lived here; maxT/maxR/minT/minR are retained for it.

                // Velocity from consecutive positions, then acceleration from
                // consecutive velocities.
                f3 speed;
                speed = getSpeed(position_current, position_last);
                speed_last.mspeed = speed_current.mspeed;
                speed_current.mspeed.x = speed.x;
                speed_current.mspeed.y = speed.y;
                speed_current.mspeed.z = speed.z;

                f3 speedofspeed;
                speedofspeed = getSpeedOfSpeed(speed_current, speed_last);
                RCLCPP_INFO(this->get_logger(), "速度：    (%.2f, %.2f, %.2f)", speed.x, speed.y, speed.z);
                RCLCPP_INFO(this->get_logger(), "加速度：  (%.2f, %.2f, %.2f)\n"
                    , speedofspeed.x, speedofspeed.y, speedofspeed.z);
            }

            // Show the annotated image.
            cv::imshow("view", afterFindImage);
            cv::waitKey(3);
        } catch (const cv::Exception& e) {
            // OpenCV throws cv::Exception; the previous handler caught
            // cv_bridge::Exception, which nothing here ever throws, so any
            // OpenCV error escaped the callback and took down the executor.
            RCLCPP_ERROR(this->get_logger(),
                         "OpenCV error while processing frame (encoding '%s'): %s",
                         msg->encoding.c_str(), e.what());
        }
    }

    /// Grayscale -> Gaussian blur -> adaptive inverse threshold -> triple
    /// morphological close. Produces the binary image fed to findBar().
    cv::Mat preprocessImage(const cv::Mat& inputImage) {
        cv::Mat processedImage;
        cv::cvtColor(inputImage, processedImage, cv::COLOR_BGR2GRAY);
        cv::GaussianBlur(processedImage, processedImage, cv::Size(11, 11), 0);
        cv::adaptiveThreshold(processedImage, processedImage, 255,
                             cv::ADAPTIVE_THRESH_GAUSSIAN_C,
                             cv::THRESH_BINARY_INV, 11, 5);

        cv::Mat kernel = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(6, 6));
        cv::morphologyEx(processedImage, processedImage, cv::MORPH_CLOSE, kernel, cv::Point(-1, -1), 3);

        return processedImage;
    }

    /// Find candidate light bars as fitted ellipses, pick the best pair via
    /// goodPair(), fill quadPoints with the four bar endpoints (ordered so
    /// index 0 starts on the left bar), and return an annotated copy of the
    /// original image. quadPoints is left untouched when fewer than two
    /// candidate ellipses are found.
    cv::Mat findBar(const cv::Mat& inputImage, const cv::Mat& originalImage, std::vector<cv::Point2d>& quadPoints) {
        cv::Mat processedImage;
        int lowThreshold = 0;
        const int ratio = 3;
        const int kernel_size = 3;
        lowThreshold = 50;
        cv::Canny(inputImage, processedImage, lowThreshold, lowThreshold * ratio, kernel_size);

        cv::Mat canny_output = processedImage;
        std::vector<std::vector<cv::Point>> contours;
        cv::findContours(canny_output, contours, cv::RETR_TREE, cv::CHAIN_APPROX_SIMPLE, cv::Point(0, 0));

        std::vector<cv::RotatedRect> minRect(contours.size());
        std::vector<cv::RotatedRect> minEllipse(contours.size());
        std::vector<cv::RotatedRect> allEllipses;

        // Collect ellipses that are large enough (major axis > 30 px) and not
        // near-duplicates of one already kept (centres within 15 px).
        for (size_t i = 0; i < contours.size(); i++) {
            minRect[i] = cv::minAreaRect(contours[i]);
            if (contours[i].size() > 5) {  // fitEllipse needs at least 5 points
                minEllipse[i] = cv::fitEllipse(contours[i]);
                float majorAxis = std::max(minEllipse[i].size.width, minEllipse[i].size.height);
                if (majorAxis > 30.0f) {
                    bool isDuplicate = false;
                    cv::Point2f currentCenter = minEllipse[i].center;
                    for (const auto& existingEllipse : allEllipses) {
                        float distance = static_cast<float>(cv::norm(currentCenter - existingEllipse.center));
                        if (distance < 15.0f) {
                            isDuplicate = true;
                            break;
                        }
                    }
                    if (!isDuplicate) {
                        allEllipses.push_back(minEllipse[i]);
                    }
                }
            }
        }

        cv::Mat drawing = originalImage.clone();
        if (allEllipses.size() >= 2) {
            // Exhaustive pair search: lowest goodPair() score wins.
            size_t idx1 = 0, idx2 = 1;
            float minScore = goodPair(allEllipses[0], allEllipses[1], originalImage);
            for (size_t i = 0; i < allEllipses.size(); i++) {
                for (size_t j = i + 1; j < allEllipses.size(); j++) {
                    float diff = goodPair(allEllipses[i], allEllipses[j], originalImage);
                    if (diff < minScore) {
                        minScore = diff;
                        idx1 = i;
                        idx2 = j;
                    }
                }
            }

            auto [p1, p2] = getMajorAxisEndpoints(allEllipses[idx1]);
            auto [p3, p4] = getMajorAxisEndpoints(allEllipses[idx2]);

            // Order the quad so traversal starts on the left-hand bar; this
            // must agree with the ordering of boardPoints for PnP.
            cv::Scalar green(0, 255, 0);
            if (p1.x < p3.x) {
                quadPoints = {p1, p2, p4, p3};
            }
            else {
                quadPoints = {p3, p4, p2, p1};
            }

            for (size_t k = 0; k < 4; k++) {
                cv::line(drawing, quadPoints[k], quadPoints[(k + 1) % 4], green, 2);
            }

            for (size_t i = 0; i < 4; ++i) {
                // Red circle at each corner plus its index label (0-3).
                cv::circle(drawing, quadPoints[i], 5, cv::Scalar(0, 0, 255), 2);
                std::string text = std::to_string(i);
                cv::putText(drawing, text, cv::Point(static_cast<int>(quadPoints[i].x + 10), static_cast<int>(quadPoints[i].y)),
                            cv::FONT_HERSHEY_SIMPLEX, 1.0, cv::Scalar(255, 255, 255), 2);
            }
        }
        return drawing;
    }

    // goodPair() scoring weights (lower total score = better pair).
    int a = 200;  // weight: combined tilt away from vertical
    int b = 650;  // weight: vertical offset between centres
    int c = 100;  // weight: area ratio between the two ellipses
    int d = 10;   // weight: horizontal distance between centres
    int e = 500;  // weight (subtracted): mean gray level between the bars

    /// Score a candidate ellipse pair; lower is better. Combines ellipse tilt,
    /// centre offsets, area ratio and the mean gray level of the region
    /// between the bars. Pairs tilted >= 30 degrees or with an area ratio
    /// above 2 are effectively rejected via a large penalty.
    float goodPair(const cv::RotatedRect& ellipse1, const cv::RotatedRect& ellipse2, const cv::Mat& originalImage) {

        // Tilt of each ellipse relative to vertical, folded into [0, 90].
        float angle1 = cv::abs(cv::abs(ellipse1.angle - 90) - 90);
        float angle2 = cv::abs(cv::abs(ellipse2.angle - 90) - 90);
        float angle = angle1 + angle2;
        if ((angle1 >= 30) || (angle2 >= 30)) angle = 100000;  // reject heavy tilt

        float height = cv::abs(ellipse1.center.y - ellipse2.center.y);

        // Area ratio (>= 1); the pi factor cancels but is kept for clarity.
        float s1 = 3.14f * (ellipse1.size.width / 2.0f) * (ellipse1.size.height / 2.0f);
        float s2 = 3.14f * (ellipse2.size.width / 2.0f) * (ellipse2.size.height / 2.0f);
        float area = std::max(s1, s2) / std::min(s1, s2);
        if (area > 2) area = 100000;  // reject grossly mismatched sizes

        float width = cv::abs(ellipse1.center.x - ellipse2.center.x);

        // Build a quad shrunk halfway toward the centroid of the four bar
        // endpoints, and measure the gray statistics inside it.
        auto [p1, p2] = getMajorAxisEndpoints(ellipse1);
        auto [p3, p4] = getMajorAxisEndpoints(ellipse2);
        auto p5 = ((p1 + p2 + p3 + p4) / 4 + p1) / 2;
        auto p6 = ((p1 + p2 + p3 + p4) / 4 + p2) / 2;
        auto p7 = ((p1 + p2 + p3 + p4) / 4 + p3) / 2;
        auto p8 = ((p1 + p2 + p3 + p4) / 4 + p4) / 2;
        std::vector<cv::Point2f> quadPoints = {p5, p6, p8, p7};
        cv::Mat mask = cv::Mat::zeros(originalImage.size(), CV_8UC1);
        std::vector<std::vector<cv::Point>> contours;
        std::vector<cv::Point> pts;
        for (const auto& p : quadPoints) {
            pts.push_back(cv::Point(static_cast<int>(p.x), static_cast<int>(p.y)));
        }
        contours.push_back(pts);
        cv::fillPoly(mask, contours, cv::Scalar(255));
        cv::Mat grayImage;
        cv::cvtColor(originalImage, grayImage, cv::COLOR_BGR2GRAY);
        cv::Scalar meanVal = cv::mean(grayImage, mask);
        float avgGray = static_cast<float>(meanVal[0]);
        double minVal, maxVal;
        cv::minMaxLoc(grayImage, &minVal, &maxVal, nullptr, nullptr, mask);
        float maxGray = static_cast<float>(maxVal);
        if (maxGray > 100.0f) {
            // NOTE(review): any bright pixel (> 100) inside the inner quad
            // cancels the dark-region bonus — presumably to reject pairs whose
            // in-between region contains lit pixels; confirm with the author.
            avgGray = 0.0f;
        }

        float mark = angle * a + height * b + area * c + width * d - avgGray * e;
        return mark;
    }

    /// Endpoints used as the "major axis" of a fitted ellipse.
    /// NOTE(review): offsets are purely vertical and scaled by size.width
    /// (not max(width, height)) and ellipse.angle is ignored — this assumes
    /// near-vertical bars whose fitted width spans the bar length; confirm
    /// this is intentional before relying on it for rotated targets.
    std::pair<cv::Point2f, cv::Point2f> getMajorAxisEndpoints(const cv::RotatedRect& ellipse) {
        float majorHalf = ellipse.size.width / 2.0f;
        cv::Point2f endpoint1 = ellipse.center + cv::Point2f(0, -2 * majorHalf);
        cv::Point2f endpoint2 = ellipse.center + cv::Point2f(0, 2 * majorHalf);
        return {endpoint1, endpoint2};
    }

    std::shared_ptr<rclcpp::Subscription<sensor_msgs::msg::Image>> sub_;
};
};
RCLCPP_COMPONENTS_REGISTER_NODE(palomino::BoardDetection)