#include "deepdetector/RosDeepDetector.hpp"
namespace wmj
{
    /**
     * @brief Construct the detector node: read parameters, build the deep
     *        detector, and wire up the subscriptions and publisher.
     *
     * @param options standard node options, forwarded to rclcpp::Node
     */
    DeepDetector_Node::DeepDetector_Node(const rclcpp::NodeOptions &options) : rclcpp::Node("listener", options)
    {
        RCLCPP_INFO(this->get_logger(), "detector_component get is working ");

        // Read ROS parameters and build the deep-learning detector instance.
        DetectorParams params;
        this->getParams(params);
        m_deepDetector = std::make_shared<wmj::DeepDetector>(params);

        // Sensor-style QoS: best-effort reliability, keep only the latest
        // frame, volatile durability. Same profile as before, but built with
        // the rclcpp::QoS API instead of hand-filling an uninitialized
        // rmw_qos_profile_t field by field (deadline/lifespan/liveliness keep
        // their defaults either way).
        rclcpp::QoS qos(rclcpp::KeepLast(1));
        qos.best_effort().durability_volatile();

        // TODO: make topic names and queue depths configurable.
        roi_init_ = false;
        m_track_lost_cnt = 0;
        imageSub = this->create_subscription<sensor_msgs::msg::Image>(
            "image_raw", qos,
            std::bind(&DeepDetector_Node::imageCallBack, this, std::placeholders::_1));
        // roiSub = this->create_subscription<base_interfaces::msg::Roi>("my_roi", 1, std::bind(&DeepDetector_Node::roiCallBack, this, std::placeholders::_1));
        armorsPub = this->create_publisher<base_interfaces::msg::Armors>("Armors", 2);
        ScanmodeSub = this->create_subscription<base_interfaces::msg::ScanCtrlInfo>(
            "ScanCtrlInfo", 10,
            std::bind(&DeepDetector_Node::scanCtrlInfoCallback, this, std::placeholders::_1));
    }


    /**
     * @brief Image callback: run deep detection on the incoming frame and
     *        publish the detected armors.
     *
     * Lazily initializes the ROI bookkeeping from the first frame's size,
     * then detects within the current ROI and updates it for the next frame.
     *
     * @param msg incoming camera image
     */
    void DeepDetector_Node::imageCallBack(const sensor_msgs::msg::Image::ConstSharedPtr& msg)
    {
        base_interfaces::msg::Armors armors_msg;

        // Convert the ROS image to cv::Mat; skip the frame if conversion failed.
        cv::Mat image = DeepDetector_Node::getImg(msg);
        if (image.empty())
            return;

        // First frame: record the camera resolution and derive the default ROI.
        if (!roi_init_)
        {
            m_camera_resolution = cv::Rect_<double>(0, 0, msg->width - 1, msg->height - 1);
            setDeepRoiSize(m_camera_resolution);
            roi_init_ = true;
        }

        // Core detection on the current ROI.
        m_deepDetector->DeepDetectSingle(image, m_roi);

        // Update the ROI for the next frame from this frame's detections.
        setROI(m_deepDetector->m_armors);

        // const-ref iteration: avoid copying each armor per loop turn.
        for (const auto &armor : m_deepDetector->m_armors)
        {
            base_interfaces::msg::Armor armor_msg;
            armor_msg.armor_type = armor.m_armor_type;
            armor_msg.id = armor.m_id;
            armor_msg.color = armor.m_color;
            for (int j = 0; j < 4; j++)
            {
                base_interfaces::msg::Vertice vertice;
                vertice.position_x = armor.m_vertices[j].x;
                vertice.position_y = armor.m_vertices[j].y;
                armor_msg.vertices.push_back(vertice);
            }
            armor_msg.header = msg->header;
            armors_msg.armors.push_back(armor_msg);
        }
        // num was previously assigned twice; keep the authoritative one.
        armors_msg.num = armors_msg.armors.size();
        armors_msg.header = msg->header;
        armorsPub->publish(armors_msg);

        // Visualize detection results when debugging is enabled.
        if (m_deepDetector->m_debug)
            m_deepDetector->DebugOutput();
    }

    /**
     * @brief ROI callback: update the detection ROI from an incoming message.
     *
     * A message with zero width or height is treated as "no ROI" and simply
     * ignored, leaving the current ROI unchanged.
     *
     * @param msg ROI message
     */
    void DeepDetector_Node::roiCallBack(const base_interfaces::msg::Roi::SharedPtr msg)
    {
        // Only accept a non-degenerate rectangle.
        if (msg->roi_width != 0 && msg->roi_height != 0)
        {
            m_roi = cv::Rect_<double>(msg->roi_x, msg->roi_y, msg->roi_width, msg->roi_height);
        }
    }

    /**
     * @brief Convert a sensor_msgs image to cv::Mat via cv_bridge.
     *
     * @param img sensor_msgs::Image shared pointer
     * @return the wrapped cv::Mat, or an empty cv::Mat if conversion failed
     */
    cv::Mat DeepDetector_Node::getImg(const sensor_msgs::msg::Image::ConstSharedPtr& img)
    {
        // cv_bridge pointer; the final image is cv_ptr->image (cv::Mat).
        cv_bridge::CvImageConstPtr cv_ptr;
        try
        {
            // toCvShare avoids a copy when the encoding already matches.
            cv_ptr = cv_bridge::toCvShare(img);
        }
        catch (cv_bridge::Exception &e)
        {
            // Previously execution fell through to dereference the null
            // cv_ptr, crashing the node. Fail soft with an empty image.
            RCLCPP_ERROR(rclcpp::get_logger("DeepDetector_Node"), "cv_bridge exception: %s", e.what());
            return cv::Mat();
        }
        return cv_ptr->image;
    }

    /**
     * @brief Read detector parameters from the ROS parameter server.
     *
     * Each parameter is declared with the struct's current value as default,
     * and the resolved value (override or default) is written back into
     * @p params.
     *
     * @param params in/out parameter bundle for the DeepDetector
     */
    void DeepDetector_Node::getParams(DetectorParams &params)
    {
        // declare_parameter() returns the resolved value, so the previous
        // declare-then-get pairs collapse into a single call per parameter.
        params.enemyColor     = this->declare_parameter<int>("enemyColor", params.enemyColor);
        params.DEVICE         = this->declare_parameter<std::string>("DEVICE", params.DEVICE);
        params.lightRatio     = this->declare_parameter<float>("lightRatio", params.lightRatio);
        params.nmsThreshold   = this->declare_parameter<float>("nmsThreshold", params.nmsThreshold);
        params.confThreshold  = this->declare_parameter<float>("confThreshold", params.confThreshold);

        params.modelName      = this->declare_parameter<std::string>("modelName", params.modelName);
        params.modelFormat    = this->declare_parameter<std::string>("modelFormat", params.modelFormat);
        params.modelType      = this->declare_parameter<std::string>("modelType", params.modelType);
        params.inputPrecision = this->declare_parameter<std::string>("inputPrecision", params.inputPrecision);

        params.nType          = this->declare_parameter<int>("nType", params.nType);
        params.nColor         = this->declare_parameter<int>("nColor", params.nColor);
        params.nTag           = this->declare_parameter<int>("nTag", params.nTag);
        params.debug          = this->declare_parameter<bool>("debug", params.debug);
    }

    /**
     * @brief Derive the default ROI from the network input aspect ratio.
     *
     * The default ROI spans the full camera width, with its height scaled to
     * the detector's input aspect ratio and centred vertically in the frame.
     * Also initialises m_roi to this default.
     *
     * @param camera_resolution full camera frame rectangle
     */
    void DeepDetector_Node::setDeepRoiSize(cv::Rect2d camera_resolution)
    {
        m_deep_roi_size = m_deepDetector->getInputSize();

        const double aspect = static_cast<double>(m_deep_roi_size.height) /
                              static_cast<double>(m_deep_roi_size.width);
        const double roi_height = camera_resolution.width * aspect;
        const double roi_top = (camera_resolution.height - roi_height) / 2;

        m_deep_default_roi = cv::Rect2d(0, roi_top, camera_resolution.width, roi_height);
        m_roi = m_deep_default_roi;
    }


    /**
     * @brief Update m_roi for the next frame based on this frame's armors.
     *
     * State machine between a large default ROI (ROI_BIG) and a tight
     * network-input-sized ROI (ROI_SMALL):
     *  - no usable target: tolerate up to 10 lost frames, then fall back to
     *    the centred default ROI;
     *  - scanning: always use the default ROI;
     *  - otherwise shrink/grow the ROI around the detections' bounding box,
     *    clamped inside the camera frame.
     *
     * @param armors detections from the current frame
     */
    void DeepDetector_Node::setROI(const Armors &armors)
    {
        ROI roi;

        // Collect all armor corner points; track whether every detection is
        // white (all-white is treated the same as "no target" — presumably
        // white marks an invalid/extinguished armor; TODO confirm).
        std::vector<cv::Point2f> vertices;
        bool all_white = true;
        for (auto &armor : armors)
        {
            if(armor.m_color != _COLOR::_WHITE)
                all_white = false;
            for (int i = 0; i < 4; i++)
            {
                vertices.emplace_back(armor.m_vertices[i]);
            }
        }
        // No usable target this frame: tolerate a short tracking loss before
        // resetting to the default ROI.
        if (armors.empty() || all_white) 
        {
            // if (m_track_lost_cnt > m_roi_params->m_max_track_lost)
            if (m_track_lost_cnt > 10)
            {
                // Lost for too long: return to the centred default ROI.
                m_deep_roi_state = wmj::DeepROISizeState::ROI_BIG;
                m_roi = m_deep_default_roi;
                return;
            }
            else
            {
                // Keep the current ROI for now and count the lost frame.
                m_track_lost_cnt++;
                return;
            }

        }
        else
        {
            m_track_lost_cnt = 0;
        }
        // While scanning, detection should cover the whole default region.
        if(m_isscan)
        {
            m_deep_roi_state = wmj::DeepROISizeState::ROI_BIG;
            m_roi = m_deep_default_roi;
            return;
        }
        // Axis-aligned bounding box over every detected corner point.
        float max_x, min_x, max_y, min_y;
        max_x = min_x = vertices[0].x;
        max_y = min_y = vertices[0].y;
        for (auto &point : vertices)
        {
            max_x = max_x > point.x ? max_x : point.x;
            min_x = min_x < point.x ? min_x : point.x;
            max_y = max_y > point.y ? max_y : point.y;
            min_y = min_y < point.y ? min_y : point.y;
        }
        float height,width;
        height = max_y - min_y;
        width = max_x - min_x;
        // Expand the bounding box by 30% around its centre and clip it to the
        // camera frame.
        roi = cv::Rect2d(min_x - (1.3 - 1) * 0.5 * width, min_y - (1.3 - 1) * 0.5 * height, width * 1.3, height * 1.3);
        roi &= m_camera_resolution;
        // Targets too large even with margin: use the default ROI, vertically
        // recentred on the detections and clamped inside the frame.
        if (roi.height * 1.5 > m_deep_roi_size.height || roi.width * 1.7 > m_deep_roi_size.width)
        {
            m_deep_roi_state = wmj::DeepROISizeState::ROI_BIG;
            roi = m_deep_default_roi; 
            roi.y = min_y + (height - roi.height) / 2.0;
            roi.y = std::max(std::min((float)roi.y, (float)(m_camera_resolution.height - 1 - m_deep_default_roi.height)), 1.f);
        }

        // Borderline size: behaviour depends on the current ROI state to add
        // hysteresis between the small and big ROI modes.
        else if (height * 1.9 > m_deep_roi_size.height || width * 2.1 > m_deep_roi_size.width)
        {
            if (m_deep_roi_state == wmj::DeepROISizeState::ROI_SMALL)
            
            {
                // Stay small: recentre a network-input-sized window on the
                // detections and clamp it inside the frame.
                roi.y += (roi.height - m_deep_roi_size.height) / 2.0;
                roi.height = m_deep_roi_size.height;
                roi.x += (roi.width - m_deep_roi_size.width) / 2.0;
                roi.width = m_deep_roi_size.width;
                roi.x = std::max(std::min((float)roi.x, (float)(m_camera_resolution.width - 1 - m_deep_roi_size.width)), 1.f);
                roi.y = std::max(std::min((float)roi.y, (float)(m_camera_resolution.height - 1 - m_deep_roi_size.height)), 1.f);
            }
            else
            {
                // Stay big: default ROI, vertically recentred and clamped.
                roi = m_deep_default_roi; 
                roi.y = min_y + (height - roi.height) / 2.0;
                roi.y = std::max(std::min((float)roi.y, (float)(m_camera_resolution.height - 1 - m_deep_default_roi.height)), 1.f);
            }
        }
        else
        {
            // Targets comfortably fit: switch to the small ROI — a window the
            // size of the network input (capped at the camera resolution),
            // centred on the detections and clamped inside the frame.
            m_deep_roi_state = wmj::DeepROISizeState::ROI_SMALL;
            roi.y += (roi.height - m_deep_roi_size.height) / 2.0;
            roi.height = m_deep_roi_size.height < m_camera_resolution.height ? m_deep_roi_size.height: m_camera_resolution.height;
            roi.x += (roi.width - m_deep_roi_size.width) / 2.0;
            roi.width = m_deep_roi_size.width < m_camera_resolution.width ? m_deep_roi_size.width : m_camera_resolution.width ;
            roi.x = std::max(std::min((float)roi.x, (float)(m_camera_resolution.width - 1 - m_deep_roi_size.width)), 1.f);
            roi.y = std::max(std::min((float)roi.y, (float)(m_camera_resolution.height - 1 - m_deep_roi_size.height)), 1.f);
        }
        // Degenerate result (e.g. clipped away entirely): fall back to default.
        if (roi.area() == 0)
        {
            roi = m_deep_default_roi;
        }
        m_roi = roi;
        return;
    }
    // void DeepDetector_Node::sync_images_callback_(
    //         const sensor_msgs::msg::Image::ConstSharedPtr& left_image,
    //         const sensor_msgs::msg::Image::ConstSharedPtr& right_image)
    // {
    //     double a_time,b_time,c_time;
    //     a_time = left_image->header.stamp.sec + left_image->header.stamp.nanosec * 1e-9;
    //     b_time = right_image->header.stamp.sec + right_image->header.stamp.nanosec * 1e-9;
    //     builtin_interfaces::msg::Time t =now();
    //     c_time = t.sec + t.nanosec * 1e-9;

    //     // std::cout << "a: " << a_time << std::endl;
    //     // std::cout << "b - a: " << b_time - a_time << std::endl;
    //     // std::cout << "c: " << c_time << std::endl;

    //     cv::Mat left_img = DeepDetector_Node::getImg(left_image);
    //     cv::Mat right_img = DeepDetector_Node::getImg(right_image);
    //     t =now();
    //     double  start_time = t.sec + t.nanosec * 1e-9;
    //     // m_deepDetector->DeepDetectDouble(left_img,right_img,m_roi,m_roi);
    //     t =now();
    //     double  end_time = t.sec + t.nanosec * 1e-9;
    //     // std::cout << "getimg time: " << start_time - c_time << std::endl;

    //     base_interfaces::msg::Armors armors_msg;
    //     armors_msg.num = m_deepDetector -> m_armors.size() + m_deepDetector -> m_armors.size();
    //     for (auto armor : m_deepDetector -> m_armors)
    //     {
    //         // double distance = 0;         // 当前装甲板距离
    //         base_interfaces::msg::Armor armor_msg;
    //         armor_msg.armor_type = armor.m_armor_type;
    //         // armor.position_x = m_deepDetector->m_armors[i].m_position.x;
    //         // armor.position_y = m_deepDetector->m_armors[i].m_position.y;
    //         // armor.position_z = m_deepDetector->m_armors[i].m_position.z;
    //         // armor.yaw_angle = m_deepDetector->m_armors[i].m_yaw_angle;
    //         // armor_msg.time_seq = armor.m_time_seq;
    //         armor_msg.id = armor.m_id;
    //         armor_msg.color = armor.m_color;
    //         for (int j = 0; j < 4; j++)
    //         {
    //             base_interfaces::msg::Vertice vertice;
    //             vertice.position_x = armor.m_vertices[j].x;
    //             vertice.position_y = armor.m_vertices[j].y;
    //             armor_msg.vertices.push_back(vertice);
    //         }
    //         armor_msg.header = left_image->header;
    //         armors_msg.armors.push_back(armor_msg);
    //     }

    //     for (auto armor : m_deepDetector -> m_armors)
    //     {
    //         // double distance = 0;         // 当前装甲板距离
    //         base_interfaces::msg::Armor armor_msg;
    //         armor_msg.armor_type = armor.m_armor_type;
    //         // armor.position_x = m_deepDetector->m_armors[i].m_position.x;
    //         // armor.position_y = m_deepDetector->m_armors[i].m_position.y;
    //         // armor.position_z = m_deepDetector->m_armors[i].m_position.z;
    //         // armor.yaw_angle = m_deepDetector->m_armors[i].m_yaw_angle;
    //         // armor_msg.time_seq = armor.m_time_seq;
    //         armor_msg.id = armor.m_id;
    //         armor_msg.color = armor.m_color;
    //         for (int j = 0; j < 4; j++)
    //         {
    //             base_interfaces::msg::Vertice vertice;
    //             vertice.position_x = armor.m_vertices[j].x;
    //             vertice.position_y = armor.m_vertices[j].y;
    //             armor_msg.vertices.push_back(vertice);
    //         }
    //         armor_msg.header = right_image->header;
    //         armors_msg.armors.push_back(armor_msg);
    //     }
    //     if (armors_msg.num == 0)
    //     {
    //         base_interfaces::msg::Armor armor;
    //         base_interfaces::msg::Vertice vertice;
    //         armor.armor_type = 0;
    //         // armor.position_x = 0;
    //         // armor.position_y = 0;
    //         // armor.position_z = 0;
    //         armor.yaw_angle = 0;
    //         // armor.time_seq = 0;
    //         armor.id = 1;
    //         armor.color = 2;
    //         vertice.position_x = 0;
    //         vertice.position_y = 0;
    //         for (int i = 0; i < 4; i++)
    //             armor.vertices.push_back(vertice);
    //         armor.header = right_image->header;
    //         armor.header.frame_id = "None";
    //         armors_msg.armors.push_back(armor);
    //         armors_msg.distance = 0;
    //     }
    //     armors_msg.stamp = now();
    //     armorsPub->publish(armors_msg);
    //     builtin_interfaces::msg::Time t_ =now();
    //     double  d_time = t_.sec + t_.nanosec * 1e-9;
    //     std::cout << "infer time: " << end_time - start_time << std::endl;
    //     std::cout << "c - a: " << c_time - a_time << std::endl;
    //     std::cout << "time: " << a_time - stamp << std::endl;
    //     stamp = a_time;
    //     std::cout << "all round time: " << d_time - a_time << std::endl;
    //     std::cout << "armors_num: " << armors_msg.num << std::endl;



    //     // auto left_img = MatWithTime(image, msg->header.stamp.nanosec, "left");
    // }

    /**
     * @brief Scan-control callback: records whether the gimbal is scanning.
     *
     * Mode 4 is treated as "not scanning"; every other mode counts as
     * scanning. (NOTE(review): meaning of mode 4 inferred from this check
     * only — confirm against the ScanCtrlInfo publisher.)
     *
     * @param msg scan control message
     */
    void DeepDetector_Node::scanCtrlInfoCallback(const base_interfaces::msg::ScanCtrlInfo::SharedPtr msg)
    {
        m_isscan = (msg->scan_mode != 4);
    }

}
#include "rclcpp_components/register_node_macro.hpp"

RCLCPP_COMPONENTS_REGISTER_NODE(wmj::DeepDetector_Node)