#include "../include/param.h"
#include "../include/detector.h"
#include "../include/localization.h"
#include "../include/dip_utils.h"
#include "../include/planner.h"
#include <ros/ros.h>
#include <iostream>
#include <opencv2/opencv.hpp>
#include <Eigen/Dense>
#include <boost/thread.hpp>
#include <boost/atomic.hpp>
#include <chrono>
//msg
#include <cv_bridge/cv_bridge.h>
#include <image_transport/image_transport.h>
#include <dip_msg/web_command_msg.h>
#include <dip_msg/visualize_msg.h>
#include <sensor_msgs/Imu.h>
#include <geometry_msgs/Twist.h>


// Shared state between the subscriber callbacks and the main loop.
// NOTE(review): these globals are written from callbacks running on the
// AsyncSpinner thread and read from main() without synchronization — consider
// a mutex (for the cv::Mat frames) and std::atomic/boost::atomic for the flag.
cv::Mat rgb_frame;//latest color image (written by RGBCallback)
cv_bridge::CvImagePtr rgb_ptr;
cv::Mat depth_frame;//latest depth image (written by DepthCallback)
cv_bridge::CvImagePtr depth_ptr;
cv::Mat visualize_frame;//visualization image published on /visualize_mat

dip_msg::web_command_msg web_command;//last command received from the web UI
dip_msg::visualize_msg visualize_data; 
bool  reset_param_flag = false;//set by CommandCallback when a received command changes a Param value; main loop resets the detector and clears it
sensor_msgs::Imu imu_data; //latest IMU message
geometry_msgs::Twist robot_vel_data;//commanded robot velocity (per original note: not a true measured feedback signal)
geometry_msgs::Twist vel_command;//velocity command to publish on /cmd_vel

// Converts an incoming color image to BGR8 and caches it in the global
// rgb_ptr / rgb_frame for the main loop.
void RGBCallback(const sensor_msgs::ImageConstPtr& color_msg)
{
    try
    {
        rgb_ptr = cv_bridge::toCvCopy(color_msg, sensor_msgs::image_encodings::BGR8);    
    }
    catch (cv_bridge::Exception& e )
    {
        ROS_ERROR("Could not convert from '%s' to 'bgr8'.", color_msg->encoding.c_str());
        // BUGFIX: bail out on conversion failure. Previously execution fell
        // through to the dereference below, which is a null-pointer crash on
        // the first failed frame (rgb_ptr never assigned) and reuses a stale
        // image afterwards. This also matches DepthCallback's error handling.
        return;
    }
    rgb_frame = rgb_ptr->image;
}

// Converts an incoming depth image to 16UC1 and caches it in the global
// depth_ptr / depth_frame for the main loop.
// Note: 32FC1 was tried first, but the actual data is 16-bit; `rostopic echo`
// printed misleading 0-255 values. See
// https://blog.csdn.net/qq_32761549/article/details/105373669
void DepthCallback(const sensor_msgs::ImageConstPtr& depth_msg)
{
    cv_bridge::CvImagePtr converted;
    try
    {
        converted = cv_bridge::toCvCopy(depth_msg, sensor_msgs::image_encodings::TYPE_16UC1);
    }
    catch (cv_bridge::Exception& e )
    {
        ROS_ERROR("Could not convert from '%s' to '16UC1'.", depth_msg->encoding.c_str());
        return;
    }
    depth_ptr = converted;
    depth_frame = depth_ptr->image;
}

// Caches the latest IMU message for the main loop's prediction step.
void IMUCallback(const sensor_msgs::ImuConstPtr &_imu_msg)
{
    imu_data = *_imu_msg;
    imu_data.angular_velocity.z *=100;//empirical scale factor; original author noted the reason is unknown ("don't know why x100 is needed") — TODO verify the IMU driver's units
}

// Caches the commanded robot velocity (per original note: not a true measured
// feedback signal) for the main loop's prediction step.
void VelCallback(const geometry_msgs::TwistConstPtr &_robot_vel)
{
    robot_vel_data = *_robot_vel;
}

void CommandCallback(const dip_msg::web_command_msgConstPtr &command)
{
    if (web_command.left_h_min != command->left_h_min){Param::LC_H_MIN = command->left_h_min;reset_param_flag = true;}
    if (web_command.left_h_max != command->left_h_max){Param::LC_H_MAX = command->left_h_max;reset_param_flag = true;}
    if (web_command.right_h_min != command->right_h_min){Param::RC_H_MIN = command->right_h_min;reset_param_flag = true;}
    if (web_command.right_h_max != command->right_h_max){Param::RC_H_MAX = command->right_h_max;reset_param_flag = true;}
    if (web_command.s_min != command->s_min)
    {
        Param::LC_S_MIN = command->s_min;
        Param::RC_S_MIN = command->s_min;
        reset_param_flag = true;
    }
    if (web_command.s_max != command->s_max)
    {
        Param::RC_S_MAX = command->s_max;
        Param::LC_S_MAX = command->s_max;
        reset_param_flag = true;
    }
    if (web_command.v_min != command->v_min)
    {
        Param::RC_V_MIN = command->v_min;
        Param::LC_V_MIN = command->v_min;
        reset_param_flag = true;
    }
    if (web_command.v_max != command->v_max)
    {
        Param::RC_V_MAX = command->v_max;
        Param::LC_V_MAX = command->v_max;
        reset_param_flag = true;
    }
    web_command = *command;
}

int main(int argc,char **argv)
{
    //ros init
    ros::init(argc,argv,"localization_node");
    ros::NodeHandle nh;
    ros::Subscriber rgb_frame_sub = nh.subscribe("/camera/rgb/image_raw",1,&RGBCallback);
    ros::Subscriber depth_frame_sub = nh.subscribe("/camera/depth_registered/image",1,&DepthCallback);
    ros::Subscriber command_sub = nh.subscribe("/web_command",1,&CommandCallback);
    ros::Subscriber imu_sub = nh.subscribe("/imu",1,&IMUCallback);
    ros::Subscriber robot_vel_sub = nh.subscribe("/robot_cmd_vel",1,&VelCallback);
    image_transport::ImageTransport it(nh);
    image_transport::Publisher visualize_mat_pub = it.advertise("/visualize_mat",1); //图像发布节点
    ros::Publisher visualize_msg_pub = nh.advertise<dip_msg::visualize_msg>("/visualize_msg",1);//可视化信息发布节点
    ros::Publisher vel_cmd_pub = nh.advertise<geometry_msgs::Twist>("/cmd_vel",1);//指令发送节点
    ROS_INFO("--------LOCALIZATION NODE--------");
    Param::loadParam("/home/lin/hitsz-dip/src/dip_practicum/config/param.yaml");
    Param::print_debug_string();
    dip::ColumnDetector detector;
    dip::Localization localization;
    dip::GlobalPlanner global_planner;
    dip::LocalPlanner local_planner;
    ros::Rate loop_rate(30);
    ros::AsyncSpinner spinner(1);
    spinner.start();

    //任务相关变量
    std::vector<cv::Mat> binary_imgs;//二值化图像序列
    std::vector<cv::Point> column_centers;//圆柱图像中心点
    std::vector<Eigen::Vector3f> camera_points;//相机坐标系下坐标
    std::vector<Eigen::Vector2f> proj_points;//投影后坐标
    proj_points.resize(2);
    Eigen::Vector2f proj_camera_org;//投影后相机世界坐标
    Eigen::Vector2f filter_proj_cam_org;//滤波投影后相机世界坐标
    Eigen::Vector2f predict_proj_cam_org;//预测投影后相机世界坐标
    sensor_msgs::ImagePtr visualize_mat_msg = nullptr;
    while(nh.ok())
    {
        if (reset_param_flag)
        {
            detector.reset();
            // localization.reset();
            reset_param_flag = false;
            std::cout<<"[MAIN] :Param reset"<<std::endl;
            Param::print_debug_string();
        }
        if (rgb_frame.empty() || depth_frame.empty()){continue;}
        auto t_start = std::chrono::high_resolution_clock::now();
        //task switch
        auto new_task = dip::int2task(web_command.task);
        global_planner.switch_task_status(new_task);
        bool res = detector.detect_column(rgb_frame,depth_frame,binary_imgs,column_centers,camera_points);
        for (int i=0;i<column_centers.size();i++)
        {
            std::cout<<"image point:"<<column_centers[i].x<<","<<column_centers[i].y<<std::endl;
        }
        for(int i=0;i<camera_points.size();i++)
        {
            std::cout<<"point in camera axis:"<<camera_points[i](0,0)<<","<<camera_points[i](1,0)<<camera_points[i](2,0)<<std::endl;
        }
        if (!res){
            //如果无观测
            //predict & get predict x y yaw
            Eigen::Vector2f vel;
            vel<<robot_vel_data.linear.x,robot_vel_data.linear.y;

            Eigen::VectorXf predict_pose(2);
            float predict_yaw;
            bool inited = localization.predict(vel,imu_data.angular_velocity.z,0.03,predict_pose,predict_yaw);
            if (!inited){continue;}
            Eigen::Vector3f pose;
            pose<<predict_pose(0,0),predict_pose(1,0),predict_yaw;
            global_planner.update(false,pose);
            local_planner.update(global_planner.pose_trj,pose);
            vel_command = local_planner.command;

            visualize_data.predict_x = predict_pose(0,0);
            visualize_data.predict_y = predict_pose(1,0);
            visualize_data.predict_yaw = predict_yaw;
            //get cov
            localization.get_coords_cov(visualize_data.cov_x,visualize_data.cov_y);

            dip::get_visualize_mat(binary_imgs,column_centers,visualize_frame);
            visualize_mat_msg = cv_bridge::CvImage(std_msgs::Header(),"bgr8",visualize_frame).toImageMsg();
            visualize_mat_pub.publish(visualize_mat_msg);
            visualize_msg_pub.publish(visualize_data);
            vel_cmd_pub.publish(vel_command);
            continue;
        }
        
        proj_points[0]<<camera_points[0](0,0),camera_points[0](1,0);
        proj_points[1]<<camera_points[1](0,0),camera_points[1](1,0);
        localization.update(proj_points[0],proj_points[1]);

        auto t_end = std::chrono::high_resolution_clock::now();
        auto total_time = std::chrono::duration<float,std::milli>(t_end-t_start).count();
        std::cout<<"cost time:"<<total_time<< "ms"<<std::endl;
        localization.get_calc_coords(proj_camera_org);
        // std::cout<<proj_camera_org<<std::endl;
        visualize_data.camera_pose_x = proj_camera_org(0,0);
        visualize_data.camera_pose_y = proj_camera_org(1,0);
        visualize_data.camera_pose_theta = localization.get_calc_yaw();
        //get filter yaw
        localization.get_filter_coords(filter_proj_cam_org);
        visualize_data.filter_x = filter_proj_cam_org(0,0);
        visualize_data.filter_y = filter_proj_cam_org(1,0);
        visualize_data.filter_yaw = localization.get_filter_yaw();
        //predict & get predict x y yaw
        Eigen::Vector2f vel;
        vel<<robot_vel_data.linear.x,robot_vel_data.linear.y;

        Eigen::VectorXf predict_pose(2);
        float predict_yaw;
        bool inited = localization.predict(vel,imu_data.angular_velocity.z,0.03,predict_pose,predict_yaw);
        if (!inited)
        {
            std::cout<<"[Localization] :Predict failed,must init "<<std::endl;
            continue;
        }
        Eigen::Vector3f pose;
        pose<<predict_pose(0,0),predict_pose(1,0),predict_yaw;
        global_planner.update(true,pose);
        local_planner.update(global_planner.pose_trj,pose);
        vel_command = local_planner.command;

        visualize_data.predict_x = predict_pose(0,0);
        visualize_data.predict_y = predict_pose(1,0);
        visualize_data.predict_yaw = predict_yaw;

        //get cov
        localization.get_coords_cov(visualize_data.cov_x,visualize_data.cov_y);
        //publish
        dip::get_visualize_mat(binary_imgs,column_centers,visualize_frame);
        visualize_mat_msg = cv_bridge::CvImage(std_msgs::Header(),"bgr8",visualize_frame).toImageMsg();
        visualize_mat_pub.publish(visualize_mat_msg);
        visualize_msg_pub.publish(visualize_data);
        vel_cmd_pub.publish(vel_command);
        // cv::imshow("threshold",visualize_frame);
        // cv::waitKey(0);
    }
    return 0;
}