#include <ros/ros.h>
#include <sensor_msgs/PointCloud2.h>
#include <geometry_msgs/TransformStamped.h>
#include <tf2_geometry_msgs/tf2_geometry_msgs.h>
#include <tf2_ros/transform_listener.h>
#include <tf2_sensor_msgs/tf2_sensor_msgs.h>
#include <tf/transform_broadcaster.h>

#include <pcl_ros/point_cloud.h>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl_ros/transforms.h>
#include <pcl/point_types.h>

#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <sensor_msgs/Image.h>


#include<cv_bridge/cv_bridge.h>
// #include "cv_bridge/cv_bridge.h"
#include<opencv2/opencv.hpp>
#include <image_transport/image_transport.h>


#include <nav_msgs/Odometry.h>

//#include <pcl/conversions.h>
#include <map>
#include <pcl/filters/passthrough.h>
#include <chrono>
#include <thread>
#include"depth_rgb_handle/rgb_depth_img.hpp"
#include"depth_rgb_handle/calib.hpp"
// #include"depth_rgb_handle/rgb_lidar.hpp"
#include"depth_rgb_handle/pc_handle.hpp"
#include"depth_rgb_handle/utils.hpp"
// #include <X11/Xlib.h>

using namespace std;
using PointType = pcl::PointXYZI;


//rgb depth img handle
RgbDepthHandle rgb_depth_handle_;

//pc handle
PCHandle pc_handle_;

//gui 
cv::String window_name_rs_ = "realsense";

//kb
string kb_inf_rs_;
// mutex kb_lock_;

//display
ros::Publisher  cloud_pub_, bbox_pub_, pc_plane_pub_, bbox_plane_pub_, pc_plane_back_pub_;


void display_th(){
    if(rgb_depth_handle_.get_is_get_obj()){
        // display_pc(pc_handle_.get_pc(), cloud_pub_);
        display_bbox(pc_handle_.get_global_inf(), bbox_pub_);
        // display_pc_for_measure(pc_handle_.get_global_inf(), pc_plane_pub_, bbox_plane_pub_);
        display_pc_for_measure_one(pc_handle_.get_global_inf(), pc_plane_back_pub_);
    }
}

void get_key_th(){
    kb_inf_rs_ = get_keyboard();  
}

void save_data_th(){
        // ROS_INFO("rs_handle callback ok");
        pc_handle_.get_object_pc(rgb_depth_handle_.get_object_points_rs());
        // ROS_INFO("rs_handle ok");
        pc_handle_.get_bbox_save();
        // pc_handle_.transform_save_pc(pc_handle_.get_local_obj_inf());
}

void capture_event_rs(int event, int x, int y, int flags, void *params){
    if (event == cv::EVENT_LBUTTONDOWN) {
        rgb_depth_handle_.record_point(x, y);
    }
    if (event == cv::EVENT_RBUTTONDOWN) {

    }
}

void img_CB(const sensor_msgs::ImageConstPtr& rgb_img_msg, const sensor_msgs::ImageConstPtr& depth_img_msg){
    float T_base2rs[12] = {    0.0111,  -0.9999,    0.0045,     0.1734,
            -0.0084,   -0.0046,   -1.0000,     0.0779,
                0.9999,    0.0110,  -0.0084,   -0.1902}; //to do get tf from hand

    rgb_depth_handle_.get_img_extrinsics(cv_bridge::toCvCopy(rgb_img_msg, sensor_msgs::image_encodings::BGR8)->image, 
                                                                                        cv_bridge::toCvCopy(depth_img_msg, sensor_msgs::image_encodings::TYPE_16UC1)->image, T_base2rs,
                                                                                        rgb_img_msg->header.stamp);
    cv::namedWindow(window_name_rs_,CV_WINDOW_AUTOSIZE);
    cv::moveWindow(window_name_rs_, 10,10);
    cv::setMouseCallback(window_name_rs_, capture_event_rs);

    //debug
    // display_pc(pc_handle_.local_object_pc_.p_object_pc, cloud_pub_);
    //debug

    if(kb_inf_rs_=="save"){
        save_data_th();
    }
    rgb_depth_handle_.draw_contours(kb_inf_rs_);  //don't use & it will have a bug 
    kb_inf_rs_ = string();

    cv::imshow(window_name_rs_, rgb_depth_handle_.get_img_show());
    // cv::waitKey(1);
    get_key_th();
    display_th();
}

int main(int argc, char* argv[])
{
    ros::init(argc, argv, "realsense_handle");
    ros::NodeHandle nh;
    // message_filters::Subscriber<sensor_msgs::Image> image_front_sub(nh, "/rs_hand/color/image_raw", 5);
    message_filters::Subscriber<sensor_msgs::Image> image_front_sub(nh, "/camera/color/image_raw", 5);
    // message_filters::Subscriber<sensor_msgs::Image> depth_image_sub(nh, "/rs_hand/aligned_depth_to_color/image_raw", 5);
    message_filters::Subscriber<sensor_msgs::Image> depth_image_sub(nh, "/camera/aligned_depth_to_color/image_raw", 5);
    typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image> MySyncPolicy;
    // ApproximateTime takes a queue size as its constructor argument, hence MySyncPolicy(10)
    message_filters::Synchronizer<MySyncPolicy> sync(MySyncPolicy(20), image_front_sub, depth_image_sub);
    sync.setMaxIntervalDuration(ros::Duration(0.05));
    sync.registerCallback(boost::bind(&img_CB, _1, _2));

    // ros::Subscriber odom_sub = nh.subscribe<nav_msgs::Odometry>("/lio_sam/mapping/odometry", 1, odomCB);

    // cloud_pub_ = nh.advertise<sensor_msgs::PointCloud2>("rs/object_pc",1);
    bbox_pub_=nh.advertise<visualization_msgs::MarkerArray>("rs/bbox", 1);
    // pc_plane_pub_ = nh.advertise<sensor_msgs::PointCloud2>("rs/object_pc_for_measure",1);
    // bbox_plane_pub_=nh.advertise<visualization_msgs::MarkerArray>("rs/bbox_for_measure", 1);
    pc_plane_back_pub_ = nh.advertise<sensor_msgs::PointCloud2>("rs/object_pc_back_for_measure",1);

    ros::spin();

    return 0;
}
