#include <ros/ros.h>
#include <sensor_msgs/PointCloud2.h>
#include <geometry_msgs/TransformStamped.h>
#include <tf2_geometry_msgs/tf2_geometry_msgs.h>
#include <tf2_ros/transform_listener.h>
#include <tf2_sensor_msgs/tf2_sensor_msgs.h>
#include <tf/transform_broadcaster.h>

#include <pcl_ros/point_cloud.h>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl_ros/transforms.h>
#include <pcl/point_types.h>

#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <sensor_msgs/Image.h>
#include <visualization_msgs/MarkerArray.h>
#include <visualization_msgs/Marker.h>

//#include<cv_bridge/cv_bridge.h>
#include "cv_bridge/cv_bridge.h"
#include <opencv2/opencv.hpp>
#include <image_transport/image_transport.h>


#include <nav_msgs/Odometry.h>

//#include <pcl/conversions.h>
#include <algorithm>
#include <chrono>
#include <map>
#include <pcl/filters/passthrough.h>
#include "depth_img_handle_cpp/api_share_ros.cpp"
#include "depth_img_handle_cpp/calib.hpp"
#include "depth_img_handle_cpp/feature_match.hpp"

using namespace Eigen;
using namespace std;
using PointType = pcl::PointXYZI;

// camera <-> lidar calibration helper (intrinsics/extrinsics from calib.hpp)
Calib calib_;

// latest top-camera frames handed to the external detector
cv::Mat img_;
cv::Mat depth_img_;

// timing probes around one detector round-trip (t1_ set on send, t2_ on result)
std::chrono::system_clock::time_point t1_, t2_;

// display: publisher for the annotated result image
image_transport::Publisher image_pub_;

// stability check: recent per-frame object positions (x, y, z, yaw);
// a pose is published only once consecutive detections agree
vector<Eigen::Vector4f> hand_objs_;
vector<Eigen::Vector4f> top_objs_;

// hand camera pipeline state
FeatureMatch feature_match_;  // yaw estimation by feature matching
Mat hand_img_;
Mat hand_depth_img_;
ros::Publisher hand_obj_pos_pub_;

// pub goal
ros::Publisher target_goal_pub_;
bool is_get_obj_pos = false;  // latch: the navigation goal is published only once
float depth_bias_ = 0.5;      // meters subtracted from top-camera depth -- TODO confirm the source of this offset

// Empty `vt` and release its heap storage: swapping with a
// default-constructed temporary frees the capacity, which a plain
// clear() would keep allocated.
template <typename T >
void clearVector(std::vector<T> &vt){
    std::vector<T>().swap(vt);
}

bool is_obj_stable(vector<Eigen::Vector4f> &pos_v){
    if (pos_v.size() <=0)
        return false;
    Eigen::Vector4f first_pos;
    first_pos = pos_v[0];
    for(int i = 1; i<pos_v.size();i++)
    {
        if((first_pos-pos_v[i]).norm()>0.1){
            clearVector(pos_v);
            return false;
        }
    }
    return true;
}

void limit_point_in_img(Point2i& point){
    if(point.x  < 0){
        point.x  = 0;
    }
    if(point.x  > 639){
        point.x  = 639;
    }
    if(point.y < 0){
        point.y  = 0;
    }
    if(point.y  > 479){
        point.y  = 479;
    }
}

float get_depth(Point2i bbox_center, Mat depth_img){
    Point2i left_up_p,right_up_p,left_down_p,right_down_p;
    int circle_r = 1;
    float depth_min = float(depth_img.at<uint16_t>(bbox_center.y, bbox_center.x)*0.001);
    // cout<<"ok1 raw "<<depth_img.at<uint16_t>(bbox_center.y, bbox_center.x)<<endl;
    // cout<<"ok1 "<<depth_min<<endl;
    while(depth_min <0.001&& circle_r<480){
        vector<float>  depth_queue;
        left_up_p.x = bbox_center.x - circle_r;
        left_up_p.y = bbox_center.y - circle_r;
        limit_point_in_img(left_up_p);
        right_up_p.x = bbox_center.x + circle_r;
        right_up_p.y = bbox_center.y - circle_r;
        limit_point_in_img(right_up_p);
        for(int x_idx = left_up_p.x ; x_idx <= right_up_p.x; x_idx++){
            depth_queue.push_back(float(depth_img.at<uint16_t>(left_up_p.y , x_idx)*0.001));
        }
        left_down_p.x = bbox_center.x - circle_r;
        left_down_p.y = bbox_center.y + circle_r;
        limit_point_in_img(left_down_p);
        right_down_p.x = bbox_center.x + circle_r;
        right_down_p.y = bbox_center.y + circle_r;
        limit_point_in_img(right_down_p);
        for(int x_idx = left_up_p.x ; x_idx <= right_up_p.x + circle_r; x_idx++){
            depth_queue.push_back(float(depth_img.at<uint16_t>(left_down_p.y , x_idx)*0.001));
        }
        for(int y_idx = left_up_p.y +1 ; y_idx <= left_down_p.y -1; y_idx++){
            depth_queue.push_back(float(depth_img.at<uint16_t>(y_idx, left_up_p.x)*0.001));
        }
        for(int y_idx = right_up_p.y +1 ; y_idx <= right_down_p.y -1; y_idx++){
            depth_queue.push_back(float(depth_img.at<uint16_t>(y_idx, right_up_p.x)*0.001));
        }
        circle_r++;
        depth_min = *min_element(depth_queue.begin(),depth_queue.end());
    }
    return depth_min;
}

// Forward a color frame to the external detector process via shared memory.
// `share_men_instance_` comes from api_share_ros.cpp (included above);
// presumably the detector picks the frame up asynchronously -- see the
// img_flag handshake in the callbacks below.
void send_img(Mat img){
    share_men_instance_.send_img(img);
}

// Hand-camera callback (synchronized color + aligned depth).
// Sends the color frame to the external detector through shared memory,
// waits for its 2D boxes, back-projects each box center to a 3D camera-frame
// position using the depth image, estimates yaw via feature matching, and,
// once consecutive detections agree (is_obj_stable), publishes the first
// object's pose as a nav_msgs::Odometry on `obj_pose`.
// NOTE(review): the wait loop below blocks this callback thread with no
// timeout; if the detector never sets img_flag == 3 it spins forever.
void hand_img_CB(const sensor_msgs::ImageConstPtr& front_image_msg, const sensor_msgs::ImageConstPtr& depth_img_msg){
    cv_bridge::CvImagePtr image_front;
    image_front = cv_bridge::toCvCopy(front_image_msg, sensor_msgs::image_encodings::BGR8);
    hand_img_ = (*image_front).image;
    send_img(hand_img_);
    share_men_instance_.p_share_data->time_stamp=float(front_image_msg->header.stamp.toSec());

    // Depth aligned to the color image; 16-bit, millimeters per unit.
    hand_depth_img_= cv_bridge::toCvCopy(depth_img_msg, sensor_msgs::image_encodings::TYPE_16UC1)->image;

    // Busy-wait until the detector marks results ready (img_flag == 3).
    while(share_men_instance_.p_share_data->img_flag != 3){
       ros::Duration(0.001).sleep();
    }

    Mat img_show;
    hand_img_.copyTo(img_show);
    // The flag can only be 3 here (the loop above guarantees it).
    if (  share_men_instance_.p_share_data->img_flag == 3 ){  
        share_men_instance_.p_share_data->img_flag = 0;  // hand the buffer back to the detector
        cout<<"object num  "<<share_men_instance_.p_share_data->img_pre_object_size<<endl;
        for(int i = 0; i < share_men_instance_.p_share_data->img_pre_object_size; i++){
            // Box is stored as [x1, y1, x2, y2]; Rect wants top-left (x, y)
            // plus width (x extent) and height (y extent).
            Rect rect(share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i], share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i + 1],
                 share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i + 2] - share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i] , 
                 share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i + 3] - share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i + 1]);
            // cout<<"2dbbox  "<< share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i ] <<" "<<share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i + 1] <<" "<<
            //     share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i + 2] <<" "<<share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i + 3] <<" "<<endl;
            // NOTE(review): a single undersized box aborts the WHOLE callback
            // (skips later objects and the image publish) -- confirm this
            // shouldn't be `continue` instead.
            if(rect.width<50||rect.height<50)
                return;
            cv::rectangle(img_show, rect, Scalar(255, 0, 0),3, LINE_8,0);
            // Back-project the box center to a 3D point in the camera frame.
            Point2i bbox_center;
            bbox_center.x = (share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i] + share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i + 2])/2;
            bbox_center.y = (share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i + 1] + share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i + 3])/2;
            Eigen::Vector4f center_pos_camera(calib_.img2camera(bbox_center.x , bbox_center.y ,get_depth(bbox_center, hand_depth_img_)));
            // center_pos_lidar =  ;
            cout<<"object num_id: "<<i<<endl;
            cout<<" depth: "<< get_depth(bbox_center, hand_depth_img_) <<endl;
            cout<<"pos lidar:  "<<center_pos_camera(0)<<" "<<center_pos_camera(1)<<" "<<center_pos_camera(2)<<endl;
            // Yaw from feature matching; values > 180 are the error sentinel.
            double angle_feture =  feature_match_.cal_yaw(hand_img_,rect);
            if(angle_feture>180){
                cout<<"ERROR!!!!! cannot find H"<<endl;
                return;
            }
            // Pack yaw into the 4th component alongside (x, y, z).
            center_pos_camera(3) = angle_feture;
            hand_objs_.push_back(center_pos_camera);
        }
        t2_ = std::chrono::system_clock::now();
        cout<<"cost time   "<<std::chrono::duration_cast<std::chrono::microseconds>( t2_-t1_ ).count()<<std::endl;
    }

    {
        // OpenCV images are BGR; publishing with the wrong encoding corrupts the display.
        sensor_msgs::ImagePtr msg = cv_bridge::CvImage(std_msgs::Header(), "bgr8", img_show).toImageMsg();
        image_pub_.publish(msg);
    }
    
    // Publish only once detections have accumulated and agree with each other.
    if(hand_objs_.size()<1|| !is_obj_stable(hand_objs_))
        return;

    // Encode the estimated yaw as a quaternion about Z.
    tf::Quaternion q_delta;
    q_delta.setRPY(0, 0,hand_objs_[0](3));

    nav_msgs::Odometry pose_delta;
    pose_delta.header.stamp = ros::Time::now(); 
    pose_delta.header.frame_id = "ok";
    pose_delta.pose.pose.position.x = hand_objs_[0](0);
    pose_delta.pose.pose.position.y = hand_objs_[0](1);
    pose_delta.pose.pose.position.z = hand_objs_[0](2);
    pose_delta.pose.pose.orientation.w = q_delta.w();
    pose_delta.pose.pose.orientation.x = q_delta.x();
    pose_delta.pose.pose.orientation.y = q_delta.y();
    pose_delta.pose.pose.orientation.z = q_delta.z();
    // Raw yaw is duplicated in the twist for consumers that want the angle directly.
    pose_delta.twist.twist.angular.z = hand_objs_[0](3);
    hand_obj_pos_pub_.publish(pose_delta);
    clearVector(hand_objs_);  // restart stability accumulation
}

// Top-camera callback (synchronized color + aligned depth + SLAM odometry).
// Sends the color frame to the external detector via shared memory, waits
// for its 2D boxes, back-projects each box center through the camera->lidar
// calibration, transforms it into the map frame with the SLAM pose, and,
// once at least 4 consecutive detections agree (is_obj_stable), publishes
// the first object's position once as a move_base goal.
// NOTE(review): the wait loop below blocks this callback thread with no
// timeout; if the detector never sets img_flag == 3 it spins forever.
void img_CB(const sensor_msgs::ImageConstPtr& front_image_msg, const sensor_msgs::ImageConstPtr& depth_img_msg,
                            const nav_msgs::OdometryConstPtr& slam_odom_msg){
    // Latch: once the goal has been published this callback is a no-op.
    if(is_get_obj_pos)
        return;

    // Build the 4x4 map<-lidar transform from the SLAM odometry.
    Eigen::Quaternionf slam_odom_q(slam_odom_msg->pose.pose.orientation.w, slam_odom_msg->pose.pose.orientation.x,
                                                                            slam_odom_msg->pose.pose.orientation.y,slam_odom_msg->pose.pose.orientation.z);
    Eigen::Matrix4f slam_odom_T = Eigen::Matrix4f::Identity();
    slam_odom_T.block<3, 3>(0, 0) = slam_odom_q.toRotationMatrix();
    slam_odom_T.block<3, 1>(0, 3) = Eigen::Vector3f(slam_odom_msg->pose.pose.position.x, slam_odom_msg->pose.pose.position.y, slam_odom_msg->pose.pose.position.z);

    // Start the round-trip timer only when the detector is idle.
    if(share_men_instance_.p_share_data->img_flag == 0  ) // 
        t1_ = std::chrono::system_clock::now();
    // cout<<"cb 1"<<endl;
    cv_bridge::CvImagePtr image_front;
    image_front = cv_bridge::toCvCopy(front_image_msg, sensor_msgs::image_encodings::BGR8);
    img_ = (*image_front).image;
    send_img(img_);
    share_men_instance_.p_share_data->time_stamp=float(front_image_msg->header.stamp.toSec());

    //depth img: aligned to color, 16-bit, millimeters per unit
    depth_img_= cv_bridge::toCvCopy(depth_img_msg, sensor_msgs::image_encodings::TYPE_16UC1)->image;
    // std::cout << "depth image data: " << depth_img.at<uint16_t>(240, 320) << std::endl;  // reads the depth at pixel (240, 320); unit is millimeters
    //depth img8

    // Busy-wait until the detector marks results ready (img_flag == 3).
    while(share_men_instance_.p_share_data->img_flag != 3){
       ros::Duration(0.001).sleep();
    }

    Mat img_show;
    img_.copyTo(img_show);
    // The flag can only be 3 here (the loop above guarantees it).
    if (  share_men_instance_.p_share_data->img_flag == 3 ){  
        share_men_instance_.p_share_data->img_flag = 0;  // hand the buffer back to the detector
        cout<<"object num  "<<share_men_instance_.p_share_data->img_pre_object_size<<endl;
        for(int i = 0; i < share_men_instance_.p_share_data->img_pre_object_size; i++){
            // Box is stored as [x1, y1, x2, y2]; Rect wants top-left (x, y)
            // plus width (x extent) and height (y extent).
            Rect rect(share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i], share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i + 1],
                 share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i + 2] - share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i] , 
                 share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i + 3] - share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i + 1]);
            // cout<<"2dbbox  "<< share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i ] <<" "<<share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i + 1] <<" "<<
            //     share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i + 2] <<" "<<share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i + 3] <<" "<<endl;
            cv::rectangle(img_show, rect, Scalar(255, 0, 0),3, LINE_8,0);
            // Back-project the box center into the lidar frame; depth_bias_
            // is subtracted from the measured depth before projection.
            Point2i bbox_center;
            bbox_center.x = (share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i] + share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i + 2])/2;
            bbox_center.y = (share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i + 1] + share_men_instance_.p_share_data->img_pre_2dbbox_xyxy[4*i + 3])/2;
            Eigen::Vector4f center_pos_lidar(calib_.img2lidar(bbox_center.x , bbox_center.y ,get_depth(bbox_center, depth_img_) -depth_bias_  ));
            // center_pos_lidar =  ;
            cout<<"object num_id: "<<i<<endl;
            cout<<" depth: "<< get_depth(bbox_center,depth_img_) <<endl;
            cout<<"pos lidar:  "<<center_pos_lidar(0)<<" "<<center_pos_lidar(1)<<" "<<center_pos_lidar(2)<<endl;
            // Lift into the map frame using the SLAM pose.
            top_objs_.push_back(slam_odom_T*center_pos_lidar);
        }
        t2_ = std::chrono::system_clock::now();
        cout<<"cost time   "<<std::chrono::duration_cast<std::chrono::microseconds>( t2_-t1_ ).count()<<std::endl;
    }

    {
        // OpenCV images are BGR; publishing with the wrong encoding corrupts the display.
        sensor_msgs::ImagePtr msg = cv_bridge::CvImage(std_msgs::Header(), "bgr8", img_show).toImageMsg();
        image_pub_.publish(msg);
    }

    // Require at least 4 accumulated detections that agree with each other.
    if(top_objs_.size()<4||!is_obj_stable(top_objs_)){
        cout<<"ERROR!!!!!! top camera data is unstable"<<endl;
        return;
    }

    // Publish the object position as a navigation goal (identity orientation).
    geometry_msgs::PoseStamped target_pose;
    target_pose.header.stamp = ros::Time::now();
    target_pose.header.frame_id = "map";
    target_pose.pose.position.x = top_objs_[0](0);
    target_pose.pose.position.y = top_objs_[0](1);
    target_pose.pose.position.z = top_objs_[0](2);
    target_pose.pose.orientation.w = 1;
    target_pose.pose.orientation.x = 0;
    target_pose.pose.orientation.y = 0;
    target_pose.pose.orientation.z = 0;

    target_goal_pub_.publish(target_pose);
    is_get_obj_pos = true;  // latch: never publish the goal again

}

int main(int argc, char* argv[])
{
    ros::init(argc, argv, "det_handle_img");
    ros::NodeHandle nh;
    message_filters::Subscriber<sensor_msgs::Image> image_front_sub(nh, "/camera/color/image_raw", 5);
    message_filters::Subscriber<sensor_msgs::Image> depth_image_sub(nh, "/camera/aligned_depth_to_color/image_raw", 5);
    typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image,nav_msgs::Odometry> top_SyncPolicy;
    message_filters::Subscriber<nav_msgs::Odometry> slam_odom_sub(nh, "/odom", 5);
    // ApproximateTime takes a queue size as its constructor argument, hence MySyncPolicy(10)
    message_filters::Synchronizer<top_SyncPolicy> sync(top_SyncPolicy(20), image_front_sub, depth_image_sub,slam_odom_sub);
    sync.setMaxIntervalDuration(ros::Duration(0.05));
    // ROS_INFO("ok1");
    sync.registerCallback(boost::bind(&img_CB, _1, _2,_3));
    // ROS_INFO("ok2");

    message_filters::Subscriber<sensor_msgs::Image> hand_image_front_sub(nh, "/camera/color/image_raw", 5);
    message_filters::Subscriber<sensor_msgs::Image> hand_depth_image_sub(nh, "/camera/aligned_depth_to_color/image_raw", 5);
    typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image> MySyncPolicy;
    // ApproximateTime takes a queue size as its constructor argument, hence MySyncPolicy(10)
    message_filters::Synchronizer<MySyncPolicy> hand_sync(MySyncPolicy(20), hand_image_front_sub, hand_depth_image_sub);
    hand_sync.setMaxIntervalDuration(ros::Duration(0.05));
    // ROS_INFO("ok1");
    hand_sync.registerCallback(boost::bind(&hand_img_CB, _1, _2));

    image_transport::ImageTransport it(nh);
    image_pub_ = it.advertise("result_img", 1);

    //publish obj pos
    hand_obj_pos_pub_ = nh.advertise<nav_msgs::Odometry>("obj_pose", 10);
    target_goal_pub_ = nh.advertise<geometry_msgs::PoseStamped>("/move_base_simple/goal", 10);

    ros::spin();

    return 0;
}
