// std
#include <algorithm>
#include <cmath>
#include <fstream>
#include <string>
#include <vector>
// ros
#include "std_msgs/String.h"
#include <sensor_msgs/Image.h>
#include <sensor_msgs/PointCloud2.h>
#include <ros/package.h>
#include <message_filters/subscriber.h>
#include <message_filters/time_synchronizer.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
// image
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <cv_bridge/cv_bridge.h>
#include <image_transport/image_transport.h>
#include <tf2/convert.h>
#include <tf/transform_broadcaster.h>
#include <tf/transform_datatypes.h>
#include <Eigen/Dense>

// pcl
#include <pcl/io/pcd_io.h>
#include <pcl/point_types.h>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl/point_cloud.h>
#include "point_xyzirt.h"
#include <pcl/kdtree/kdtree_flann.h>
#include <pcl/kdtree/flann.h>
#include "pcl_ros/transforms.h"
// project color: jet-style {r,g,b} colormap (values in [0,1]) indexed by point range;
// image_projection maps 0-30 m onto indices 0-49.
// NOTE(review): only 49 rows are initialized for a [50][3] array (the blue->cyan ramp
// skips a {0, 0.3077, 1.0} step), so index 49 is zero-initialized black — confirm intended.
double colmap[50][3] = {{0, 0, 0.5385}, {0, 0, 0.6154}, {0, 0, 0.6923}, {0, 0, 0.7692}, {0, 0, 0.8462}, {0, 0, 0.9231}, {0, 0, 1.0000}, {0, 0.0769, 1.0000}, {0, 0.1538, 1.0000}, {0, 0.2308, 1.0000}, {0, 0.3846, 1.0000}, {0, 0.4615, 1.0000}, {0, 0.5385, 1.0000}, {0, 0.6154, 1.0000}, {0, 0.6923, 1.0000}, {0, 0.7692, 1.0000}, {0, 0.8462, 1.0000}, {0, 0.9231, 1.0000}, {0, 1.0000, 1.0000}, {0.0769, 1.0000, 0.9231}, {0.1538, 1.0000, 0.8462}, {0.2308, 1.0000, 0.7692}, {0.3077, 1.0000, 0.6923}, {0.3846, 1.0000, 0.6154}, {0.4615, 1.0000, 0.5385}, {0.5385, 1.0000, 0.4615}, {0.6154, 1.0000, 0.3846}, {0.6923, 1.0000, 0.3077}, {0.7692, 1.0000, 0.2308}, {0.8462, 1.0000, 0.1538}, {0.9231, 1.0000, 0.0769}, {1.0000, 1.0000, 0}, {1.0000, 0.9231, 0}, {1.0000, 0.8462, 0}, {1.0000, 0.7692, 0}, {1.0000, 0.6923, 0}, {1.0000, 0.6154, 0}, {1.0000, 0.5385, 0}, {1.0000, 0.4615, 0}, {1.0000, 0.3846, 0}, {1.0000, 0.3077, 0}, {1.0000, 0.2308, 0}, {1.0000, 0.1538, 0}, {1.0000, 0.0769, 0}, {1.0000, 0, 0}, {0.9231, 0, 0}, {0.8462, 0, 0}, {0.7692, 0, 0}, {0.6923, 0, 0}};

// Extrinsic calibration given directly as a 3x4 [R|t] matrix, read row by row
// from the config file (m1 m2 m3 x / m4 m5 m6 y / m7 m8 m9 z) when
// rot_type == ROT_TYPE_MATRIX. Field order matches the file layout.
struct Rot_Trans_Euler
{
    double m1; // rotation row 0
    double m2;
    double m3;
    double x;  // translation x (appended to row 0 in the 4x4 transform)
    double m4; // rotation row 1
    double m5;
    double m6;
    double y;  // translation y
    double m7; // rotation row 2
    double m8;
    double m9;
    double z;  // translation z
} eul_m; // single global instance filled in main(), consumed by image_projection()

// Extrinsic calibration as Euler angles (roll/pitch/yaw) plus a translation,
// used when rot_type == ROT_TYPE_EULER.
struct Rot_Trans
{
    double e1; // Joint (Rotation and translation) optimization variables
    double e2;
    double e3;
    double x;
    double y;
    double z;
    // Render the six parameters as "{e1:..., e2:..., e3:..., x:..., y:..., z:...}".
    std::string to_string() const
    {
        std::string out("{");
        out += "e1:" + std::to_string(e1);
        out += ", e2:" + std::to_string(e2);
        out += ", e3:" + std::to_string(e3);
        out += ", x:" + std::to_string(x);
        out += ", y:" + std::to_string(y);
        out += ", z:" + std::to_string(z);
        out += "}";
        return out;
    }
} eul_t, eul_it;



// All parameters read from cfg/reproject_params_autoware.txt in main().
struct initial_parameters
{
    std::string camera_topic;                 // image topic to subscribe to
    std::string lidar_topic;                  // point-cloud topic to subscribe to
    bool fisheye_model;                       // true: fisheye distortion model; false: pinhole
    int lidar_ring_count;                     // read from config; not used in this file
    std::pair<int, int> grid_size;            // not read from config in this file
    int square_length;                        // in millimetres
    std::pair<int, int> board_dimension;      // in millimetres
    std::pair<int, int> cb_translation_error; // in millimetres
    cv::Mat cameramat;                        // 3x3 CV_64F intrinsic matrix
    int distcoeff_num;                        // number of distortion coefficients in the file
    cv::Mat distcoeff;                        // 1 x distcoeff_num CV_64F distortion coefficients
    std::pair<int, int> image_size;           // (width, height) used for the in-bounds check
} i_params; // single global instance filled in main()


//  define variable
image_transport::Publisher pub_img_dist; // publishes the colorized projection image ("image_projection")
#define PI 3.141592653589793238463
 Rot_Trans rot_trans; // Euler + translation extrinsics, filled in main() when rot_type == ROT_TYPE_EULER


#define ROT_TYPE_EULER 1  // config supplies roll/pitch/yaw + translation (Rot_Trans)
#define ROT_TYPE_MATRIX 2 // config supplies a full 3x4 [R|t] matrix (Rot_Trans_Euler)
int rot_type=ROT_TYPE_EULER; // representation selector; overwritten from the config file in main()

double *converto_imgpts(double x, double y, double z)
{
    double tmpxC;
    tmpxC = x / z;
    double tmpyC = y / z;
    cv::Point2d planepointsC;

    planepointsC.x = tmpxC;
    planepointsC.y = tmpyC;

    double r2 = tmpxC * tmpxC + tmpyC * tmpyC;

    if (i_params.fisheye_model)
    {
        double r1 = pow(r2, 0.5);
        double a0 = std::atan(r1);
        double a1 = a0 * (1 + i_params.distcoeff.at<double>(0) * pow(a0, 2) + i_params.distcoeff.at<double>(1) * pow(a0, 4) + i_params.distcoeff.at<double>(2) * pow(a0, 6) + i_params.distcoeff.at<double>(3) * pow(a0, 8));
        planepointsC.x = (a1 / r1) * tmpxC;
        planepointsC.y = (a1 / r1) * tmpyC;
        planepointsC.x = i_params.cameramat.at<double>(0, 0) * planepointsC.x + i_params.cameramat.at<double>(0, 2);
        planepointsC.y = i_params.cameramat.at<double>(1, 1) * planepointsC.y + i_params.cameramat.at<double>(1, 2);
    }
    else // For pinhole camera model
    {
        double tmpdist = 1 + i_params.distcoeff.at<double>(0) * r2 + i_params.distcoeff.at<double>(1) * r2 * r2 +
                         i_params.distcoeff.at<double>(4) * r2 * r2 * r2;
        planepointsC.x = tmpxC * tmpdist + 2 * i_params.distcoeff.at<double>(2) * tmpxC * tmpyC +
                         i_params.distcoeff.at<double>(3) * (r2 + 2 * tmpxC * tmpxC);
        planepointsC.y = tmpyC * tmpdist + i_params.distcoeff.at<double>(2) * (r2 + 2 * tmpyC * tmpyC) +
                         2 * i_params.distcoeff.at<double>(3) * tmpxC * tmpyC;
        planepointsC.x = i_params.cameramat.at<double>(0, 0) * planepointsC.x + i_params.cameramat.at<double>(0, 2);
        planepointsC.y = i_params.cameramat.at<double>(1, 1) * planepointsC.y + i_params.cameramat.at<double>(1, 2);
    }

    double *img_coord = new double[2];
    *(img_coord) = planepointsC.x;
    *(img_coord + 1) = planepointsC.y;

    return img_coord;
}
// Return a copy of the input cloud sorted by increasing distance from the
// sensor origin (0,0,0), using a K-nearest-neighbour query with K = cloud size.
// Only x/y/z/intensity/ring are copied into the output.
// NOTE(review): if pcl::RsPointXYZIRT carries a timestamp field it is dropped
// here — confirm against point_xyzirt.h.
pcl::PointCloud<pcl::RsPointXYZIRT> organized_pointcloud(pcl::PointCloud<pcl::RsPointXYZIRT>::Ptr input_pointcloud)
{
    pcl::PointCloud<pcl::RsPointXYZIRT> organized_pc;
    // Robustness: an empty/null cloud would make setInputCloud / nearestKSearch misbehave.
    if (!input_pointcloud || input_pointcloud->points.empty())
        return organized_pc;

    // Kdtree to sort the point cloud
    pcl::KdTreeFLANN<pcl::RsPointXYZIRT> kdtree;
    kdtree.setInputCloud(input_pointcloud);

    pcl::RsPointXYZIRT searchPoint; // camera position as target
    searchPoint.x = 0.0f;
    searchPoint.y = 0.0f;
    searchPoint.z = 0.0f;

    const int K = static_cast<int>(input_pointcloud->points.size());
    std::vector<int> pointIdxNKNSearch(K);
    std::vector<float> pointNKNSquaredDistance(K);

    // Sort the point cloud based on distance to the camera.
    // nearestKSearch returns neighbours ordered by ascending distance.
    if (kdtree.nearestKSearch(searchPoint, K, pointIdxNKNSearch, pointNKNSquaredDistance) > 0)
    {
        // FIX: reserve up front — the old push_back loop reallocated repeatedly.
        organized_pc.points.reserve(pointIdxNKNSearch.size());
        for (size_t i = 0; i < pointIdxNKNSearch.size(); ++i)
        {
            const pcl::RsPointXYZIRT &src = input_pointcloud->points[pointIdxNKNSearch[i]];
            pcl::RsPointXYZIRT point;
            point.x = src.x;
            point.y = src.y;
            point.z = src.z;
            point.intensity = src.intensity;
            point.ring = src.ring;
            organized_pc.push_back(point);
        }
    }

    // Return sorted point cloud
    return (organized_pc);
}
// Synchronized image + point-cloud callback: transforms the lidar cloud into the
// camera frame (extrinsics selected by rot_type), projects every surviving point
// onto the image, colors it by range, and publishes the result on pub_img_dist.
void image_projection(const sensor_msgs::Image::ConstPtr &img, const sensor_msgs::PointCloud2::ConstPtr &pc)
{
    ROS_INFO("coming here");
    cv_bridge::CvImagePtr cv_ptr = cv_bridge::toCvCopy(img, "bgr8");
    cv::Mat new_image_raw = cv_ptr->image;

    pcl::PointCloud<pcl::RsPointXYZIRT>::Ptr cloud(new pcl::PointCloud<pcl::RsPointXYZIRT>);
    pcl::fromROSMsg(*pc, *cloud);
    if (cloud->size() < 1)
        return;

    // Build the lidar -> camera transform (inverse of the configured camera -> lidar [R|t]).
    Eigen::Affine3f transform_A = Eigen::Affine3f::Identity();
    if (rot_type == ROT_TYPE_EULER)
    {
        // Extrinsic parameter: Transform Velodyne -> camera from roll/pitch/yaw + translation
        tf::Matrix3x3 rot;
        rot.setRPY(rot_trans.e1, rot_trans.e2, rot_trans.e3);

        Eigen::MatrixXf t1(4, 4), t2(4, 4);
        t1 << rot.getRow(0)[0], rot.getRow(0)[1], rot.getRow(0)[2], rot_trans.x,
            rot.getRow(1)[0], rot.getRow(1)[1], rot.getRow(1)[2], rot_trans.y,
            rot.getRow(2)[0], rot.getRow(2)[1], rot.getRow(2)[2], rot_trans.z,
            0, 0, 0, 1;
        t2 = t1.inverse();

        // BUG FIX: the old code declared a second, shadowing `transform_A` inside this
        // branch, so the outer transform stayed Identity and the Euler extrinsics were
        // silently ignored. Write into the outer transform instead.
        transform_A.matrix() << t2(0, 0), t2(0, 1), t2(0, 2), t2(0, 3),
            t2(1, 0), t2(1, 1), t2(1, 2), t2(1, 3),
            t2(2, 0), t2(2, 1), t2(2, 2), t2(2, 3),
            t2(3, 0), t2(3, 1), t2(3, 2), t2(3, 3);
    }
    else if (rot_type == ROT_TYPE_MATRIX)
    {
        // Config supplies the 3x4 [R|t] directly (row-major in eul_m).
        Eigen::MatrixXf t1(4, 4), t2(4, 4);
        t1 << eul_m.m1, eul_m.m2, eul_m.m3, eul_m.x,
            eul_m.m4, eul_m.m5, eul_m.m6, eul_m.y,
            eul_m.m7, eul_m.m8, eul_m.m9, eul_m.z,
            0, 0, 0, 1;
        t2 = t1.inverse();

        transform_A.matrix() << t2(0, 0), t2(0, 1), t2(0, 2), t2(0, 3),
            t2(1, 0), t2(1, 1), t2(1, 2), t2(1, 3),
            t2(2, 0), t2(2, 1), t2(2, 2), t2(2, 3),
            t2(3, 0), t2(3, 1), t2(3, 2), t2(3, 3);
    }

    // Points sorted by range from the sensor before drawing.
    pcl::PointCloud<pcl::RsPointXYZIRT> organized = organized_pointcloud(cloud);

    for (pcl::PointCloud<pcl::RsPointXYZIRT>::const_iterator it = organized.begin(); it != organized.end(); ++it)
    {
        pcl::RsPointXYZIRT itA = pcl::transformPoint(*it, transform_A);
        // Keep only points in front of the camera, inside a |x/z| <= 1.2 cone, closer than 10 m.
        if (itA.z < 0 or std::abs(itA.x / itA.z) > 1.2 or itA.z > 10)
            continue;

        double *img_pts = converto_imgpts(itA.x, itA.y, itA.z);
        double length = sqrt(pow(itA.x, 2) + pow(itA.y, 2) + pow(itA.z, 2)); // range of every point
        int color = std::min(round((length / 30) * 49), 49.0);               // 0-30 m -> colormap index

        if (img_pts[1] >= 0 and img_pts[1] < i_params.image_size.second and img_pts[0] >= 0 and img_pts[0] < i_params.image_size.first)
        {
            cv::circle(new_image_raw, cv::Point(img_pts[0], img_pts[1]), 3,
                       CV_RGB(255 * colmap[color][0], 255 * colmap[color][1], 255 * colmap[color][2]), -1);
        }
        // BUG FIX: converto_imgpts returns a heap-allocated double[2]; the old code
        // leaked it for every projected point of every frame.
        delete[] img_pts;
    }

    // Publish the image projection
    ros::Time time = ros::Time::now();
    cv_ptr->encoding = "bgr8";
    cv_ptr->header.stamp = time;
    cv_ptr->header.frame_id = "/traj_output";
    cv_ptr->image = new_image_raw;
    pub_img_dist.publish(cv_ptr->toImageMsg());
}

int main(int argc, char **argv)
{
    ROS_INFO("project2image");
    ros::init(argc, argv, "project2image");
    ros::NodeHandle n;

    // read param
    std::string pkg_loc = ros::package::getPath("reproject");
    std::ifstream infile(pkg_loc + "/cfg/reproject_params_autoware.txt");
    infile >> i_params.camera_topic;
    infile >> i_params.lidar_topic;
    infile >> i_params.fisheye_model;
    infile >> i_params.lidar_ring_count;
    double camera_mat[9];
    for (int i = 0; i < 9; i++)
    {
        infile >> camera_mat[i];
    }

    cv::Mat(3, 3, CV_64F, &camera_mat).copyTo(i_params.cameramat);

    infile >> i_params.distcoeff_num;
    double dist_coeff[i_params.distcoeff_num];
    for (int i = 0; i < i_params.distcoeff_num; i++)
    {
        infile >> dist_coeff[i];
    }
    cv::Mat(1, i_params.distcoeff_num, CV_64F, &dist_coeff).copyTo(i_params.distcoeff);
    int  i_l, i_b;
    infile >> i_l;
    infile >> i_b;
    i_params.image_size = std::make_pair(i_l, i_b);


    infile>> rot_type;
    if(rot_type ==ROT_TYPE_EULER)
    {
        infile>>rot_trans.e1;
        infile>>rot_trans.e2;
        infile>>rot_trans.e3;
        infile>>rot_trans.x;
        infile>>rot_trans.y;
        infile>>rot_trans.z;

    }
    else if(rot_type ==ROT_TYPE_MATRIX)
    {
        infile>>eul_m.m1;
        infile>>eul_m.m2;
        infile>>eul_m.m3;
        infile>>eul_m.x;
        infile>>eul_m.m4;
        infile>>eul_m.m5;
        infile>>eul_m.m6;
        infile>>eul_m.y;
        infile>>eul_m.m7;
        infile>>eul_m.m8;
        infile>>eul_m.m9;
        infile>>eul_m.z;

    }



    // subscriber
    message_filters::Subscriber<sensor_msgs::Image> image_sub(n, i_params.camera_topic, 5);
    message_filters::Subscriber<sensor_msgs::PointCloud2> pcl_sub(n, i_params.lidar_topic, 5);
    typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::PointCloud2> MySyncPolicy;
    message_filters::Synchronizer<MySyncPolicy> sync(MySyncPolicy(5), image_sub, pcl_sub);
    sync.registerCallback(boost::bind(&image_projection, _1, _2));

    // publisher
    image_transport::ImageTransport it(n);
    pub_img_dist = it.advertise("image_projection", 20);

    ros::spin();
    return 0;
}