#include "parser.h"
#include "monocamera_trans.h"

#include <cmath>

#include <pcl/visualization/pcl_visualizer.h>
#define DATA_PATH "/media/data/datasets/kyxz/recent_data/"
#define AHD_H 768
#define AHD_W 1024

/**
 * @brief Build a 3x3 rotation matrix from three Euler angles (radians).
 *
 * The matrix is composed as R = Rx * Ry * Rz, i.e. the Z rotation is
 * applied first to a column vector, then Y, then X.
 *
 * @param rotation_vector pointer to at least 3 floats {rx, ry, rz}
 * @return 3x3 rotation matrix of type CV_32F
 */
cv::Mat transform(const float *rotation_vector)
{
    const float rx = rotation_vector[0];
    const float ry = rotation_vector[1];
    const float rz = rotation_vector[2];
    // Hoist the trig calls: each sine/cosine was previously evaluated
    // twice, and the float overloads avoid a float->double->float round trip.
    const float cx = std::cos(rx), sx = std::sin(rx);
    const float cy = std::cos(ry), sy = std::sin(ry);
    const float cz = std::cos(rz), sz = std::sin(rz);

    const cv::Mat lidarRx = (cv::Mat_<float>(3,3) << 1,  0,   0,
                                                     0, cx, -sx,
                                                     0, sx,  cx);
    const cv::Mat lidarRy = (cv::Mat_<float>(3,3) <<  cy, 0, sy,
                                                       0, 1,  0,
                                                     -sy, 0, cy);
    const cv::Mat lidarRz = (cv::Mat_<float>(3,3) << cz, -sz, 0,
                                                     sz,  cz, 0,
                                                      0,   0, 1);
    return lidarRx * lidarRy * lidarRz;
}

int main(int argc, char** argv)
{

    ros::init(argc, argv, "test_show");
    
    // char ebuf[PCAP_ERRBUF_SIZE];
    // 传到回调函数里两个参数，第一个参数是用来统计当前是第几个packet，第二个参数是当前packet的时间戳

    std::string data_path = DATA_PATH;
    const char* filepath = "/media/data/datasets/kyxz/recent_data/Raw-001-HDL32.pcap";
    Parser parser(filepath);
    
    parser.init();
    // parser.decoder_points();
    // int64_t time = 1605416133932390;
    int64_t time = 1605416068596306;
    
    // float rx = 0.009483625;
    // float ry = -0.035041669;
    // float rz = 4.39822964;
    // cv::Mat lidarRx = (cv::Mat_<float>(3,3) << 1,    0,    0,
    //                                             0, cos(rx), -sin(rx), 
    //                                             0, sin(rx), cos(rx));
    // cv::Mat lidarRy = (cv::Mat_<float>(3,3) << cos(ry), 0, sin(ry),
    //                                          0,        1,    0,
    //                                         -sin(ry), 0, cos(ry));
    // cv::Mat lidarRz = (cv::Mat_<float>(3,3) << cos(rz), -sin(rz), 0, 
    //                                             sin(rz), cos(rz), 0,
    //                                             0,      0,      1);
    /**/



    const float lidar_rotation_vector_[3] = {0.009483625, -0.035041669, 4.39822964};   
    const cv::Mat lidar_translation_vector_ = (cv::Mat_<float>(3,1) << 0, 1.120, 2.460);
    const float camera_rotation_vector_[3] = {1.5490272693, -0.021000000, 0.};   
    const cv::Mat camera_translation_vector_ = (cv::Mat_<float>(3,1) << 0, 1.760, 1.877);

    cv::Mat lidarR = transform(lidar_rotation_vector_);
    cv::Mat lidarT = (cv::Mat_<float>(3,1) << 0, 1.120, 2.460);

    // cv::Mat cameraR = transform(camera_rotation_vector_);
    float rx = 1.5690272693f;
    float ry = -0.021000000f;
    float rz = 0.f;
    cv::Mat cameraR = (cv::Mat_<float>(3,3) << cos(ry)*cos(rz), cos(rz)*sin(rx)*sin(ry)-cos(rx)*sin(rz), sin(rx)*sin(rz)+cos(rx)*cos(rz)*sin(ry),
                                            cos(ry)*sin(rz),    sin(rx)*sin(ry)*sin(rz)+cos(rx)*cos(rz), cos(rx)*sin(ry)*sin(rz)-cos(rz)*sin(rx),
                                             -sin(ry),                      cos(ry)*sin(rx),                          cos(rx)*cos(ry));
        cv::Mat cameraT = (cv::Mat_<float>(3,1) << 0,1.760,1.877);

    pcl::PointCloud<pcl::PointXYZI> cloud = parser.get_point_frame(time);
    // // cv::Mat image = cv::imread("/media/data/datasets/kyxz/recent_data/Raw-001-Camera/1605416133957739.png");
    cv::Mat image = cv::imread("/media/data/datasets/kyxz/recent_data/Raw-001-Camera/1605416068558455.png");

    // pcl::PointCloud<pcl::PointXYZ>::Ptr lidarshow_point(new pcl::PointCloud<pcl::PointXYZ>);
    // pcl::PointCloud<pcl::PointXYZ>::Ptr egoshow_point(new pcl::PointCloud<pcl::PointXYZ>);
    // pcl::PointCloud<pcl::PointXYZ>::Ptr camerashow_point(new pcl::PointCloud<pcl::PointXYZ>);
    CoordinateTrans velo32C_trans_; /**< Velodyne 到车体的转换关系 */
	MonoCameraTrans mono_trans_; /**< 相机坐标到车体的转换关系 */
    mono_trans_.LoadCameraCalib((data_path + "/Raw-001-CalibParams/Raw-001-Camera.camera").c_str());
	velo32C_trans_.LoadCalib((data_path + "/Raw-001-CalibParams/Raw-001-HDL32-E.txt").c_str());
    for(int i = 0; i < cloud.size(); i++)
    {
        float x = cloud.points[i].x;
        float y = cloud.points[i].y;
        float z = cloud.points[i].z;
        if ((x*x + y *y + z*z) >225)
        continue;

        Point3d veloLocalPt; /**< 从*.pcap文件解析出来的一个激光点（HDL32局部坐标系下）*/
        veloLocalPt.x = x;
        veloLocalPt.y = y;
        veloLocalPt.z = z;
	    Point3d vehiclePt; /**< 转换到车体局部坐标系的点 */
	    velo32C_trans_.LocalP2VehicleP(veloLocalPt, vehiclePt);

	    double imgx, imgy;
	    mono_trans_.VehicleP2ImageP(vehiclePt, imgx, imgy); /**< 车体坐标系到图像坐标 */
        if (imgx < 0 || imgx >= AHD_W || imgy < 0 || imgy >= AHD_H)
            continue;
        ROS_INFO("%lf %lf", imgx, imgy);
        cv::circle(image, cv::Point(imgx, imgy), 1, cv::Scalar(0, 0, 255), -1);

        // x = -11;
        // y = -5;
        // z = -0.55;
        // lidarshow_point->push_back(pcl::PointXYZ(x, y, z));

        // std::cout << "Point " << i << ": " << x << " " << y << " " << z << std::endl;
        // cv::Mat X = (cv::Mat_<float>(3,1) << x, y, z);
        // cv::Mat Y = lidarR * X + lidarT; // 转到车体坐标系
        // // std::cout << Y << std::endl;
        // Y= Y-cameraT;
        // // egoshow_point->push_back(pcl::PointXYZ(Y.at<float>(0,0), Y.at<float>(1,0), Y.at<float>(2,0)));
        // /*车体转到相机坐标系*/
        // // Y = (cv::Mat_<float>(3,1) << Y.at<float>(0,0), -Y.at<float>(1,0), Y.at<float>(2,0));
        // cv::Mat Camera_coor = cameraR * Y;

        // // std::cout << Camera_coor << std::endl;
        // float xi = Camera_coor.at<float>(0,0);
        // float yi = -Camera_coor.at<float>(1,0);
        // float zi = Camera_coor.at<float>(2,0);
        // if (zi < 0)
        // continue;
        // // camerashow_point->push_back(pcl::PointXYZ(xi, yi, zi));

        // int xu = 20.513754829 * xi / zi / 0.023 + 512.14658869;
        // int yu = 20.51362425 * yi / zi / 0.023 + 384.83460355;
        // yu = 768 - yu;
        // ROS_INFO("%d %d", xu, yu);
        // if (xu < 0 || xu >= 1024 || yu < 0 || yu >= 768)
        //     continue;
        // cv::circle(image, cv::Point(xu, yu), 3, cv::Scalar(0, 0, 255), -1);
        // if(Camera_coor.at<float>(2,0)  >20)
        // std::cout << Camera_coor.at<float>(2,0) << std::endl;
        // }
        // break;

    }
    // pcl::visualization::PCLVisualizer::Ptr viewerlidar(new pcl::visualization::PCLVisualizer);
    // pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ> blue(lidarshow_point, 0, 0, 255);
    // viewerlidar->addPointCloud(lidarshow_point, blue, "lidar point");

    // pcl::visualization::PCLVisualizer::Ptr viewerego(new pcl::visualization::PCLVisualizer);
    // pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ> green(egoshow_point, 0, 255, 0);
    // viewerego->addPointCloud(egoshow_point, green, "ego point");

    // pcl::visualization::PCLVisualizer::Ptr viewercamera(new pcl::visualization::PCLVisualizer);
    // pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ> red(camerashow_point, 255, 0, 0);
    // viewercamera->addPointCloud(camerashow_point, red, "camera point");
    
    cv::imshow("show", image);
    // while(!viewerlidar->wasStopped() /*&& !viewerego->wasStopped() && !viewercamera->wasStopped()*/)
    // {
    //     viewerlidar->spinOnce();
        // viewerego->spinOnce();
    //     // viewercamera->spinOnce();
    // }
    // viewerlidar->spin();
        // viewerego->spin();
        // viewercamera->spin();
    cv::waitKey();

    return 0;
}


