#include "senyun_driver/senyun_driver.h"
#include <boost/lexical_cast.hpp>

namespace senyun_driver
{

    /// Nodelet entry point: reads parameters, loads camera calibration (K/D),
    /// starts the GMSL camera and spawns the frame-processing loop thread.
    void SenyunDriver::onInit()
    {
        init_nodelet();

        // CREATE_MONITOR_PUBLISHER(wait_for_monitor("stereo_driver"), 1);

        // NOTE: boost::lexical_cast<bool> only accepts "0"/"1"; values such as
        // "true"/"false" in the parameter file would throw bad_lexical_cast.
        use_undis_ = boost::lexical_cast<bool>(params_["use_undis"]);
        use_compress_ = boost::lexical_cast<bool>(params_["use_compress"]);
        extern_use_crop_ = boost::lexical_cast<bool>(params_["use_crop"]);
        extern_use_undis_ = use_undis_;
        extern_use_compress_ = use_compress_;
        device_name_ = params_["device_name"];

        cv::FileStorage fs(params_["calib_file"], cv::FileStorage::READ);
        if (!fs.isOpened())
        {
            std::cout << "failed to open " << params_["calib_file"] << std::endl;
            // BUGFIX: previously execution fell through and read K/D from the
            // unopened storage, leaving empty matrices that crash loop() later
            // (distort_.at<double> on an empty Mat). Fail fast instead.
            return;
        }
        fs["K"] >> intrinsic_matrix;  // 3x3 camera intrinsic matrix
        fs["D"] >> distort_;          // distortion coefficients (5 or 8 values)
        printf("***** raw col %d, %d \n",distort_.rows,distort_.cols);
        fs.release();

        camera_handler = std::make_shared<GmslCameraHandler>(device_name_);
        camera_handler->start_camera();

        printf("camera initiated.\r\n");

        CREATE_PUBLISHER<sensor_msgs::Image>("im_topic", params_["im_topic"], 1, false);
        // Run the capture/undistort/publish pipeline on a dedicated thread.
        thread_ = std::thread(std::bind(&SenyunDriver::loop, this));
    }

    /// Main capture/processing loop (runs on thread_ until ros::ok() is false).
    /// Per-frame pipeline: UYVY capture -> symmetric pad to PIXL_WIDTH_N ->
    /// NV12 (VIC) -> optional half-size rescale -> optional remap/undistortion
    /// -> BGRA (VIC) -> BGR cv::Mat (CPU) -> publish as sensor_msgs::Image.
    void SenyunDriver::loop()
    {
        struct timeval ts1;
        struct timeval ts2;

        vpiInitConvertImageFormatParams(&cvtParams);
        cvtParams.policy = VPI_CONVERSION_CLAMP;
        cvtParams.scale = 1;
        cvtParams.offset = 0;
        cvtParams.flags = 0;

        // Force the output mats to 3-channel BGR so the BGR8 VPI wrappers
        // created below see the layout they expect.
        cv::cvtColor(output_image, output_image, cv::COLOR_RGBA2BGR);
        cv::cvtColor(output_image_resize, output_image_resize, cv::COLOR_RGBA2BGR);

        printf("init vpi.\r\n");
        // Full-size and half-size intermediates for the NV12/BGRA stages.
        CHECK_VPI_STATUS(vpiImageCreate(PIXL_WIDTH_N, 1080, VPI_IMAGE_FORMAT_NV12_ER, 0, &image_vpi_NV12ER_in));
        CHECK_VPI_STATUS(vpiImageCreate(PIXL_WIDTH_N / 2, 1080 / 2, VPI_IMAGE_FORMAT_NV12_ER, 0, &image_vpi_NV12ER_in_resize));
        CHECK_VPI_STATUS(vpiImageCreate(PIXL_WIDTH_N, 1080, VPI_IMAGE_FORMAT_NV12_ER, 0, &image_vpi_NV12ER_out));
        CHECK_VPI_STATUS(vpiImageCreate(PIXL_WIDTH_N / 2, 1080 / 2, VPI_IMAGE_FORMAT_NV12_ER, 0, &image_vpi_NV12ER_out_resize));
        CHECK_VPI_STATUS(vpiImageCreate(PIXL_WIDTH_N, 1080, VPI_IMAGE_FORMAT_BGRA8, 0, &image_vpi_BGRA8));
        CHECK_VPI_STATUS(vpiImageCreate(PIXL_WIDTH_N / 2, 1080 / 2, VPI_IMAGE_FORMAT_BGRA8, 0, &image_vpi_BGRA8_resize));

        // Final-stage images wrap the cv::Mat buffers directly (zero-copy).
        CHECK_VPI_STATUS(vpiImageCreateOpenCVMatWrapper(output_image, 0, &image_vpi_BGR8));
        CHECK_VPI_STATUS(vpiImageCreateOpenCVMatWrapper(output_image_resize, 0, &image_vpi_BGR8_resize));

        // Prime the first frame so input_image wraps valid data before the VPI
        // UYVY wrapper is created below.
        camera_handler->read_frame();

        if (extern_use_crop_)
        {
            // Skip the first 228 sensor rows, then wrap the remaining 1080-row
            // UYVY region (2 bytes/pixel). No copy is made: the Mat aliases the
            // camera's output buffer.
            input_image = cv::Mat(1080, PIXL_WIDTH, CV_8UC2, camera_handler->out_yuv_buffer + 228* PIXL_WIDTH * 2, PIXL_WIDTH * 2);
        }
        else
        {
            input_image = cv::Mat(1080, PIXL_WIDTH, CV_8UC2, camera_handler->out_yuv_buffer, PIXL_WIDTH * 2);
        }
        // BUGFIX: pad in BOTH branches. Previously only the crop path filled
        // input_image_new, so the non-crop path wrapped an empty Mat below.
        // {128, 0} is UYVY mid-gray for the constant border.
        cv::copyMakeBorder(input_image, input_image_new, 0, 0, (PIXL_WIDTH_N - PIXL_WIDTH) / 2, (PIXL_WIDTH_N - PIXL_WIDTH) / 2, cv::BORDER_CONSTANT, cv::Scalar(128, 0));

        CHECK_VPI_STATUS(vpiImageCreateOpenCVMatWrapper(input_image_new, VPI_IMAGE_FORMAT_UYVY, 0, &image_vpi_UYVY));

        // Allocate a dense warp map covering the full padded frame.
        VPIWarpMap map = {};
        map.grid.numHorizRegions = 1;
        map.grid.numVertRegions = 1;
        map.grid.regionWidth[0] = PIXL_WIDTH_N;
        map.grid.regionHeight[0] = 1080;
        map.grid.horizInterval[0] = 1;
        map.grid.vertInterval[0] = 1;
        CHECK_VPI_STATUS(vpiWarpMapAllocData(&map));

        // Dense warp map for the half-size image.
        VPIWarpMap map_resize = {};
        map_resize.grid.numHorizRegions = 1;
        map_resize.grid.numVertRegions = 1;
        map_resize.grid.regionWidth[0] = PIXL_WIDTH_N / 2;
        map_resize.grid.regionHeight[0] = 1080 / 2;
        map_resize.grid.horizInterval[0] = 1;
        map_resize.grid.vertInterval[0] = 1;
        CHECK_VPI_STATUS(vpiWarpMapAllocData(&map_resize));

        // Lens distortion model from the calibration coefficients.
        // BUGFIX: zero-initialize -- previously the 5-coefficient branch left
        // k4/k5/k6 as stack garbage, corrupting the generated warp map.
        VPIPolynomialLensDistortionModel distModel = {};
        if (distort_.rows == 5)
        {
            // OpenCV order: k1, k2, p1, p2, k3.
            distModel.k1 = distort_.at<double>(0, 0);
            distModel.k2 = distort_.at<double>(0, 1);
            distModel.p1 = distort_.at<double>(0, 2);
            distModel.p2 = distort_.at<double>(0, 3);
            distModel.k3 = distort_.at<double>(0, 4);
        }
        else
        {
            // Rational model: k1, k2, p1, p2, k3, k4, k5, k6.
            // NOTE(review): assumes exactly 8 coefficients here -- confirm the
            // calibration file never provides 4, 12, or 14.
            distModel.k1 = distort_.at<double>(0, 0);
            distModel.k2 = distort_.at<double>(0, 1);
            distModel.p1 = distort_.at<double>(0, 2);
            distModel.p2 = distort_.at<double>(0, 3);
            distModel.k3 = distort_.at<double>(0, 4);
            distModel.k4 = distort_.at<double>(0, 5);
            distModel.k5 = distort_.at<double>(0, 6);
            distModel.k6 = distort_.at<double>(0, 7);
        }

        // Camera intrinsics; cx is shifted by the horizontal padding added to
        // reach PIXL_WIDTH_N.
        VPICameraIntrinsic K;
        K[0][0] = intrinsic_matrix.at<double>(0, 0);
        K[1][1] = intrinsic_matrix.at<double>(1, 1);
        K[0][2] = intrinsic_matrix.at<double>(0, 2)+(PIXL_WIDTH_N-PIXL_WIDTH)/2;
        K[1][2] = intrinsic_matrix.at<double>(1, 2);

        // Camera extrinsics: identity (no rotation/translation).
        VPICameraExtrinsic X = {};
        X[0][0] = X[1][1] = X[2][2] = 1;

        if (use_compress_)
        {
            // Half-resolution output: halve the intrinsics, then generate the
            // undistortion warp map for the resized image.
            K[0][0] = K[0][0]/2;
            K[0][2] = K[0][2]/2;
            K[1][1] = K[1][1]/2;
            K[1][2] = K[1][2]/2;
            CHECK_VPI_STATUS(vpiWarpMapGenerateFromPolynomialLensDistortionModel(K, X, K, &distModel, &map_resize));
        }
        else
        {
            // Full-resolution undistortion warp map.
            CHECK_VPI_STATUS(vpiWarpMapGenerateFromPolynomialLensDistortionModel(K, X, K, &distModel, &map));
        }

        CHECK_VPI_STATUS(vpiStreamCreate(0, &stream));

        // Remap payload bound to whichever map matches the output resolution.
        VPIPayload remap;
        if (use_compress_)
        {
            CHECK_VPI_STATUS(vpiCreateRemap(VPI_BACKEND_VIC, &map_resize, &remap));
        }
        else
        {
            CHECK_VPI_STATUS(vpiCreateRemap(VPI_BACKEND_VIC, &map, &remap));
        }

        while (ros::ok())
        {
            // monitor_info_publish(0);

            gettimeofday(&ts1, NULL);
            fflush(stdout);

            // Blocks until the next frame lands in camera_handler's buffer
            // (which input_image aliases).
            camera_handler->read_frame();

            // Re-pad the fresh frame and refresh the VPI wrapper. VPI does not
            // copy wrapped data, so input_image_new must stay alive.
            if (image_vpi_UYVY == nullptr)
            {
                // Defensive: recreate the wrapper if it was never made.
                cv::copyMakeBorder(input_image,input_image_new,0,0,(PIXL_WIDTH_N-PIXL_WIDTH)/2,(PIXL_WIDTH_N-PIXL_WIDTH)/2,cv::BORDER_CONSTANT,cv::Scalar(128,0));
                CHECK_VPI_STATUS(vpiImageCreateOpenCVMatWrapper(input_image_new, 0, &image_vpi_UYVY));
            }
            else
            {
                cv::copyMakeBorder(input_image,input_image_new,0,0,(PIXL_WIDTH_N-PIXL_WIDTH)/2,(PIXL_WIDTH_N-PIXL_WIDTH)/2,cv::BORDER_CONSTANT,cv::Scalar(128,0));
                CHECK_VPI_STATUS(vpiImageSetWrappedOpenCVMat(image_vpi_UYVY, input_image_new));
            }

            // UYVY -> NV12ER on the VIC engine.
            CHECK_VPI_STATUS(vpiSubmitConvertImageFormat(stream, VPI_BACKEND_VIC, image_vpi_UYVY, image_vpi_NV12ER_in, 0));

            if (extern_use_compress_)
            {
                // Downscale to half resolution before any further processing.
                CHECK_VPI_STATUS(vpiSubmitRescale(stream, VPI_BACKEND_VIC, image_vpi_NV12ER_in, image_vpi_NV12ER_in_resize, VPI_INTERP_LINEAR, VPI_BORDER_CLAMP, 0));
            }

            if (extern_use_undis_)
            {
                if (extern_use_compress_)
                {
                    // Undistort the resized image, then convert to BGR.
                    CHECK_VPI_STATUS(vpiSubmitRemap(stream, VPI_BACKEND_VIC, remap, image_vpi_NV12ER_in_resize, image_vpi_NV12ER_out_resize, VPI_INTERP_CATMULL_ROM,
                                                    VPI_BORDER_ZERO, 0));

                    CHECK_VPI_STATUS(vpiSubmitConvertImageFormat(stream, VPI_BACKEND_VIC, image_vpi_NV12ER_out_resize, image_vpi_BGRA8_resize, 0));
                    CHECK_VPI_STATUS(vpiSubmitConvertImageFormat(stream, VPI_BACKEND_CPU, image_vpi_BGRA8_resize, image_vpi_BGR8_resize, 0));
                }
                else
                {
                    // Undistort the full-size image, then convert to BGR.
                    CHECK_VPI_STATUS(vpiSubmitRemap(stream, VPI_BACKEND_VIC, remap, image_vpi_NV12ER_in, image_vpi_NV12ER_out, VPI_INTERP_CATMULL_ROM,
                                                VPI_BORDER_ZERO, 0));
                    CHECK_VPI_STATUS(vpiSubmitConvertImageFormat(stream, VPI_BACKEND_VIC, image_vpi_NV12ER_out, image_vpi_BGRA8, 0));
                    CHECK_VPI_STATUS(vpiSubmitConvertImageFormat(stream, VPI_BACKEND_CPU, image_vpi_BGRA8, image_vpi_BGR8, 0));
                }
            }
            else
            {
                if (extern_use_compress_)
                {
                    // No undistortion: straight NV12 -> BGRA -> BGR (half size).
                    CHECK_VPI_STATUS(vpiSubmitConvertImageFormat(stream, VPI_BACKEND_VIC, image_vpi_NV12ER_in_resize, image_vpi_BGRA8_resize, 0));
                    CHECK_VPI_STATUS(vpiSubmitConvertImageFormat(stream, VPI_BACKEND_CPU, image_vpi_BGRA8_resize, image_vpi_BGR8_resize, 0));
                }
                else
                {
                    // No undistortion: straight NV12 -> BGRA -> BGR (full size).
                    CHECK_VPI_STATUS(vpiSubmitConvertImageFormat(stream, VPI_BACKEND_VIC, image_vpi_NV12ER_in, image_vpi_BGRA8, 0));
                    CHECK_VPI_STATUS(vpiSubmitConvertImageFormat(stream, VPI_BACKEND_CPU, image_vpi_BGRA8, image_vpi_BGR8, 0));
                }
            }

            // Wait for all submitted stages; afterwards output_image /
            // output_image_resize hold the frame's BGR result.
            CHECK_VPI_STATUS(vpiStreamSync(stream));

            sensor_msgs::Image image_msg;
            image_msg.header.stamp = ros::Time::now();
            image_msg.header.frame_id = "senyun";
            image_msg.encoding = "bgr8";
            image_msg.is_bigendian = false;
            if (extern_use_compress_)
            {// for bev occ detection
                image_msg.height = 1080 / 2;
                image_msg.width = PIXL_WIDTH_N / 2;
                image_msg.step = static_cast<sensor_msgs::Image::_step_type>(PIXL_WIDTH_N / 2 * 3);
                image_msg.data.resize(PIXL_WIDTH_N / 2 * 1080 / 2 * 3);

                memcpy(image_msg.data.data(), output_image_resize.ptr<uint8_t>(),
                       sizeof(uint8_t) * PIXL_WIDTH_N / 2 * 1080 / 2 * 3);
            }
            else
            {
                if (extern_use_undis_==false)// for calib
                {
                    image_msg.height = 1080;
                    image_msg.width = PIXL_WIDTH;
                    image_msg.step = static_cast<sensor_msgs::Image::_step_type>(PIXL_WIDTH * 3);
                    image_msg.data.resize(PIXL_WIDTH * 1080 * 3);

                    // Crop the padding back off so the published image matches
                    // the original sensor width.
                    int width = output_image.cols;
                    int height = output_image.rows;
                    int left = (PIXL_WIDTH_N-PIXL_WIDTH)/2;
                    int right = width - (PIXL_WIDTH_N-PIXL_WIDTH)/2;
                    cv::Rect cropRegion(left, 0, right - left, height);
                    // BUGFIX: clone() makes the ROI contiguous. The previous
                    // flat memcpy from a strided ROI view copied wrong pixels
                    // (rows interleaved with padding from the parent Mat).
                    cv::Mat croppedImage = output_image(cropRegion).clone();
                    // BUGFIX: w/h arguments were swapped in the printf.
                    printf("crop img out: w %d,h %d :",croppedImage.cols,croppedImage.rows);
                    memcpy(image_msg.data.data(), croppedImage.ptr<uint8_t>(),
                        sizeof(uint8_t) * PIXL_WIDTH * 1080 * 3);
                }
                else
                {
                    image_msg.height = 1080;
                    image_msg.width = PIXL_WIDTH_N;
                    image_msg.step = static_cast<sensor_msgs::Image::_step_type>(PIXL_WIDTH_N * 3);
                    image_msg.data.resize(PIXL_WIDTH_N * 1080 * 3);
                    memcpy(image_msg.data.data(), output_image.ptr<uint8_t>(),
                        sizeof(uint8_t) * PIXL_WIDTH_N * 1080 * 3);
                }
            }

            publishers_["im_topic"].publish(image_msg);
            printf("img out: w %d,h %d :",image_msg.width,image_msg.height);

            gettimeofday(&ts2, NULL);
            long time_diff = (ts2.tv_sec - ts1.tv_sec) * 1000 + (ts2.tv_usec - ts1.tv_usec) / 1000;
            printf("\n get image time: %ld\n", time_diff);
            usleep(1000);
        }

        // Shutdown: release every VPI resource created above.
        // BUGFIX: previously the resize images, the BGR8 wrappers, the remap
        // payload and the warp-map data were leaked.
        vpiStreamDestroy(stream);
        vpiPayloadDestroy(remap);
        vpiWarpMapFreeData(&map);
        vpiWarpMapFreeData(&map_resize);
        vpiImageDestroy(image_vpi_UYVY);
        vpiImageDestroy(image_vpi_NV12ER_in);
        vpiImageDestroy(image_vpi_NV12ER_in_resize);
        vpiImageDestroy(image_vpi_NV12ER_out);
        vpiImageDestroy(image_vpi_NV12ER_out_resize);
        vpiImageDestroy(image_vpi_BGRA8);
        vpiImageDestroy(image_vpi_BGRA8_resize);
        vpiImageDestroy(image_vpi_BGR8);
        vpiImageDestroy(image_vpi_BGR8_resize);
        camera_handler->close_camera();
    }

    /// Legacy V4L2 capture path -- the entire body is commented-out dead code
    /// kept for reference. The live pipeline is SenyunDriver::loop(); consider
    /// deleting this once the VPI path is fully validated.
    void SenyunDriver::get_a_frame()
    {
        // struct timeval ts;
        // struct v4l2_buffer buf;
        // CLEAR(buf);

        // buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        // buf.memory = V4L2_MEMORY_MMAP;

        // // get picture
        // //gettimeofday(&ts, NULL);
        // //printf("\n[%lu.%lu]\tbefore get picture\n", ts.tv_sec, ts.tv_usec);
        // if (-1 == xioctl(fd_video, VIDIOC_DQBUF, &buf))
        //     errno_exit("VIDIOC_DQBUF");
        // //gettimeofday(&ts, NULL);
        // //printf("[%lu.%lu]\tget image index = %d.\n", ts.tv_sec, ts.tv_usec, ++index_get);
        // //printf("[%lu.%lu]\tget image is ok.\n", ts.tv_sec, ts.tv_usec);

        // // deal with image data
        // // process_image(buffers[buf.index].start, buf.bytesused);
        // process_image_cuda(buffers[buf.index].start, buf.bytesused);
        // //gettimeofday(&ts, NULL);
        // //printf("[%lu.%lu]\tprocess image is ok\n", ts.tv_sec, ts.tv_usec);

        // cv::Mat output_im = cv::Mat::zeros(pixel_h/2, pixel_w/2, CV_8UC3);
        // //cv::Mat undistort_im = cv::Mat::zeros(pixel_h/2, pixel_w/2, CV_8UC3);
        // memcpy(output_im.ptr<uint8_t>(), show_buf, sizeof(uint8_t)*pixel_w*pixel_h*3/4);
        // //gettimeofday(&ts, NULL);
        // //printf("[%lu.%lu]\tcopy to cv image is ok\n", ts.tv_sec, ts.tv_usec);

        // // if(use_undis_)
        // // {
        // //     undiundistort(show_buf,output_im, param_, undis_input_gpu_, undis_output_gpu_, pixel_h, pixel_w);
        // //     undistort_image(output_im, undistort_im);
        // //     output_im = undistort_im.clone();
        // // }

        // // cv::undistort(output_im, undistort_im, intrinsic_matrix, distort_);
        // // cv::imshow("origin", output_im);
        // // cv::imshow("undistort_im", undistort_im);
        // // cv::waitKey(10);
        // //if(cv::waitKey(10)==113){
        // //    cv::imwrite("/home/nvidia/test.png", output_im);
        // //}
        // // TODO: an image-compression interface (compress_image) still needs to be added here
        // if(extern_use_compress_)
        // {

        // }
        // else
        // {
        //     sensor_msgs::Image image_msg;
        //     image_msg.header.stamp = ros::Time::now();
        //     image_msg.header.frame_id = "senyun";
        //     image_msg.height = output_im.rows;
        //     image_msg.width = output_im.cols;
        //     image_msg.encoding = "bgr8";
        //     image_msg.is_bigendian = false;
        //     image_msg.step = static_cast<sensor_msgs::Image::_step_type>(output_im.step);
        //     image_msg.data.resize(output_im.rows*output_im.cols*3);
        //     memcpy(image_msg.data.data(),output_im.ptr<uint8_t>(),
        //         sizeof(uint8_t)*output_im.rows*output_im.cols*3);
        //     publishers_["im_topic"].publish(image_msg);
        // }

        // //gettimeofday(&ts, NULL);
        // //printf("[%lu.%lu]\tpublish image is ok\n", ts.tv_sec, ts.tv_usec);

        // if (-1 == xioctl(fd_video, VIDIOC_QBUF, &buf))
        // 	errno_exit("VIDIOC_QBUF");
    }

    /// Undistorts origin_im on the GPU using the stored calibration
    /// (intrinsic_matrix / distort_).
    /// @param origin_im input image; not modified.
    /// @param undis_im  receives the undistorted result (same size/type).
    /// NOTE: the rectification maps are rebuilt on every call; cache them as
    /// members if this ever runs per-frame.
    void SenyunDriver::undistort_image(cv::Mat &origin_im, cv::Mat &undis_im)
    {
        // Build the pixel-remapping tables on the host from the calibration.
        cv::Mat host_map_x;
        cv::Mat host_map_y;
        initUndistortRectifyMap(
            intrinsic_matrix, distort_, cv::Mat(),
            intrinsic_matrix, origin_im.size(),
            CV_32FC1, host_map_x, host_map_y);

        // Upload the source image and both maps to device memory.
        cv::cuda::GpuMat gpu_src(origin_im);
        cv::cuda::GpuMat gpu_map_x(host_map_x);
        cv::cuda::GpuMat gpu_map_y(host_map_y);
        cv::cuda::GpuMat gpu_dst(origin_im.size(), origin_im.type());

        // Remap on the device, then copy the result back to the caller.
        ::cv::cuda::remap(gpu_src, gpu_dst, gpu_map_x, gpu_map_y, cv::INTER_LINEAR);
        gpu_dst.download(undis_im);
    }

}

PLUGINLIB_EXPORT_CLASS(senyun_driver::SenyunDriver, nodelet::Nodelet);
