#include "sv2_camera_node.h"
#include <cmath>
#include <fstream>
#include <sys/mman.h>
#include <sys/stat.h>
#include <linux/videodev2.h>
#include <sys/ioctl.h>
#include <fcntl.h>
#include <unistd.h>  
#include <sys/stat.h> 
#include <sys/mman.h>
#include "rgatest.hpp"

// Global V4L2 device file descriptor shared by SensorMipiInit() and run().
// NOTE(review): 0 is a valid fd (stdin); -1 would be a safer "not open"
// sentinel — confirm no caller relies on the 0 initial value.
int sdivideo_fd = 0;

namespace sv2 {

using namespace cv;

/// Map a camera-type configuration string to its CameraType enumerator.
/// @param camera_string  upper-case type name, e.g. "MIPI", "RTSP".
/// @return matching CameraType, or CameraType::NONE for unknown strings.
CameraType MIPICameraCppNode_Rknn::getCameraType(const std::string &camera_string)
{
  // static const: build the lookup table once instead of on every call.
  static const std::unordered_map<std::string, CameraType> cameraTypeMap = {
    {"NONE", CameraType::NONE},
    {"WEBCAM", CameraType::WEBCAM},
    {"V4L2CAM", CameraType::V4L2CAM},
    {"MIPI", CameraType::MIPI},
    {"RTSP", CameraType::RTSP},
    {"VIDEO", CameraType::VIDEO},
    {"G1", CameraType::G1},
    {"Q10", CameraType::Q10},
    {"GX40", CameraType::GX40},
    {"SU17", CameraType::SU17}
  };
  auto it = cameraTypeMap.find(camera_string);
  if (it != cameraTypeMap.end())
  {
    return it->second;
  }
  // Unknown / unsupported camera string falls back to NONE.
  return CameraType::NONE;
}

// Bookkeeping for one mmap'ed V4L2 capture buffer (multi-planar API,
// single plane per buffer — see buffer.length = 1 in SensorMipiInit()).
typedef struct
{
    unsigned int mLength;   // byte length of the plane, from VIDIOC_QUERYBUF
    unsigned int mOffset;   // mmap offset reported by VIDIOC_QUERYBUF
    unsigned char mIndex;   // V4L2 buffer index (0..3)
    char* mStart;           // user-space address returned by mmap()
}V4L2_BufferRecord;

// Records for the four driver buffers requested in SensorMipiInit();
// consumed by run() and unmapped when the capture loop exits.
V4L2_BufferRecord  v4l2_buffer_record_sdi[4] = { 0 };
/// Open and configure a V4L2 multi-planar capture device, mmap 4 driver
/// buffers into v4l2_buffer_record_sdi[], queue them and start streaming.
/// @param devname  V4L2 device node, e.g. "/dev/video0".
/// @return 0 on success, -1 on any failure (fd and mappings are released).
int MIPICameraCppNode_Rknn::SensorMipiInit(const char* devname)
{
    int ret = 0;
    int i = 0;

    // 1. Open the video node.
    sdivideo_fd = open(devname, O_RDWR, 0);
    if (sdivideo_fd < 0)
    {
        perror("打开设备失败");
        return -1;
    }

    // 2. Query device capabilities.
    struct v4l2_capability caps;
    ret = ioctl(sdivideo_fd, VIDIOC_QUERYCAP, &caps);
    if (ret < 0)
    {
        perror("获取属性失败");
        close(sdivideo_fd);   // fix: fd was leaked on this error path
        return -1;
    }
    // 3. Print the capabilities for diagnostics.
    printf("open device %s fd=%d\n", devname, sdivideo_fd);
    printf("driver: %s\n", caps.driver);
    printf("Card: %s\n", caps.card);
    printf("Version: %u.%u.%u\n", (caps.version >> 16) & 0xFF,
        (caps.version >> 8) & 0xFF,
        (caps.version) & 0xFF);
    printf("All Caps: %08X\n", caps.capabilities);
    printf("Dev Caps: %08X\n", caps.device_caps);

    // 4. Configure the capture format (multi-planar API) and pre-allocate
    //    the matching source Mat for the raw frame.
    struct v4l2_format format;
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    format.fmt.pix_mp.width = this->image_width;
    format.fmt.pix_mp.height = this->image_height;
    if (this->fourcc == "NV12")
    {
        format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12;
        // NV12: full-res Y plane + half-res interleaved CbCr -> 1.5 bytes/px.
        this->imgSrc = Mat(this->image_height * 3 / 2, this->image_width, CV_8UC1);
    }
    else if (this->fourcc == "YUYV")
    {
        format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_YUYV;
        this->imgSrc = Mat(this->image_height, this->image_width, CV_8UC2);
    }
    else if (this->fourcc == "UYVY")
    {
        format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_UYVY;
        this->imgSrc = Mat(this->image_height, this->image_width, CV_8UC2);
    }
    else
    {
        printf("******其他格式暂不支持使用******\n");
        close(sdivideo_fd);   // fix: fd was leaked on this error path
        return -1;
    }
    format.fmt.pix_mp.field = V4L2_FIELD_NONE;

    // S_FMT failure is deliberately non-fatal: the format actually in
    // effect is read back and validated with VIDIOC_G_FMT below.
    if (ioctl(sdivideo_fd, VIDIOC_S_FMT, &format) < 0)
    {
        perror("配置mipi相机格式失败");
    }

    // 5. Read back the format the driver actually selected.
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    if (ioctl(sdivideo_fd, VIDIOC_G_FMT, &format) == 0)
    {
        printf("[%s] Current output format:  fmt=0x%X, %dx%d\n", __FUNCTION__,
            format.fmt.pix_mp.pixelformat,
            format.fmt.pix_mp.width,
            format.fmt.pix_mp.height);
    }
    else
    {
        perror("获取sdi相机格式失败");
        close(sdivideo_fd);   // fix: fd was leaked on this error path
        return -1;
    }

    // 6. Ask the driver for 4 mmap-able stream buffers.
    struct v4l2_requestbuffers bufrequest;
    memset(&bufrequest, 0, sizeof(bufrequest));  // fix: reserved fields must be zeroed
    bufrequest.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    bufrequest.memory = V4L2_MEMORY_MMAP;
    bufrequest.count = 4;

    if (ioctl(sdivideo_fd, VIDIOC_REQBUFS, &bufrequest) < 0) {
        perror("配置缓存失败");
        close(sdivideo_fd);
        return -1;
    }

    // 7. Query each buffer's offset/length and map it into user space.
    struct v4l2_buffer buffer;
    struct v4l2_plane  planes;

    memset(&buffer, 0, sizeof(struct v4l2_buffer));
    memset(&planes, 0, sizeof(struct v4l2_plane));

    for (i = 0; i < 4; i++) {
        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        buffer.memory = V4L2_MEMORY_MMAP;
        buffer.index = i;
        buffer.length = 1;        // single plane per buffer
        buffer.m.planes = &planes;

        if (ioctl(sdivideo_fd, VIDIOC_QUERYBUF, &buffer) < 0) {
            perror("查询内核空间失败");
            while (--i >= 0)      // fix: unmap buffers already mapped
                munmap(v4l2_buffer_record_sdi[i].mStart, v4l2_buffer_record_sdi[i].mLength);
            close(sdivideo_fd);
            return -1;
        }

        v4l2_buffer_record_sdi[i].mLength = buffer.m.planes->length;
        v4l2_buffer_record_sdi[i].mOffset = (size_t)buffer.m.planes->m.mem_offset;
        v4l2_buffer_record_sdi[i].mIndex = i;

        v4l2_buffer_record_sdi[i].mStart = (char*)mmap(NULL, v4l2_buffer_record_sdi[i].mLength,
            PROT_READ | PROT_WRITE, MAP_SHARED, sdivideo_fd,
            v4l2_buffer_record_sdi[i].mOffset);
        // fix: mmap signals failure with MAP_FAILED ((void*)-1), never NULL.
        if (v4l2_buffer_record_sdi[i].mStart == MAP_FAILED) {
            printf("mmap failed!!!!!\n");
            while (--i >= 0)      // fix: unmap buffers already mapped
                munmap(v4l2_buffer_record_sdi[i].mStart, v4l2_buffer_record_sdi[i].mLength);
            close(sdivideo_fd);
            return -1;
        }
    }

    // 8. Queue all mapped buffers so the driver can start filling them.
    for (i = 0; i < 4; i++)
    {
        memset(&buffer, 0, sizeof(buffer));
        memset(&planes, 0, sizeof(planes));

        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        buffer.memory = V4L2_MEMORY_MMAP;
        buffer.m.planes = &planes;
        buffer.index = i;
        buffer.length = 1;        // single plane per buffer
        buffer.m.planes->length = v4l2_buffer_record_sdi[i].mLength;
        // fix: mem_offset is the driver offset from QUERYBUF, not the
        // user-space mmap address that was stored here before.
        buffer.m.planes->m.mem_offset = v4l2_buffer_record_sdi[i].mOffset;

        printf("VIDIOC_QBUF index=%d, length=%d \n", buffer.index, buffer.length);
        if (ioctl(sdivideo_fd, VIDIOC_QBUF, &buffer) < 0)
        {
            perror("映射缓冲区入队失败");
            for (i = 0; i < 4; i++)   // fix: unmap all buffers on failure
                munmap(v4l2_buffer_record_sdi[i].mStart, v4l2_buffer_record_sdi[i].mLength);
            close(sdivideo_fd);
            return -1;
        }
    }

    // 9. Start streaming.
    printf("VIDIOC_STREAMON Start\n");
    int type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    if (ioctl(sdivideo_fd, VIDIOC_STREAMON, &type) < 0)
    {
        perror("视频流开启失败");
        for (i = 0; i < 4; i++)       // fix: unmap all buffers on failure
            munmap(v4l2_buffer_record_sdi[i].mStart, v4l2_buffer_record_sdi[i].mLength);
        close(sdivideo_fd);
        return -1;
    }

    return 0;
}

void MIPICameraCppNode_Rknn::run()
{
  auto start_time = std::chrono::steady_clock::now();
  if (R0 < 0)
  {
      printf("open video error\n");
      return;
  }
  struct v4l2_buffer buffer;
  struct v4l2_plane planes;
  struct timeval start_time1, end_time;
  float timeuse = 0.0f, timemax = 0.0f, timemin = 99.0f;
  gettimeofday(&start_time1, NULL);
  _IMGINFO imgSrc, imgDst;
  int ret = 0;
  imgSrc.w = this->image_width;
  imgSrc.h = this->image_height;
  bool fifoImg = true;
  imgSrc.rect.width = 0;
  imgSrc.rect.height = 0;
  imgSrc.rect.x = 0;
  imgSrc.rect.y = 0;
  imgDst.w = this->image_width;
  imgDst.h = this->image_height;
  imgDst.format = RK_FORMAT_BGR_888;//RK_FORMAT_RGB_888;//RK_FORMAT_YCbCr_420_SP;//;
  while (this->is_running())
  {
    memset(&buffer, 0, sizeof(buffer));
    // memset(&planes, 0 , sizeof(planes));
    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    buffer.memory = V4L2_MEMORY_MMAP;

    buffer.m.planes = &planes;
    buffer.length = 1;   //2;
    // Wait for a buffer to be ready
    //printf("to get img\n");
    if (ioctl(sdivideo_fd, VIDIOC_DQBUF, &buffer) < 0) {
        perror("获取队列中的图像失败");
        close(sdivideo_fd);
        break;
    }
    fifoImg = !fifoImg;

    if(this->fps < 50.0 && fifoImg == false)
    {
        if (ioctl(sdivideo_fd, VIDIOC_QBUF, &buffer) < 0) {
            perror("入对buff失败");
            close(sdivideo_fd);
            break;
        }
        continue;
    }
    else
    {
        //memcpy(this->imgSrc.data, v4l2_buffer_record_sdi[buffer.index].mStart, v4l2_buffer_record_sdi[buffer.index].mLength);
        //this->imgSrc.data = v4l2_buffer_record_sdi[buffer.index].mStart;

        if (this->fourcc == "NV12")
        {
            imgSrc.format = RK_FORMAT_YCbCr_420_SP;//RK_FORMAT_RGBA_8888;//RK_FORMAT_RGB_888;//
        }
        else if (this->fourcc == "YUYV")
        {
            imgSrc.format = RK_FORMAT_YUYV_422;//RK_FORMAT_RGBA_8888;//RK_FORMAT_RGB_888;//
        }
        else if (this->fourcc == "UYVY")
        {
            imgSrc.format = RK_FORMAT_UYVY_422;//RK_FORMAT_RGBA_8888;//RK_FORMAT_RGB_888;//
        }
        else
        {
            printf("******其他格式暂不支持使用******\n");
            break;
        }

        img_ = Mat(this->image_height, this->image_width, CV_8UC3);
        imgSrc.imgData = (unsigned char*)v4l2_buffer_record_sdi[buffer.index].mStart;
        imgDst.imgData = img_.data;
        ret = rga_cvtcolor(imgSrc, imgDst);
        if (ret < 0)
        {
            printf("error img\n");
            continue;
        }
    }
    if (ioctl(sdivideo_fd, VIDIOC_QBUF, &buffer) < 0) {
        perror("入对buff失败");
        close(sdivideo_fd);
        break;
    }
#if 0
    gettimeofday(&end_time, NULL);
    timeuse = (end_time.tv_sec - start_time1.tv_sec) * 1000 + ((float)end_time.tv_usec - start_time1.tv_usec) / 1000; //ms
    if(timeuse > timemax)
	    timemax = timeuse;
    if(timeuse < timemin)
	    timemin = timeuse;
    printf("get img time %0.2f, max%0.2f min%0.2f\n", timeuse, timemax, timemin);
    gettimeofday(&start_time1, NULL);
#endif
    if ((this->image_width > 0 && img_.cols != this->image_width) || (this->image_height > 0 && img_.rows != this->image_height))
    {
      if (this->image_width <= 0)  this->image_width = img_.cols;
      if (this->image_height <= 0)  this->image_height = img_.rows;
      cv::resize(img_, img_, cv::Size(this->image_width, this->image_height));
    }
     //cv::imshow("image", img_);
     //cv::waitKey(5);
     //continue;
    nlohmann::json img_msg = _image_pub.cvimg2sms_mem(img_);
    _image_pub.publish(img_msg);

    auto current = std::chrono::steady_clock::now();
    std::chrono::duration<double> elapsed_seconds = current - start_time;
    if (elapsed_seconds.count() >= 1.0)
    {
      nlohmann::json calib_msg = sms::def_msg("sensor_msgs::CameraCalibration");
      calib_msg["type"] = "sensor_msgs::CameraCalibration";
      calib_msg["frame_id"] = this->frame_id;
      calib_msg["width"] = this->image_width;
      calib_msg["height"] = this->image_height;
      calib_msg["distortion_model"]= "plumb_bob";
      calib_msg["K"] = this->camera_matrix;
      calib_msg["D"] = this->distortion_coefficients;
      calib_msg["P"] = this->projection;

      // 发送sensor_msgs::CameraCalibration话题
      _calib_pub.publish(calib_msg);

      start_time = current;
    }
  }
  //停止采集
  int type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  if (ioctl(sdivideo_fd, VIDIOC_STREAMOFF, &type) < 0)
  {
      perror("视频流关闭失败");
  }
  //释放映射
  for (int i = 0; i < 4; i)munmap(v4l2_buffer_record_sdi[i].mStart, v4l2_buffer_record_sdi[i].mLength);
  close(sdivideo_fd);
  printf("!!end mipi_getimg\n");
}

}
