#include "v4l2.h"

// Default constructor: members are expected to be initialized in-class or
// by Init(); nothing to do here.
VideoDevice::VideoDevice()
{

}

// Destructor: intentionally empty — resources (fd, mmap'd buffers) are
// released explicitly via v4l2_streamoff(), not here.
VideoDevice::~VideoDevice()
{
 
}

//1.打开设备    
int VideoDevice::Init(const char*argv[])
{
    int fd = open(argv[1],O_RDWR);
    if(fd < 0)
    {
        ERROR("open video failed");
        return -1;
    }
    iFd = fd;
    Print_GREEN("%s open success", argv[1]);
    return 0;
}
/*
查询设备属性需要使用struct v4l2_capability结构体，该结构体描述了视频采集设备的driver信息
struct v4l2_capability
{
	u8 driver[16];      // 驱动名字
	u8 card[32];        // 设备名字
	u8 bus_info[32];    // 设备在系统中的位置
	u32 version;        // 驱动版本号
	u32 capabilities;   // 设备支持的操作
	u32 reserved[4];    // 保留字段
};

其中最重要的是capabilities字段，这个字段标记着v4l2设备的功能，capabilities有以下部分标记位
ID                          描述符
V4L2_CAP_VIDEO_CAPTURE      设备支持捕获功能
V4L2_CAP_VIDEO_OUTPUT       设备支持输出功能
V4L2_CAP_VIDEO_OVERLAY      设备支持预览功能
V4L2_CAP_STREAMING          设备支持流读写
V4L2_CAP_READWRITE          设备支持read、write方式读写
*/
//2.查询设备功能
//2. Query and print the device's driver info and capability flags.
//   Returns 0 on success, -1 if VIDIOC_QUERYCAP fails.
int VideoDevice::v4l2_cap()
{
    struct v4l2_capability cap;
    memset(&cap, 0, sizeof(cap));
    if (ioctl(iFd, VIDIOC_QUERYCAP, &cap))
    {
        ERROR("ioctl VIDIOC_QUERYCAP(vidioc_querycap) failed");
        return -1;
    }

    // Capability bits and their report strings, checked in a fixed order.
    static const struct
    {
        __u32 mask;
        const char* msg;
    } kCapTable[] = {
        { V4L2_CAP_VIDEO_CAPTURE_MPLANE, "v4l2 dev support capture_mplane" },
        { V4L2_CAP_VIDEO_CAPTURE,        "v4l2 dev support capture" },
        { V4L2_CAP_VIDEO_OUTPUT_MPLANE,  "v4l2 dev support output_mplane" },
        { V4L2_CAP_VIDEO_OUTPUT,         "v4l2 dev support output" },
        { V4L2_CAP_VIDEO_OVERLAY,        "v4l2 dev support overlay" },
        { V4L2_CAP_STREAMING,            "v4l2 dev support streaming" },
        { V4L2_CAP_READWRITE,            "v4l2 dev support read write" },
    };

    for (size_t i = 0; i < sizeof(kCapTable) / sizeof(kCapTable[0]); ++i)
    {
        if (cap.capabilities & kCapTable[i].mask)
            Print_GREEN("%s", kCapTable[i].msg);
    }

    std::cout << "driver = " << cap.driver << std::endl;
    std::cout << "card = " << cap.card << std::endl;
    std::cout << "bus_info = " << cap.bus_info << std::endl;
    // Kernel encodes the version as (major << 16) | (minor << 8) | patch.
    std::cout << "version = " << (cap.version >> 16 & 0xff) << "."
              << (cap.version >> 8 & 0xff) << "."
              << (cap.version & 0xff) << std::endl;

    return 0;
}

//3.设置输入设备
//3. Enumerate the device's inputs and select input 0.
//   Returns 0 on success, -1 if VIDIOC_S_INPUT fails.
int VideoDevice::v4l2_input()
{
//(1) Enumerate all inputs the device exposes.
    struct v4l2_input input;
    memset(&input, 0, sizeof(struct v4l2_input));
    input.index = 0;
    while(!ioctl(iFd, VIDIOC_ENUMINPUT, &input))
    {
        std::cout << "index = " << input.index << std::endl;
        std::cout << "input = " << input.name << std::endl;
        ++input.index;
    }
//(2) Select input 0. Per the V4L2 spec, VIDIOC_S_INPUT takes a pointer
//    to an integer index — NOT a struct v4l2_input. The old code passed
//    the struct (working only because `index` is its first member) and
//    wrote a buffer type into input.type, which holds V4L2_INPUT_TYPE_*
//    values, not buffer types.
    int index = 0;
    if(ioctl(iFd, VIDIOC_S_INPUT, &index))
    {
       ERROR("ioctl VIDIOC_S_INPUT(vidioc_s_input) failed");
       return -1;
    }

    Print_GREEN("VIDIOC_S_INPUT success");
    return 0;
}

/*
​显示所有支持的格式需要用到struct v4l2_fmtdesc结构体，该结构体描述当前camera支持的格式信息
struct v4l2_fmtdesc
{
    __u32 index;               // 要查询的格式序号，应用程序设置
    enum v4l2_buf_type type;   // 帧类型，应用程序设置
    __u32 flags;               // 是否为压缩格式
    __u8 description[32];      // 格式名称
    __u32 pixelformat;         // 所支持的格式
    __u32 reserved[4];         // 保留
};
设置图像格式需要用到struct v4l2_format结构体，该结构体描述每帧图像的具体格式，包括帧类型以及图像的长、宽等信息
struct v4l2_format
{
    enum v4l2_buf_type type;          // 帧类型，应用程序设置
    union fmt
    {
        struct v4l2_pix_format pix;   // 视频设备使用
        struct v4l2_window win;
        struct v4l2_vbi_format vbi;
        struct v4l2_sliced_vbi_format sliced;
        __u8 raw_data[200];
    };
};
*/
//4.设置图像格式
//4. Enumerate supported pixel formats, then set and read back the frame
//   format (size + fourcc). Caches the negotiated width/height/fourcc in
//   iWidth/iHeight/iPixelFormat. Returns 0 on success, -1 on ioctl failure.
int VideoDevice::v4l2_fmt()
{
//(1) List every pixel format the driver reports.
    struct v4l2_fmtdesc fmtdesc;
    memset(&fmtdesc, 0, sizeof(struct v4l2_fmtdesc));
    fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmtdesc.index = 0;
    while(!ioctl(iFd, VIDIOC_ENUM_FMT, &fmtdesc))
    {
        // __u32 fields take %u, not %d.
        printf("index = %u\n", fmtdesc.index);
        printf("flags = %u\n", fmtdesc.flags);
        printf("reserved = %u\n", fmtdesc.reserved[0]);
        printf("description = %s\n", fmtdesc.description);
        // The fourcc is four ASCII bytes packed into a __u32.
        unsigned char *p = (unsigned char*)&fmtdesc.pixelformat;
        printf("pixelformat = %c%c%c%c\n", p[0],p[1],p[2],p[3]);
        fmtdesc.index++;
    }
//(2) Request the desired capture format.
    struct v4l2_format v4l2_fmt;
    memset(&v4l2_fmt, 0, sizeof(struct v4l2_format));
    v4l2_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    v4l2_fmt.fmt.pix.width = WIDTH;
    v4l2_fmt.fmt.pix.height = HEIGHT;
#ifdef YUYV 
    v4l2_fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
#elif defined(MJPEG)
    v4l2_fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;
#else
    // NOTE(review): neither YUYV nor MJPEG is defined here, so pixelformat
    // stays 0 and the driver picks (or rejects) a format on its own.
    // Define one of the two macros in the build to get a deterministic format.
#endif
    v4l2_fmt.fmt.pix.field = V4L2_FIELD_ANY; // interlace/scan mode: let the driver choose
    if (ioctl(iFd, VIDIOC_S_FMT, &v4l2_fmt))
    {
        ERROR("ioctl VIDIOC_S_FMT(vidioc_s_fmt) failed");
        return -1;
    }

    // Read the format back: the driver may have adjusted width/height/fourcc,
    // so the negotiated values — not the requested ones — are what we cache.
    memset(&v4l2_fmt, 0, sizeof(struct v4l2_format));
    v4l2_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(iFd, VIDIOC_G_FMT, &v4l2_fmt))
    {
        ERROR("ioctl VIDIOC_G_FMT(vidioc_G_fmt) failed");
        return -1;
    }

    printf("v4l2_fmt.fmt.pix.width = %u\n",v4l2_fmt.fmt.pix.width);
    printf("v4l2_fmt.fmt.pix.height = %u\n",v4l2_fmt.fmt.pix.height);
    unsigned char* p = (unsigned char *)&v4l2_fmt.fmt.pix.pixelformat;
    printf("v4l2_fmt.fmt.pix.pixelformat = %c%c%c%c\n",p[0],p[1],p[2],p[3]);

    iHeight = v4l2_fmt.fmt.pix.height;
    iWidth = v4l2_fmt.fmt.pix.width;
    iPixelFormat = v4l2_fmt.fmt.pix.pixelformat;

    Print_GREEN("设置像素格式 success");
    return 0;
}

/*    
相关结构体如下，该结构体描述申请的缓冲区的基本信息
struct v4l2_requestbuffers
{
    __u32 count;                    // 缓冲区内缓冲帧的数目
    enum v4l2_buf_type type;        // 缓冲帧数据格式
    enum v4l2_memory memory;        // 区别是内存映射还是用户指针方式
    __u32 reserved[2];
};
相关结构体如下，该结构体表示一帧图像数据的基本信息，包含序号、缓冲帧长度和缓冲帧地址等信息
struct v4l2_buffer
{
    __u32 index;                    //buffer 序号
    enum v4l2_buf_type type;        //buffer 类型
    __u32 bytesused;                //buffer 中已使用的字节数
    __u32 flags;                    // 区分是MMAP 还是USERPTR
    enum v4l2_field field;
    struct timeval timestamp;       // 获取第一个字节时的系统时间
    struct v4l2_timecode timecode;
    __u32 sequence;                 // 队列中的序号
    enum v4l2_memory memory;        //IO 方式，被应用程序设置
    union m
    {
        __u32 offset;               // 缓冲帧地址，只对MMAP 有效
        unsigned long userptr;
    };
    __u32 length;                   // 缓冲帧长度
    __u32 input;
    __u32 reserved;
};
void *mmap(void*addr, size_t length, int prot, int flags, int fd, off_t offset);
(1.addr：映射起始地址，一般为NULL，让内核自动选择；
(2.length：被映射内存块的长度；
(3.prot：标志映射后能否被读写，其值为PROT_EXEC,PROT_READ,PROT_WRITE,PROT_NONE；
(4.flags：确定此内存映射能否被其他进程共享，可设置为MAP_SHARED或MAP_PRIVATE；
(5.fd：设备文件句柄；
(6.offset：确定映射后的内存地址
*/
//5.设置缓存
int VideoDevice::v4l2_reqbufs()
{
//(1)申请缓存
    struct v4l2_requestbuffers req;
    memset(&req, 0, sizeof(struct v4l2_requestbuffers));
    req.count = NB_BUFFER;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    if (ioctl(iFd, VIDIOC_REQBUFS, &req))
    {
        ERROR("ioctl VIDIOC_REQBUFS(vidioc_reqbufs) failed");
        return -1;
    }

    Print_GREEN("申请缓存 success");

    iVideoBufCnt = NB_BUFFER;
//(2)映射缓存
    struct v4l2_buffer v4l2_buffer;
    for (int i = 0; i < NB_BUFFER; i++)
    {
        memset(&v4l2_buffer, 0, sizeof(struct v4l2_buffer));
        v4l2_buffer.index = i;
        v4l2_buffer.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        v4l2_buffer.memory = V4L2_MEMORY_MMAP;
        if (ioctl(iFd, VIDIOC_QUERYBUF, &v4l2_buffer))
        {
            ERROR("ioctl VIDIOC_QUERYBUF(vidioc_querybuf) failed");
            return -1;
        }

        pucVideBuf[i] = (unsigned char*)mmap(NULL /* start anywhere */ ,
                v4l2_buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED, iFd,
                v4l2_buffer.m.offset);
        if (pucVideBuf[i] == MAP_FAILED)
        {
            ERROR("mmap failed");
            return -1;
        }
    }
    
    iVideoBufMaxLen = v4l2_buffer.length;
    std::cout << "v4l2_buffer.length = " << v4l2_buffer.length << std::endl;

    Print_GREEN("映射缓存 success");
//(3)缓存帧放入队列
    for (int i = 0; i < NB_BUFFER; i++)
    {
        memset(&v4l2_buffer, 0, sizeof(struct v4l2_buffer));
        v4l2_buffer.index = i;
        v4l2_buffer.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        v4l2_buffer.memory = V4L2_MEMORY_MMAP;
        if(ioctl(iFd, VIDIOC_QBUF, &v4l2_buffer))
        {
            ERROR("ioctl VIDIOC_QBUF(vidioc_qbuf) failed");
            return -1;
        }
    }

    Print_GREEN("缓存帧放入队列 success");
    return 0;
}

//6.启动捕捉图像数据
//6. Start streaming: tell the driver to begin filling queued buffers.
//   Returns 0 on success, -1 on ioctl failure.
int VideoDevice::v4l2_streamon()
{
    enum v4l2_buf_type bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    const int rc = ioctl(iFd, VIDIOC_STREAMON, &bufType);
    if (rc != 0)
    {
        ERROR("ioctl VIDIOC_STREAMON(vidioc_streamon) failed");
        return -1;
    }

    Print_GREEN("启动捕捉图像数据 success"); 
    return 0;
}

/*
struct pollfd {
    int   fd;         // 文件描述符 
    short events;     // 请求的事件 
    short revents;    // 返回的事件 
};
*/
//7.出列采集的帧缓冲，并处理图像数据，然后再将数据帧入列
// Shared between v4l2_dqbuf() and v4l2_qbuf(): the most recently dequeued
// frame descriptor, re-queued as-is by v4l2_qbuf().
struct v4l2_buffer v4l2_buffer;
// Wait for a filled frame and dequeue it; the buffer index is stored in
// iVideoBufCurIndex. Returns 0 on success, -1 on interruption/error/EAGAIN.
int VideoDevice::v4l2_dqbuf()
{
    // Block until the driver signals a readable (filled) frame.
    struct pollfd pfd;
    memset(&pfd, 0, sizeof(pfd));
    pfd.fd = iFd;
    pfd.events = POLLIN;

    if (poll(&pfd, 1, -1) == -1)
    {
        // EINTR is a benign wakeup — bail quietly; anything else is logged.
        if (errno != EINTR)
            ERROR("poll failed");
        return -1;
    }

    memset(&v4l2_buffer, 0, sizeof(v4l2_buffer));
    v4l2_buffer.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    v4l2_buffer.memory = V4L2_MEMORY_MMAP;
    if (ioctl(iFd, VIDIOC_DQBUF, &v4l2_buffer))
    {
        // EAGAIN: no frame ready yet — not an error worth logging.
        if (errno != EAGAIN)
            ERROR("ioctl VIDIOC_DQBUF(vidioc_dqbuf) failed");
        return -1;
    }

    iVideoBufCurIndex = v4l2_buffer.index;

    return 0;
}

int VideoDevice::v4l2_qbuf()
{   
    if(ioctl(iFd, VIDIOC_QBUF, &v4l2_buffer))
    {
        ERROR("ioctal VIDIOC_QBUF(vidioc_qbuf) failed");
        return -1;
    }
    // Print_GREEN("数据帧入列 success");
    return 0;
}

//8.关闭设备
//8. Stop streaming and release everything: unmap the capture buffers,
//   free the RGB conversion buffers, and close the device.
//   Returns 0 on success, -1 if VIDIOC_STREAMOFF fails.
int VideoDevice::v4l2_streamoff()
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(iFd, VIDIOC_STREAMOFF, &type) < 0)
    {
        ERROR("ioctl VIDIOC_STREAMOFF(vidioc_streamoff) failed");
        return -1;
    }

    for(int i = 0; i < NB_BUFFER; i++)
    {
        if(pucVideBuf[i])
        {
            munmap(pucVideBuf[i], iVideoBufMaxLen);
            pucVideBuf[i] = NULL;
        }

        if (rgb_data[i] != nullptr) 
        {
            delete[] rgb_data[i];
            rgb_data[i] = nullptr;
        }
    }

    close(iFd);
    // Invalidate the descriptor so any later accidental ioctl/close on this
    // object cannot hit a stale (possibly reused) fd.
    iFd = -1;

    Print_GREEN("关闭设备v4l2 success");
    
    return 0;
}
