#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>

#include <linux/videodev2.h>

#include <cerrno>
#include <cstring>
#include <iostream>
#include <vector>

// Capture raw UYVY frames from /dev/video0 (V4L2 multi-planar API, mmap
// streaming I/O) and write them to stdout. Returns 0 on clean shutdown,
// -1 on any setup failure.
int main(){
	// Open the V4L2 capture device.
	int fd = open("/dev/video0", O_RDWR);
	if( fd < 0){
	  std::cerr <<"Open video0" << std::endl;
	  return -1;
	}else{
	  std::cout<< "Open video0 successfully"<<std::endl;
	}

	// Query device capabilities (driver name, card, bus, version).
	struct v4l2_capability cap;
	memset(&cap, 0, sizeof(cap));
	if(ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0){
		std::cerr << "Cap video0 error" <<std::endl;
		close(fd);
		return -1;
	}else{
		std::cout<<cap.driver <<"\t"<< cap.card <<"\t"<< cap.bus_info << "\t" <<((cap.version >> 16) & 0xFF) <<"\t"<< ((cap.version >> 8) & 0xFF) <<"\t"<< (cap.version & 0xFF)<<std::endl;
	}

	// Require video capture support (single-planar or multi-planar).
	if(!(cap.capabilities & (V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_CAPTURE_MPLANE))){
		std::cerr <<"VIDEO_CAPTURE error" << std::endl;
		close(fd);
		return -1;
	}

	// Require streaming I/O (needed for mmap buffers).
	if(!(cap.capabilities & V4L2_CAP_STREAMING)){
		std::cerr<< "cap_stream error" <<std::endl;
		close(fd);
		return -1;
	}

	// Enumerate ALL supported pixel formats for multi-planar capture
	// (the original only queried index 0; failure here is non-fatal).
	struct v4l2_fmtdesc v4format;
	memset(&v4format, 0, sizeof(v4format));  // BUG FIX: was uninitialized
	v4format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	for (v4format.index = 0; ioctl(fd, VIDIOC_ENUM_FMT, &v4format) >= 0; v4format.index++) {
		// pixelformat is a fourcc code: print its four bytes as characters.
		unsigned char *p = (unsigned char *)(&v4format.pixelformat);
		std::cout<< v4format.index <<"\t" << v4format.flags << "\t" << v4format.description << " \t" << p[0] <<p[1]<<p[2]<<p[3]<<std::endl;
	}

	// Enumerate ALL discrete frame sizes for UYVY (was: single query).
	struct v4l2_frmsizeenum frmsize;
	memset(&frmsize, 0, sizeof(frmsize));
	frmsize.pixel_format = V4L2_PIX_FMT_UYVY;
	std::cout << "支持的分辨率 (UYVY):" << std::endl;
	for (frmsize.index = 0; ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsize) >= 0; frmsize.index++) {
	    if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
	        std::cout << frmsize.discrete.width << "x" << frmsize.discrete.height << std::endl;
	    }
	}

	// Request the capture format: 640x480 UYVY, multi-planar buffer type.
	struct v4l2_format fmt;
	memset(&fmt,0, sizeof(fmt));
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; // must match the buffer type used below
	fmt.fmt.pix_mp.width = 640;
	fmt.fmt.pix_mp.height = 480;
	fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_UYVY;
	fmt.fmt.pix_mp.field = V4L2_FIELD_NONE;        // progressive (no interlacing)
	fmt.fmt.pix_mp.num_planes = 1;                 // UYVY is packed: one contiguous plane
	fmt.fmt.pix_mp.plane_fmt[0].bytesperline = 640 * 2;    // UYVY: 2 bytes per pixel
	fmt.fmt.pix_mp.plane_fmt[0].sizeimage = 640 * 480 * 2; // total buffer size
	if(ioctl(fd,VIDIOC_S_FMT, &fmt) < 0){
		std::cerr<< "fmt error" <<std::endl;
		close(fd);
		return -1; // BUG FIX: was falling through with a closed fd
	}

	// Read back the format the driver actually negotiated (it may adjust
	// width/height/format) and display it.
	struct v4l2_format gfmt;
	memset(&gfmt, 0, sizeof(gfmt));
	gfmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	if (ioctl(fd, VIDIOC_G_FMT, &gfmt) < 0) {
	    std::cerr << "获取格式失败: " << strerror(errno) << std::endl;
	    close(fd);
	    return -1;
	}
	// BUG FIX: the fourcc is 4 raw bytes, not a NUL-terminated string, and
	// num_planes is __u8 (would print as a control character without the cast).
	const char* fourcc = (const char*)&gfmt.fmt.pix_mp.pixelformat;
	std::cout << "格式: " << gfmt.fmt.pix_mp.width << "x" << gfmt.fmt.pix_mp.height
	          << ", 像素格式: " << fourcc[0] << fourcc[1] << fourcc[2] << fourcc[3]
	          << ", 平面数: " << (int)gfmt.fmt.pix_mp.num_planes << std::endl;

	// Ask the driver for frame buffers (mmap-able). At least 3 is typical;
	// the driver may grant fewer than requested (req.count is updated).
	struct v4l2_requestbuffers req;
	memset(&req, 0, sizeof(req));
	req.count = 4;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	req.memory = V4L2_MEMORY_MMAP;
	if(ioctl(fd, VIDIOC_REQBUFS, &req) <0){
		std::cerr<< "req error" <<std::endl;
		close(fd);
		return -1; // BUG FIX: was falling through with a closed fd
	}

	// One entry per driver buffer: the mmap'ed address and its length.
	struct Buffer {
		void* start = MAP_FAILED; // initialized so cleanup can safely skip never-mapped slots
		size_t length = 0;
	};

	std::vector<Buffer> buffers(req.count);
	// Scratch plane array reused for QUERYBUF/QBUF/DQBUF (multi-planar API
	// requires the caller to supply the planes array).
	std::vector<v4l2_plane> planes(fmt.fmt.pix_mp.num_planes);

	// Releases everything acquired so far; used on error paths and at exit.
	auto cleanup = [&]() {
		for (auto& b : buffers) {
			if (b.start != MAP_FAILED) {
				munmap(b.start, b.length);
			}
		}
		close(fd);
	};

	// Query each buffer's offset/length, mmap it, and queue it for capture.
	for(unsigned int n_buffers = 0 ; n_buffers < req.count; n_buffers++){
		struct v4l2_buffer buf;
		memset(&buf, 0, sizeof(buf));
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = n_buffers;
		buf.m.planes = planes.data();
		buf.length = planes.size();

		if(ioctl (fd, VIDIOC_QUERYBUF, &buf) < 0){
			std::cerr << "buf error" <<std::endl;
			cleanup();
			return -1; // BUG FIX: was continuing (and mmap-ing) on a closed fd
		}

		// Map plane 0 only: num_planes is 1 for packed UYVY.
		buffers[n_buffers].length = buf.m.planes[0].length;
		buffers[n_buffers].start = mmap(NULL, buf.m.planes[0].length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.planes[0].m.mem_offset);

		if (buffers[n_buffers].start == MAP_FAILED) {
			std::cerr << "mmap error" <<std::endl;
			cleanup();
			return -1;
		}

		// Hand the buffer to the driver's incoming queue.
		if(ioctl(fd, VIDIOC_QBUF, &buf) < 0){
			std::cerr <<" join buf error" <<std::endl;
			cleanup();
			return -1;
		}
	}

	// Start streaming.
	enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	if(ioctl(fd, VIDIOC_STREAMON, &type) <0){
		std::cerr <<"show error" <<std::endl;
		cleanup();
		return -1;
	}

	// Capture loop: dequeue a filled buffer, dump it to stdout, requeue it.
	// On any error we break (instead of returning) so STREAMOFF/munmap below
	// actually run — in the original that cleanup code was unreachable.
	int rc = 0;
	while(true){
		struct v4l2_buffer buf;
		memset(&buf, 0, sizeof(buf));
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.m.planes = planes.data();
		buf.length = planes.size();

		// Dequeue a filled frame (blocks until one is ready).
		if(ioctl(fd, VIDIOC_DQBUF, &buf) < 0){
			std::cerr <<" get buf error"<<std::endl;
			rc = -1;
			break;
		}
		static int frame_count = 0;
		std::cerr << "Got frame " << frame_count++ << " from buffer " << buf.index << ", size: " << buf.m.planes[0].bytesused << std::endl;

		// BUG FIX: check write() — on a closed pipe the original looped forever.
		if (write(STDOUT_FILENO, buffers[buf.index].start, buf.m.planes[0].bytesused) < 0) {
			std::cerr << "write failed: " << strerror(errno) << std::endl;
			ioctl(fd, VIDIOC_QBUF, &buf); // best effort: return the buffer
			rc = -1;
			break;
		}

		// Give the buffer back to the driver.
		if (ioctl(fd, VIDIOC_QBUF, &buf) < 0) {
			std::cerr << "VIDIOC_QBUF failed" << std::endl;
			rc = -1;
			break;
		}
	}

	// Stop streaming.
	if (ioctl(fd, VIDIOC_STREAMOFF, &type) < 0) {
		std::cerr << "VIDIOC_STREAMOFF failed" << std::endl;
	}

	// Unmap buffers and close the device.
	cleanup();
	return rc;
}

