#include <stdio.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <string.h>
#include <unistd.h>
#include <poll.h>
#include <list>
#include <vector>
#include <sys/ioctl.h>
#include <sys/mman.h>

#include "cache.h"
#include "camera.h"

// #include "image_utils.h"
// #include "dma_alloc.h"


/*摄像头应用编程框架*/
int Video_Init(char *dev,struct video *video_info)
{
    /*1.打开摄像设备文件*/
	int video_fd = open(dev,O_RDWR);
	if(video_fd<0)
        return -1;

    /*2.图像数据格式*/
	struct v4l2_format video_format;
    struct v4l2_requestbuffers video_requestbuffers;

	memset(&video_format,0,sizeof(struct v4l2_format));
	video_format.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;//捕获格式
	video_format.fmt.pix.width=640;
	video_format.fmt.pix.height=480;
	video_format.fmt.pix.pixelformat=V4L2_PIX_FMT_YUYV;
	if(ioctl(video_fd,VIDIOC_S_FMT,&video_format))
        return -2;
	video_info->width=video_format.fmt.pix.width;
	video_info->height=video_format.fmt.pix.height;
    printf("图像尺寸:%d * %d\n",video_info->width,video_info->height);

    /*3.申请空间*/
	memset(&video_requestbuffers,0,sizeof(struct v4l2_requestbuffers));
	video_requestbuffers.count=4;//缓冲区个数
	video_requestbuffers.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;//V4L2捕获框架格式
	video_requestbuffers.memory=V4L2_MEMORY_MMAP;//内存映射
	if(ioctl(video_fd,VIDIOC_REQBUFS,&video_requestbuffers))
        return -3;
	printf("缓冲区个数:%d\n",video_requestbuffers.count);
	/*4.将缓冲映射到进程空间*/
	int i=0;
	struct v4l2_buffer video_buffer;
	for(i=0;i<video_requestbuffers.count;i++){
		memset(&video_buffer,0,sizeof(struct v4l2_buffer));
		video_buffer.index=i;//
		video_buffer.memory=V4L2_MEMORY_MMAP;//内存映射
		video_buffer.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;//V4L2捕获框架格式
		if(ioctl(video_fd,VIDIOC_QUERYBUF,&video_buffer))
            return -4;
		video_info->mmap_size=video_buffer.length;/*映射大小*/
        video_info->mmapbuf[i]=(char *)mmap(NULL,video_buffer.length,
                                            PROT_READ|PROT_WRITE,MAP_SHARED,
                                            video_fd,
                                            video_buffer.m.offset);          
	}
	/*5.将缓冲区添加到采集队列*/
	for(i=0;i<video_requestbuffers.count;i++){
		memset(&video_buffer,0,sizeof(struct v4l2_buffer));
		video_buffer.index=i;//
		video_buffer.memory=V4L2_MEMORY_MMAP;//内存映射
		video_buffer.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;//V4L2V4L2捕获框架格式
		if(ioctl(video_fd,VIDIOC_QBUF,&video_buffer))
            return -5;
	}     
	/*开启摄像头*/
	int type=V4L2_BUF_TYPE_VIDEO_CAPTURE;//V4L2V4L2捕获框架格式
	if(ioctl(video_fd,VIDIOC_STREAMON,&type)){
		printf("摄像头开启失败\n");
		return -6;
	}
	return video_fd;    
}


/*YUYV转RGB888*/
void yuv_to_rgb(unsigned char *yuv_buffer,unsigned char *rgb_buffer,int iWidth,int iHeight)
{
	int x;
	int z=0;
	unsigned char *ptr = rgb_buffer;
	unsigned char *yuyv= yuv_buffer;
	for (x = 0; x < iWidth*iHeight; x++)
	{
		int r, g, b;
		int y, u, v;
		if (!z)
		y = yuyv[0] << 8;
		else
		y = yuyv[2] << 8;
		u = yuyv[1] - 128;
		v = yuyv[3] - 128;
		b = (y + (359 * v)) >> 8;
		g = (y - (88 * u) - (183 * v)) >> 8;
		r = (y + (454 * u)) >> 8;
		*(ptr++) = (b > 255) ? 255 : ((b < 0) ? 0 : b);
		*(ptr++) = (g > 255) ? 255 : ((g < 0) ? 0 : g);
		*(ptr++) = (r > 255) ? 255 : ((r < 0) ? 0 : r);
		if(z++)
		{
			z = 0;
			yuyv += 4;
		}
	}
}


// Remember the device path and allocate the shared frame-info record, then
// zero the cached V4L2 negotiation state so later "was a format chosen yet"
// checks start from a clean slate.
camera::camera(const char *dev):path(dev),
						cam_inf(std::make_shared<camera::camera_info>())
{
	memset(&video_requestbuffers, 0, sizeof(video_requestbuffers));
	memset(&video_format, 0, sizeof(video_format));
}
// Preference table consulted by enumerate_formats_and_resolutions(): the first
// entry the device supports wins. On RK356X each V4L2 format also carries the
// RGA source-format constant used for hardware conversion/scaling.
// NOTE(review): the NV12->YCrCb / NV21->YCbCr pairing looks inverted relative
// to the usual RGA naming (NV12 is normally YCbCr_420_SP) — verify against the
// librga headers before relying on color correctness.
#define RK_FORMAT_YCbCr_444_SP (0x17 << 8)
const camera::PixelFormatPriority camera::priority_formats[] = {
	
	{V4L2_PIX_FMT_RGB24, "RGB24"
		#ifdef TARGET_RK356X 
				,RK_FORMAT_RGB_888
		#endif
	},
	{V4L2_PIX_FMT_BGR24, "BGR24"
		#ifdef TARGET_RK356X 
				,RK_FORMAT_BGR_888
		#endif
	},
	
	{V4L2_PIX_FMT_NV12, "NV12"
		#ifdef TARGET_RK356X 
				,RK_FORMAT_YCrCb_420_SP
		#endif
	},
	{V4L2_PIX_FMT_NV21, "NV21"
		#ifdef TARGET_RK356X 
				,RK_FORMAT_YCbCr_420_SP
		#endif
	},
	{V4L2_PIX_FMT_NV16, "NV16"
		#ifdef TARGET_RK356X 
				,RK_FORMAT_YCbCr_422_SP
		#endif
	},
	{V4L2_PIX_FMT_NV24, "NV24"
		#ifdef TARGET_RK356X 
				// ,RK_FORMAT_YCrCb_422_SP // no matching enum value was found for this one
				,RK_FORMAT_YCbCr_422_SP	
		#endif
	},
};
/**
 * List every pixel format and frame size the driver advertises for `type`
 * (printed for diagnostics), then pick the highest-priority entry of
 * `priority_formats` that the device supports and store it in
 * video_format.fmt.pix.pixelformat (and, on RK356X, the matching RGA source
 * format in src_format). Leaves pixelformat at 0 when nothing matches so the
 * caller falls back to the driver default.
 */
void camera::enumerate_formats_and_resolutions(int camera_fd,int type) 
{
	int  ret  = 0;
    struct v4l2_fmtdesc fmt;
    memset(&fmt, 0, sizeof(fmt));
    fmt.index = 0;
    fmt.type = type;//V4L2_BUF_TYPE_VIDEO_CAPTURE;
	// probe index 0 once so an empty/failed enumeration is reported explicitly
	ret  = ioctl(camera_fd, VIDIOC_ENUM_FMT, &fmt);
	if(ret){
		printf("获取格式enum失败  %d \n",ret);
	}
	std::vector<__u32> supported_formats;
    while (ioctl(camera_fd, VIDIOC_ENUM_FMT, &fmt) == 0) {
        printf("支持的格式: %s (四字符码: %c%c%c%c)\n", 
											fmt.description, 
											fmt.pixelformat & 0xFF, 
											(fmt.pixelformat >> 8) & 0xFF, 
											(fmt.pixelformat >> 16) & 0xFF, 
											(fmt.pixelformat >> 24) & 0xFF);
		supported_formats.push_back(fmt.pixelformat);	
        struct v4l2_frmsizeenum frmsize;
        memset(&frmsize, 0, sizeof(frmsize));
        frmsize.pixel_format = fmt.pixelformat;
        frmsize.index = 0;

        while (ioctl(camera_fd, VIDIOC_ENUM_FRAMESIZES, &frmsize) == 0) {
            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
                printf("  支持的分辨率: %dx%d\n", 
												frmsize.discrete.width, 
												frmsize.discrete.height);
            } else if (frmsize.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
                printf("  支持的分辨率范围: %dx%d 到 %dx%d，步长 %dx%d\n",
												frmsize.stepwise.min_width, 
												frmsize.stepwise.min_height,
												frmsize.stepwise.max_width, 
												frmsize.stepwise.max_height,
												frmsize.stepwise.step_width, 
												frmsize.stepwise.step_height);
            }
            frmsize.index++;
        }

        fmt.index++;
    }
	// Pick the best supported format by table priority.
	// FIX: loop index is size_t (was a signed/unsigned compare) and the inner
	// loop variable no longer shadows the v4l2_fmtdesc `fmt` above.
    for (size_t i = 0; i < sizeof(priority_formats) / sizeof(priority_formats[0]); i++) {
		for (const auto& supported : supported_formats) {
			if (priority_formats[i].v4l2_format == supported) {
				video_format.fmt.pix.pixelformat = priority_formats[i].v4l2_format;
#ifdef TARGET_RK356X 
				src_format = priority_formats[i].rk_rga_format;
#endif
				printf("使用优先格式: %s\n", priority_formats[i].description);
				break;
			}
		}
		if (video_format.fmt.pix.pixelformat) {
			break;  // a priority format was selected, stop searching
		}
    }
	if (video_format.fmt.pix.pixelformat == 0) {
		printf("未找到任何优先格式，使用设备默认格式。\n");
	}
}



/**
 * Open the camera at `path`, negotiate a capture format, mmap the kernel
 * buffers and spawn a detached capture thread that delivers converted frames
 * through `cam_recv`.
 *
 * @param width,height desired OUTPUT size (on RK356X the RGA scales to it)
 * @param pf           desired output pixel format
 * @param cam_recv     per-frame callback: (frame bytes, width, height, user ptr)
 * @param recvError    called with poll()'s return value on timeout (0) / error (<0)
 * @param p            opaque user pointer forwarded to cam_recv
 * @return pointer to the camera_info owned by this object, or nullptr on failure.
 *
 * Fixes vs. the previous revision:
 *  - `#elif` with no expression replaced by `#else` (failed to preprocess when
 *    TARGET_RK356X was undefined);
 *  - planes[] sized VIDEO_MAX_PLANES (length=VIDEO_MAX_PLANES told the driver
 *    it could fill that many descriptors while only 4 were allocated);
 *  - capture thread captures `recvError`/`p` BY VALUE — the old [&] kept
 *    references to locals of this function after it returned;
 *  - munmap uses the recorded cam_inf->mmap_size instead of a dangling local;
 *  - the conversion worker is joined before its shared queue is destroyed;
 *  - a MAP_FAILED mapping now aborts init instead of being used later.
 */
camera::camera_info * camera::camera_init(uint32_t width,uint32_t height,camera::Cam_pixelformat pf,
											std::function<void(std::vector<char>, int ,int ,void *)> cam_recv,
											std::function<void(int)> recvError,
											void *p	)
{
	int i=0,ret = 0;
	struct v4l2_capability cap;

	struct v4l2_buffer video_buffer;
	struct v4l2_plane planes[VIDEO_MAX_PLANES];

	std::thread tread;

	memset(planes,0,sizeof(planes));

	type=V4L2_BUF_TYPE_VIDEO_CAPTURE;// default to the single-planar capture API
	format__ = pf;
	/*1. open the camera device node*/
	camera_fd = open(path,O_RDWR);
	if(camera_fd <0 ){
		printf("打开失败 :%s \n",path);
		return nullptr;
	}

	printf("打开成功 %d \n",camera_fd);
	if (0 != ioctl(camera_fd, VIDIOC_QUERYCAP, &cap)) {
        printf("Not v4l2 compatible\n");
        goto error;
    }
	printf("设备捕获能力：%x\n",cap.capabilities);

	// capability checks are advisory only — we proceed and let later ioctls fail
	if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) && !(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE)) {
        printf("Capture not supported\n");
    }
 
    if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
        printf("Streaming IO Not Supported\n");
    }

	if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
		type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; // prefer multi-planar when supported
	}

    /*2. enumerate formats and pick the preferred pixelformat*/
	enumerate_formats_and_resolutions(camera_fd,type);

    /*3. configure the capture format*/
	video_format.type=type;
	video_format.fmt.pix.width=1920;  
	video_format.fmt.pix.height=1080;// capture size is fixed; output is scaled to width/height

#ifdef TARGET_RK356X
	dst_width = width;
	dst_height = height;
	dst_format =  RK_FORMAT_RGB_888;
	if(video_format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV24){ // NV24 input needs an intermediate step
		nv24_size = video_format.fmt.pix.width * video_format.fmt.pix.height *  3;
	}
	switch (pf)
	{
		case PIXEL_YUYV:
			dst_format = RK_FORMAT_YUYV_420;
		break;
		case PIXEL_NV12:
			dst_format = RK_FORMAT_YCrCb_420_SP;
		break;
		case PIXEL_NV21:
			dst_format = RK_FORMAT_YCbCr_420_SP;
			break;
		case PIXEL_RGB:
			dst_format =  RK_FORMAT_RGB_888;
			break;

	}
	// RGA is needed whenever the size or the pixel format must change
	if((video_format.fmt.pix.width != width) || src_format !=  dst_format){
		src_width = video_format.fmt.pix.width;
		src_height = video_format.fmt.pix.height;
		rk_rga_init(src_width,src_height,src_format,dst_width,dst_height,dst_format);
	}
#else	// FIX: was "#elif" with no condition
	video_format.fmt.pix.width=width;   
	video_format.fmt.pix.height=height;
#endif

	video_format.fmt.pix.field = V4L2_FIELD_NONE;
	Cam_pielformat_in =video_format.fmt.pix.pixelformat;
	ret = ioctl(camera_fd,VIDIOC_S_FMT,&video_format);
	if(ret){
		printf("设置图像失败 :%d*%d   %d \n",width,height,ret);
		// fall back to NV24 and retry once
		video_format.fmt.pix.pixelformat = V4L2_PIX_FMT_NV24;
#ifdef TARGET_RK356X 
		printf("使用NV24图像重新设置 :%d*%d   %d \n",width,height,ret);
		for(i = 0; priority_formats[i].rk_rga_format != 0; i++){
			if(priority_formats[i].v4l2_format == V4L2_PIX_FMT_NV24){
				src_format = priority_formats[i].rk_rga_format;
				break;
			}
		}
#endif
		ret = ioctl(camera_fd,VIDIOC_S_FMT,&video_format);
		if(ret){
			goto error;
		}
	}

	cam_inf->width  = video_format.fmt.pix.width;
	cam_inf->height = video_format.fmt.pix.height;
	printf("图像尺寸:%d * %d  格式：%d \n",
					cam_inf->width,cam_inf->height,
					video_format.fmt.pix.pixelformat);

	/*4. request kernel capture buffers*/
	video_requestbuffers.count=4;
	video_requestbuffers.type=type;
	video_requestbuffers.memory=V4L2_MEMORY_MMAP;
	ret = ioctl(camera_fd,VIDIOC_REQBUFS,&video_requestbuffers);
	if(ret ){
		printf("申请缓冲区失败 :%d   %d \n",video_requestbuffers.count,ret);
		goto error;
	}
        
	printf("缓冲区个数:%d\n",video_requestbuffers.count);

	/*5. map each buffer into this process*/
	for(i=0;i<video_requestbuffers.count;i++){
		memset(&video_buffer,0,sizeof(struct v4l2_buffer));
		video_buffer.index=i;
		video_buffer.memory=V4L2_MEMORY_MMAP;
		video_buffer.type=type;

		if(type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE){
			video_buffer.m.planes = planes;
			video_buffer.length = VIDEO_MAX_PLANES;
		}
		if(ioctl(camera_fd,VIDIOC_QUERYBUF,&video_buffer)){
			printf("将缓冲映射到进程空间失败 \n");
			goto error;
		}

		if (type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
			cam_inf->mmapbuf[i] = (char *)mmap(NULL, video_buffer.length,
											   PROT_READ | PROT_WRITE, MAP_SHARED,
											   camera_fd, video_buffer.m.offset);
			cam_inf->mmap_size=video_buffer.length;/* size of one mapping */
		} else if (type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
			// only plane 0 is mapped — assumes single-plane payloads from this driver
			cam_inf->mmapbuf[i] = (char *)mmap(NULL, video_buffer.m.planes[0].length,
											   PROT_READ | PROT_WRITE, MAP_SHARED,
											   camera_fd, video_buffer.m.planes[0].m.mem_offset);
			cam_inf->mmap_size=video_buffer.m.planes[0].length;/* size of one mapping */
		}	
		if(cam_inf->mmapbuf[i] == MAP_FAILED){
			printf("buffer mmap failed\n");
			goto error;  // FIX: was only logged; the bad pointer was used later
		}
	}
	/*6. queue every buffer for capture*/
	for(i=0;i<video_requestbuffers.count;i++){
		memset(&video_buffer,0,sizeof(struct v4l2_buffer));
		video_buffer.index=i;
		video_buffer.memory=V4L2_MEMORY_MMAP;
		video_buffer.type=type;

		if (type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
			video_buffer.length = VIDEO_MAX_PLANES;
			video_buffer.m.planes = planes;
		}
		if(ioctl(camera_fd,VIDIOC_QBUF,&video_buffer)){
			printf("将缓冲区添加到采集队列失败 \n");
			goto error;
		}
	}  
	/* start streaming */
	if(ioctl(camera_fd,VIDIOC_STREAMON,&type)){
		printf("摄像头开启失败\n");
		goto error;
	}

	thread_run = true;
	recv = cam_recv;

	printf("============ camera_fd %d \n",camera_fd);
	tread=  std::thread(
		[this, recvError, p]()->int{
		struct v4l2_buffer video_buff;
		struct v4l2_plane planes[VIDEO_MAX_PLANES];

		struct pollfd fds;
		fds.fd=camera_fd;   // watch the camera fd
		fds.events=POLLIN;  // readable == frame ready
		fds.revents=0;

		std::vector<char> cam_buf(cam_inf->mmap_size);
		CircularBuffer<std::vector<char>> cam_cache(max_buf_count);

		// Conversion/callback worker: pops raw frames off cam_cache, converts
		// them to the requested output format and invokes the user callback.
		std::thread cb_thread([&](){
			std::vector<char> cam_temp(cam_inf->mmap_size*4);
			while(thread_run){
				if(recv){
					try{
						if(cam_cache.size()){
							if(video_format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV24){ // NV24 input: decimate to NV16 first
								std::vector<char> cam_nv16(cam_inf->mmap_size*3/2);
								nv24_to_nv16((uint8_t*)cam_cache.pop().data(), (uint8_t*)cam_nv16.data(), 
														video_format.fmt.pix.width, video_format.fmt.pix.height);
								if(convert(cam_nv16.data(),cam_temp.data()) >= 0 ){
									recv(cam_temp, dst_width,dst_height,p);
								}						
							}else{
								if(convert(cam_cache.pop().data(),cam_temp.data()) >= 0 ){
									recv(cam_temp, dst_width,dst_height,p);
								}
							}
						}	
					}catch(const std::runtime_error& e){
						// CircularBuffer::pop() throws when racing an empty queue;
						// best-effort delivery, just skip this round
					}
				}		
			}
		});

		printf("开始采集 %s 缓冲区大小 %d \n",type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE?"多平面":"单平面",cam_inf->mmap_size);
		while (thread_run){
			int poll_ret = poll(&fds,1,5000);
			if(poll_ret == 0){
				printf("pool time out \n");
				if(recvError){
					recvError(poll_ret);
				}
				break;
			} else if (poll_ret < 0){
				printf("pool error \n");
				if(recvError){
					recvError(poll_ret);
				}
				break;
			}
			memset(&video_buff,0,sizeof(struct v4l2_buffer));
			video_buff.memory=V4L2_MEMORY_MMAP;
			video_buff.type=type;

			if (type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
				video_buff.length = VIDEO_MAX_PLANES;
				video_buff.m.planes = planes;
			}

			if(ioctl(camera_fd,VIDIOC_DQBUF,&video_buff)){
				printf("VIDIOC_DQBUF err \n");
				break;
			}

			// copy the frame out of the mmap'ed buffer before re-queueing it
			if (type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
				memcpy(cam_buf.data(), cam_inf->mmapbuf[video_buff.index], video_buff.length);
			} else if (type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
				memcpy(cam_buf.data(), cam_inf->mmapbuf[video_buff.index], video_buff.m.planes[0].length);
			}
			
			cam_cache.push(cam_buf);

			if(ioctl(camera_fd,VIDIOC_QBUF,&video_buff)){
				printf("VIDIOC_QBUF err \n");
				break;
			}
		}

		// FIX: stop and JOIN the worker before cam_cache/cam_buf are destroyed
		// (it used to be detached and kept touching them after early exits).
		thread_run = false;
		if(cb_thread.joinable()){
			cb_thread.join();
		}
	
		printf("取消映射\n");
		for(int i=0;i<video_requestbuffers.count;i++){
			if (cam_inf->mmapbuf[i]) {
				munmap(cam_inf->mmapbuf[i], cam_inf->mmap_size);
			}	
		}
		
		printf("退出关闭摄像头\n");
		if(ioctl(camera_fd,VIDIOC_STREAMOFF,&type)){
			printf("摄像头关闭失败\n");
		}
		std::unique_lock<std::mutex> lck(mtx);
		cv.notify_all();	// wake camera_deInit() waiting for shutdown
		return 0;
	});
	tread.detach();
	printf("初始化完成\n");
	return cam_inf.get();

error:
	close(camera_fd);
	camera_fd = -1;
	return nullptr;

}

// Record source/destination geometry for the RGA scaling path and precompute
// the per-frame buffer sizes from each format's bytes-per-pixel.
void camera::rk_rga_init(int _src_Width,int _src_Height,int _src_format,int _dst_Width,int _dst_Height,int _dst_format)
{
	src_width  = _src_Width;
	src_height = _src_Height;
	src_format = _src_format;

	dst_width  = _dst_Width;
	dst_height = _dst_Height;
	dst_format = _dst_format;

	src_buf_size = src_width * src_height * get_bpp_from_format(src_format);
	dst_buf_size = dst_width * dst_height * get_bpp_from_format(dst_format);

	printf("RGA 缩放 图像：尺寸：%d * %d size=%d  目标尺寸：%d * %d size=%d \n",
				src_width,src_height,src_buf_size,dst_width,dst_height,dst_buf_size);
}
/**
 * Convert one captured frame (`src_buffer`, in Cam_pielformat_in) into the
 * requested output format (`format__`) in `dts_buffer`.
 *
 * On RK356X the RGA hardware handles both scaling and format conversion;
 * elsewhere a software path handles the supported combinations.
 *
 * @return 0 on success, -1 on RGA check/process failure.
 */
int  camera::convert(char *src_buffer,char *dts_buffer)
{
	int ret = 0;
#ifdef TARGET_RK356X
	IM_STATUS ret_rga = IM_STATUS_NOERROR;
    im_rect drect;
	rga_buffer_t rga_buf_src;
    rga_buffer_t rga_buf_dst;

	// destination rectangle: the full output frame
	drect.x = 0;
	drect.y = 0;
	drect.width = dst_width;
	drect.height = dst_height;

	rga_buf_src = 
		wrapbuffer_virtualaddr(src_buffer, src_width, src_height, src_format, src_width, src_height);

	rga_buf_dst = 
		wrapbuffer_virtualaddr(dts_buffer, dst_width, dst_height, dst_format, dst_width, dst_height);

	// validate the src/dst descriptors before submitting the job
	ret = imcheck(rga_buf_src, rga_buf_dst, {}, drect);
	if (IM_STATUS_NOERROR != ret) {
		printf("%d, check error! %s \n", __LINE__,  imStrError((IM_STATUS)ret));
		return -1;
	}		
	ret_rga = improcess(rga_buf_src, rga_buf_dst, {}, {}, drect, {}, IM_SYNC);
	if (ret_rga != IM_STATUS_SUCCESS) {
		printf("Error on improcess STATUS=%d\n", ret_rga);
		printf("RGA error message: %s\n", imStrError((IM_STATUS)ret_rga));
		ret = -1;
	}
#else
	if(format__ == PIXEL_YUYV){
		if(Cam_pielformat_in == V4L2_PIX_FMT_NV21){
			// TODO(review): NV21 -> YUYV is not implemented; output left untouched
		}else if(Cam_pielformat_in  == V4L2_PIX_FMT_YUYV){
			// FIX: the old code only reassigned the local pointer, which the
			// caller never saw — the passthrough frame must actually be copied.
			// YUYV is 2 bytes per pixel.
			memcpy(dts_buffer, src_buffer, cam_inf->width * cam_inf->height * 2);
		}
	}else if(format__ == PIXEL_NV12){
		if(Cam_pielformat_in == V4L2_PIX_FMT_NV21){
			nv21_to_nv12((unsigned char *)src_buffer,(unsigned char *)dts_buffer,
							cam_inf->width,cam_inf->height);		
		}else if(Cam_pielformat_in  == V4L2_PIX_FMT_YUYV){
			yuyv_to_nv12(src_buffer,dts_buffer,
							cam_inf->width,cam_inf->height);
		}
	}else if(format__ == PIXEL_RGB){
		if(Cam_pielformat_in == V4L2_PIX_FMT_NV21){
			nv21_to_rgb((unsigned char *)src_buffer,(unsigned char *)dts_buffer,
						cam_inf->width,cam_inf->height);				 		
		}else if(Cam_pielformat_in  == V4L2_PIX_FMT_YUYV){
			yuv_to_rgb(src_buffer,dts_buffer,
							cam_inf->width,cam_inf->height);
		}
	}
#endif
	return ret;
}

// Placeholder: RGA buffer handles are currently created per-frame via
// wrapbuffer_virtualaddr() in convert(), so there is nothing persistent to
// release here yet. The commented code below is the intended shape once
// long-lived src/dst RGA buffers are introduced.
void camera::rk_rga_deinit()
{
	// releasebuffer_handle(src_handle);
	// releasebuffer_handle(dst_handle);
	// if (src_buf != NULL) {
	// 	free(src_buf);
	// 	src_buf = NULL;
	// }

	// if (dst_buf != NULL) {
	// 	free(dst_buf);
	// 	dst_buf = NULL;
	// }
}
// Tear down the capture thread and close the device (idempotent via
// camera_deInit's thread_run guard).
camera::~camera()
{
	camera_deInit();
	
}
// Public request to stop capturing; same effect as destruction.
void camera::set_quit() 
{
	camera_deInit();
}
/**
 * Signal the capture thread to stop, wait (bounded, 5s) for it, then close
 * the device fd. Safe to call more than once: the thread_run guard makes
 * subsequent calls no-ops.
 */
void camera::camera_deInit()
{
	if(thread_run){
		thread_run = false;
		
		printf("关闭摄像头 %d * %d \n",cam_inf->height,cam_inf->width);
		std::unique_lock<std::mutex> lck(mtx);
		auto timeout = std::chrono::steady_clock::now() + std::chrono::seconds(5);
		// NOTE(review): the predicate tests the flag this function just cleared,
		// so wait_until returns immediately rather than truly waiting for the
		// capture thread's notify_all; a dedicated "thread exited" flag is needed
		// to make this wait meaningful.
		bool finished = cv.wait_until(lck, timeout, 
									[this](){
										return !thread_run;
									});
		if (!finished) {
			printf("等待摄像头线程超时，强制退出\n");
		}
		// FIX: fd 0 is a valid descriptor and -1 must not be passed to close();
		// the old `if(camera_fd)` got both cases wrong.
		if(camera_fd >= 0){
			close(camera_fd);
			camera_fd = -1;
		}
		
		printf("资源清理完成\n");
	}

}
/*
 * Convert one packed YUYV (YUV 4:2:2) frame into a packed 24-bit frame.
 * Mirrors the free-function overload above.
 *
 * FIX: pixel bytes must be read as UNSIGNED. `char` is signed on most ABIs,
 * so luma/chroma values >= 128 were read as negative numbers and corrupted
 * the fixed-point math (e.g. a chroma byte of 255 became -129 after -128).
 */
void camera::yuv_to_rgb(char *yuv_buffer,char *rgb_buffer,int iWidth,int iHeight)
{
	int x;
	int z=0;
	unsigned char *ptr = (unsigned char *)rgb_buffer;
	unsigned char *yuyv= (unsigned char *)yuv_buffer;
	for (x = 0; x < iWidth*iHeight; x++)
	{
		int r, g, b;
		int y, u, v;
		// each 4-byte YUYV group carries two pixels: Y0 U Y1 V
		if (!z)
		y = yuyv[0] << 8;
		else
		y = yuyv[2] << 8;
		u = yuyv[1] - 128;
		v = yuyv[3] - 128;
		b = (y + (359 * v)) >> 8;
		g = (y - (88 * u) - (183 * v)) >> 8;
		r = (y + (454 * u)) >> 8;
		// clamp each channel to [0,255]
		*(ptr++) = (b > 255) ? 255 : ((b < 0) ? 0 : b);
		*(ptr++) = (g > 255) ? 255 : ((g < 0) ? 0 : g);
		*(ptr++) = (r > 255) ? 255 : ((r < 0) ? 0 : r);
		if(z++)
		{
			z = 0;
			yuyv += 4;  // advance to the next 2-pixel group
		}
	}
}
// NV12 (Y plane + interleaved CbCr at quarter resolution) -> packed RGB888.
// Each 2x2 block of luma pixels shares one CbCr pair; BT.601 float math,
// channels clamped to [0,255].
void camera::nv12_to_rgb(unsigned char *nv12_buffer, unsigned char *rgb_buffer, int width, int height) 
{
    const int y_size = width * height;

    unsigned char *luma   = nv12_buffer;           // Y plane
    unsigned char *chroma = nv12_buffer + y_size;  // interleaved CbCr plane

    for (int row = 0; row < height; row++) {
        for (int col = 0; col < width; col++) {
            const int pix = row * width + col;
            const int y_value = luma[pix];

            // one CbCr pair per 2x2 luma block
            const int uv_index = (row / 2) * (width / 2) + (col / 2);
            const int u_value = chroma[uv_index * 2];
            const int v_value = chroma[uv_index * 2 + 1];

            int r = y_value + 1.402 * (v_value - 128);
            int g = y_value - 0.344 * (u_value - 128) - 0.714 * (v_value - 128);
            int b = y_value + 1.772 * (u_value - 128);

            r = (r > 255) ? 255 : ((r < 0) ? 0 : r);
            g = (g > 255) ? 255 : ((g < 0) ? 0 : g);
            b = (b > 255) ? 255 : ((b < 0) ? 0 : b);

            rgb_buffer[pix * 3]     = (unsigned char)r;
            rgb_buffer[pix * 3 + 1] = (unsigned char)g;
            rgb_buffer[pix * 3 + 2] = (unsigned char)b;
        }
    }
}
/*
 * NV21 (Y plane + interleaved CrCb at quarter resolution) -> packed RGB888.
 *
 * FIX: NV21 stores chroma as Cr(V) first, then Cb(U) — the opposite of NV12.
 * The previous code read the pair in NV12 (U,V) order, swapping the chroma
 * channels and producing wrong colors.
 */
void camera::nv21_to_rgb(unsigned char *nv21_buffer, unsigned char *rgb_buffer, int width, int height) 
{
    int y_size = width * height;

    unsigned char *y_plane = nv21_buffer;           // Y plane
    unsigned char *uv_plane = nv21_buffer + y_size; // interleaved CrCb plane

    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            int y_value = y_plane[y * width + x];

            // one VU pair per 2x2 luma block; stride of the chroma plane is `width` bytes
            int uv_index = (y / 2) * width + (x / 2) * 2;
            int v_value = uv_plane[uv_index];     // Cr comes first in NV21
            int u_value = uv_plane[uv_index + 1]; // then Cb

            // BT.601 YUV -> RGB
            int r = y_value + 1.402 * (v_value - 128);
            int g = y_value - 0.344 * (u_value - 128) - 0.714 * (v_value - 128);
            int b = y_value + 1.772 * (u_value - 128);

            // clamp to [0,255]
            r = (r > 255) ? 255 : ((r < 0) ? 0 : r);
            g = (g > 255) ? 255 : ((g < 0) ? 0 : g);
            b = (b > 255) ? 255 : ((b < 0) ? 0 : b);

            rgb_buffer[(y * width + x) * 3] = (unsigned char)r;     // R
            rgb_buffer[(y * width + x) * 3 + 1] = (unsigned char)g; // G
            rgb_buffer[(y * width + x) * 3 + 2] = (unsigned char)b; // B
        }
    }
}

/*
 * NV24 (full-resolution interleaved CbCr) -> NV12 (quarter-resolution CbCr).
 * The Y plane is copied verbatim; chroma is decimated 2x2 by keeping the
 * top-left sample of each block.
 *
 * FIX: the source index must address pixel (2y, 2x) of the full-resolution
 * NV24 chroma plane. The previous code read (y, x), i.e. only the top-left
 * quadrant of the chroma plane, producing badly wrong colors.
 */
void camera::nv24_to_nv12(uint8_t* nv24, uint8_t* nv12, int width, int height) 
{
    int y_size = width * height;

    uint8_t* y_plane = nv24;                  // Y plane
    uint8_t* uv_plane_nv24 = nv24 + y_size;   // NV24 CbCr plane (one pair per pixel)
    uint8_t* uv_plane_nv12 = nv12 + y_size;   // NV12 CbCr plane (one pair per 2x2 block)

    // Y plane is byte-identical in both layouts
    memcpy(nv12, y_plane, y_size);

    // 2x2 chroma decimation
    for (int y = 0; y < height / 2; y++) {
        for (int x = 0; x < width / 2; x++) {
            int src = ((y * 2) * width + (x * 2)) * 2;  // pair at pixel (2y, 2x)
            uint8_t cb = uv_plane_nv24[src];
            uint8_t cr = uv_plane_nv24[src + 1];

            int dst = (y * (width / 2) + x) * 2;
            uv_plane_nv12[dst] = cb;
            uv_plane_nv12[dst + 1] = cr;
        }
    }
}
// NV24 (Y plane + full-resolution interleaved CbCr) -> packed RGB888.
// Every pixel carries its own CbCr pair, so no chroma upsampling is needed.
// BT.601 float math, channels clamped to [0,255].
void camera::nv24_to_rgb888(uint8_t* nv24, uint8_t* rgb, int width, int height) 
{
    const int y_size = width * height;
    const uint8_t* luma   = nv24;           // Y plane
    const uint8_t* chroma = nv24 + y_size;  // interleaved CbCr plane

    for (int row = 0; row < height; row++) {
        for (int col = 0; col < width; col++) {
            const int pix = row * width + col;

            const int Y  = luma[pix];
            const int Cb = chroma[pix * 2];
            const int Cr = chroma[pix * 2 + 1];

            int R = Y + 1.402 * (Cr - 128);
            int G = Y - 0.344 * (Cb - 128) - 0.714 * (Cr - 128);
            int B = Y + 1.772 * (Cb - 128);

            R = (R > 255) ? 255 : ((R < 0) ? 0 : R);
            G = (G > 255) ? 255 : ((G < 0) ? 0 : G);
            B = (B > 255) ? 255 : ((B < 0) ? 0 : B);

            rgb[pix * 3]     = R;
            rgb[pix * 3 + 1] = G;
            rgb[pix * 3 + 2] = B;
        }
    }
}
/*
 * NV24 (full-resolution CbCr) -> NV21 (quarter-resolution CrCb).
 * Copies Y, decimates chroma 2x2 (keeping the top-left sample of each block)
 * and swaps the pair into NV21's Cr-first order.
 *
 * FIX: the previous code did no downsampling at all — it swapped the first
 * y_size/2 bytes of the NV24 chroma plane 1:1, which only covers the top
 * quarter of the image's chroma and leaves it at the wrong resolution.
 */
void camera::nv24_to_nv21(uint8_t* nv24, uint8_t* nv21, int width, int height) 
{
    int y_size = width * height;

    memcpy(nv21, nv24, y_size);  // Y plane is identical

    uint8_t* uv_in = nv24 + y_size;   // CbCr, one pair per pixel
    uint8_t* uv_out = nv21 + y_size;  // CrCb, one pair per 2x2 block

    for (int y = 0; y < height / 2; y++) {
        for (int x = 0; x < width / 2; x++) {
            int src = ((y * 2) * width + (x * 2)) * 2;  // pair at pixel (2y, 2x)
            int dst = (y * (width / 2) + x) * 2;
            uv_out[dst]     = uv_in[src + 1];  // Cr first in NV21
            uv_out[dst + 1] = uv_in[src];      // then Cb
        }
    }
}
// NV24 (4:4:4, one CbCr pair per pixel) -> NV16 (4:2:2, one pair per two
// horizontal pixels). The Y plane is copied verbatim; for each 2-pixel group
// the CbCr pair of the LEFT pixel is kept (no averaging).
void camera::nv24_to_nv16(uint8_t* nv24, uint8_t* nv16, int width, int height) 
{
    const int y_size = width * height;

    const uint8_t* src_uv = nv24 + y_size;  // full-resolution CbCr
    uint8_t* dst_uv = nv16 + y_size;        // half-horizontal CbCr

    // Y plane is byte-identical in both layouts
    memcpy(nv16, nv24, y_size);

    // horizontal 2:1 chroma decimation, full vertical resolution
    for (int row = 0; row < height; row++) {
        for (int pair = 0; pair < width / 2; pair++) {
            const int src = (row * width + pair * 2) * 2;       // left pixel's pair
            const int dst = (row * (width / 2) + pair) * 2;
            dst_uv[dst]     = src_uv[src];
            dst_uv[dst + 1] = src_uv[src + 1];
        }
    }
}
void camera::nv24_to_nv16_neon(uint8_t* nv24, uint8_t* nv16, int width, int height) 
{
    int y_size = width * height;
    memcpy(nv16, nv24, y_size); // Copy Y plane

    uint8_t* src_uv = nv24 + y_size; // NV24 的 CbCr 平面
    uint8_t* dst_uv = nv16 + y_size; // NV16 的 CbCr 平面

    int uv_total = y_size / 2;

    // NEON 处理（每次处理 8 对 CbCr）
    for (int i = 0; i < uv_total; i += 8) {
        // 加载 8 对 CbCr（共 16 字节）
        uint8x8x2_t uv_pair = vld2_u8(src_uv + i * 2); // 交错加载 CbCr

        // 正确写入交错格式
        vst2_u8(dst_uv + i * 2, uv_pair); // 注意：i * 2 才是正确偏移
    }

    // 剩下的用普通方式补上
    for (int i = (uv_total / 8) * 8; i < uv_total; ++i) {
        dst_uv[i * 2]     = src_uv[i * 2];
        dst_uv[i * 2 + 1] = src_uv[i * 2 + 1];
    }
}
/*
 * Packed YUYV (4:2:2) -> NV12 (semi-planar 4:2:0).
 * Y is taken from every other input byte; chroma is taken from every other
 * input ROW (k*4*width skips two YUYV rows, each 2*width bytes) and every
 * other pixel pair, giving the required 2x2 decimation.
 *
 * FIX: the UV pair was written as (U at n, V at n-1) — on the first
 * iteration n==0, so V landed at nv12[ynum-1] and CLOBBERED the last Y byte,
 * and the whole chroma plane ended up in V,U order shifted by one. NV12 is
 * Cb(U) first, Cr(V) second.
 */
void  camera::yuyv_to_nv12(char *yuyv_in, char * nv12,int width,int height)
{
	int ynum = width * height;  
	int i,j,k=0, n; 

	int total_y = height/2;
	int total_x = width/2;

	// Y plane: every even byte of the YUYV stream
	for(i = 0; i < ynum; i++){
		nv12[i] = yuyv_in[i<<1]; // i*2
    }
	n = 0;
	for(i=0; i<total_y; i++){
		for(j=0; j<total_x; j++){
			// U (Cb) first, then V (Cr): NV12 interleave order
			nv12[ynum + n]     = yuyv_in[k*4*width + 1 + j*4];
			nv12[ynum + n + 1] = yuyv_in[k*4*width + 3 + j*4];
			n = n + 2;
		}
		k++; // advance two source rows per output chroma row
	}
}


/*
 * NV21 -> NV12: same semi-planar 4:2:0 layout, chroma pair order swapped
 * (NV21 interleaves Cr,Cb; NV12 interleaves Cb,Cr).
 *
 * FIX: the previous code wrote all U bytes followed by all V bytes — that is
 * the PLANAR I420 layout, not NV12, which keeps the chroma INTERLEAVED.
 */
void camera::nv21_to_nv12(unsigned char *nv21_buffer, unsigned char *nv12_buffer, int width, int height) 
{
    int y_size = width * height;
    int uv_size = y_size / 2;

    memcpy(nv12_buffer, nv21_buffer, y_size);  // Y plane is identical

    unsigned char *nv21_uv = nv21_buffer + y_size;
    unsigned char *nv12_uv = nv12_buffer + y_size;

    // swap each interleaved pair: V,U -> U,V
    for (int i = 0; i < uv_size; i += 2) {
        nv12_uv[i]     = nv21_uv[i + 1];  // Cb
        nv12_uv[i + 1] = nv21_uv[i];      // Cr
    }
}

