/*********************************************************************************
 *      Copyright:  (C) 2024 YuanLian IoT Project development
 *                  All rights reserved.
 *
 *       Filename:  rk3568_opencv_QR_video.cpp
 *    Description:  This file
 *
 *        Version:  1.0.0(01/02/24)
 *         Author:  wang aifei <wangaifei163163@163.com>
 *      ChangeLog:  1, Release initial version on "01/02/24 15:34:41"
 *
 ********************************************************************************/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <getopt.h>           
#include <fcntl.h>             
#include <unistd.h>
#include <errno.h>
#include <malloc.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <asm/types.h>         
#include <linux/videodev2.h>
#include <ctime>
#include <iostream>

#include "opencv2/opencv.hpp"
#include <opencv2/highgui.hpp>


#include "zbar.h"

using namespace std;
using namespace cv;


/* Note: the screen resolution must be at least as large as the frame size */
#define FRAME_WIDTH     640
#define FRAME_HEIGH     480

#define LCD_WIDTH       1080
#define LCD_HEIGH       1920

#define COUNT 		4


/* One mapped capture buffer: the user-space start address and byte
 * length of a single V4L2 buffer obtained with mmap() in
 * v4l2_query_buffer().
 */
struct v4l2_buffer_unit 
{
	void           *start;    /* user-space address of the mapped buffer */
	size_t         length;    /* length of the mapping in bytes */
}; 

int                        	fd_camera = -1;      /* /dev/video0 camera descriptor */
int                        	fd_lcd = -1;         /* /dev/fb0 framebuffer descriptor */
void                       	*screen_base = NULL; /* mmap()ed LCD framebuffer memory */
struct v4l2_buffer_unit   	*buffer_unit = NULL; /* array of COUNT mapped capture buffers */
void                       	*rgb_buffer = NULL;  /* one frame of converted RGB32 pixels */

/* YUV -> RGB conversion helpers: the functions below convert NV12 or
 * YUV 4:2:2 frame data to 32-bit-per-pixel data for display on the LCD.
 */

/* Clamp 'value' into the inclusive range [min, max]. */
int clamp(int value, int min, int max) {
    if (value < min)
        return min;
    if (value > max)
        return max;
    return value;
}

/* Convert one NV12 frame to 4-bytes-per-pixel data.
 *
 * NOTE(review): the conversion coefficients are applied with R and B
 * swapped relative to the usual BT.601 formulas, so the bytes are
 * effectively written in B,G,R,A order — presumably to match the BGRA
 * layout of the LCD framebuffer this feeds; confirm against the panel.
 * The fourth byte (alpha) is always forced to 255 (opaque).
 */
void NV12_T_RGBA(int width, int height,  unsigned char* YUV, unsigned char* rgba)
{
    int frameSize = width * height;
    unsigned char *out = rgba;

    for (int row = 0; row < height; row++) {
        /* NV12 layout: full-resolution Y plane, then one interleaved
         * UV row shared by every two Y rows (2x2 chroma subsampling). */
        const unsigned char *y_row  = YUV + row * width;
        const unsigned char *uv_row = YUV + frameSize + (row / 2) * width;

        for (int col = 0; col < width; col++) {
            int Y = y_row[col];
            int U = uv_row[col & ~1];
            int V = uv_row[(col & ~1) + 1];

            *out++ = clamp((int)Y + 2.03211 * (U - 128), 0, 255);
            *out++ = clamp((int)Y - 0.39465 * (U - 128) - 0.58060 * (V - 128), 0, 255);
            *out++ = clamp((int)Y + 1.13983 * (V - 128), 0, 255);
            *out++ = 255;   /* fully opaque */
        }
    }
}


/* Clip an integer into the 0..255 byte range. */
static int yuv422_clip(int v)
{
    return v < 0 ? 0 : (v > 255 ? 255 : v);
}

/* Convert a packed YUYV (YUV 4:2:2) frame to 32-bit pixels laid out as
 * B,G,R,A with alpha forced to 0xFF. Each 4-byte YUYV group (Y0 U Y1 V)
 * yields two output pixels that share the same chroma pair.
 * Uses the integer BT.601 approximation (coefficients scaled by 256).
 */
void yuv422_rgb32(const unsigned char* yuv422_buffer, unsigned char* rgb32_buffer, int width, int height) {
    const unsigned char *src = yuv422_buffer;
    unsigned char *dst = rgb32_buffer;
    int row, col, k;

    for (row = 0; row < height; row++) {
        for (col = 0; col < width; col += 2) {
            int luma[2];
            luma[0] = *src++;
            int d   = *src++ - 128;   /* U offset */
            luma[1] = *src++;
            int e   = *src++ - 128;   /* V offset */

            /* emit both pixels of the pair with shared chroma */
            for (k = 0; k < 2; k++) {
                int c = 298 * (luma[k] - 16);
                *dst++ = (unsigned char)yuv422_clip((c + 516 * d + 128) >> 8);           /* B */
                *dst++ = (unsigned char)yuv422_clip((c - 100 * d - 208 * e + 128) >> 8); /* G */
                *dst++ = (unsigned char)yuv422_clip((c + 409 * e + 128) >> 8);           /* R */
                *dst++ = 0xFF;                                                           /* A */
            }
        }
    }
}

/* Convert an NV12 frame to 32-bit pixels laid out as B,G,R,A with alpha
 * forced to 0xFF, using the integer BT.601 approximation.
 *
 * BUG FIX: the original advanced uv_index by 2 bytes for EVERY pixel of
 * EVERY row, consuming width*height*2 bytes of chroma, but the NV12 UV
 * plane is 2x2-subsampled and holds only width*height/2 bytes — a large
 * out-of-bounds read and wrong colors for everything past the first
 * pixels. Chroma must be indexed as (row/2, col&~1).
 */
void nv12_to_rgb32(const unsigned char* nv12_buffer, unsigned char* rgb32_buffer, int width, int height) {
    int frame_size = width * height;
    int rgb32_index = 0;

    for (int i = 0; i < height; i++) {
        for (int j = 0; j < width; j++) {
            int y = nv12_buffer[i * width + j];

            /* one interleaved UV pair is shared by a 2x2 block of Y samples */
            int uv = frame_size + (i / 2) * width + (j & ~1);
            int u = nv12_buffer[uv];
            int v = nv12_buffer[uv + 1];

            int c = y - 16;
            int d = u - 128;
            int e = v - 128;

            // Convert YUV to RGB
            int r = (298 * c + 409 * e + 128) >> 8;
            int g = (298 * c - 100 * d - 208 * e + 128) >> 8;
            int b = (298 * c + 516 * d + 128) >> 8;

            // Clip RGB values to the valid range (0-255)
            r = (r < 0) ? 0 : ((r > 255) ? 255 : r);
            g = (g < 0) ? 0 : ((g > 255) ? 255 : g);
            b = (b < 0) ? 0 : ((b > 255) ? 255 : b);

            // Store RGB32 pixel data (4 bytes per pixel)
            rgb32_buffer[rgb32_index++] = (unsigned char)b;
            rgb32_buffer[rgb32_index++] = (unsigned char)g;
            rgb32_buffer[rgb32_index++] = (unsigned char)r;
            rgb32_buffer[rgb32_index++] = 0xFF; // Alpha channel
        }
    }
}

/* Open the camera and LCD framebuffer device nodes, map the framebuffer
 * into user space, and allocate the per-frame RGB conversion buffer.
 *
 * Returns 0 on success, negative error code on failure.
 * NOTE(review): the framebuffer stride is hard-coded as (LCD_WIDTH+8)
 * pixels — presumably the driver's line_length; confirm with
 * FBIOGET_FSCREENINFO instead of hard-coding.
 */
int init_camera_lcd()
{
	fd_camera = open("/dev/video0", O_RDWR);
	if(fd_camera < 0)
	{
		printf("%s : open camera error\n", __FUNCTION__);
		return -1;
	}

#if 1
	fd_lcd = open("/dev/fb0", O_RDWR); 
	if(fd_lcd < 0)
	{
		printf("%s : open lcd error\n", __FUNCTION__);	
		return -2;
	}

	screen_base = mmap(NULL, (LCD_WIDTH+8)*LCD_HEIGH*4, PROT_READ|PROT_WRITE, MAP_SHARED, fd_lcd, 0); 
	if(MAP_FAILED == screen_base)   /* BUG FIX: mmap() signals failure with MAP_FAILED, not NULL */
	{
		screen_base = NULL;
		printf("%s : framebuffer mmap error\n", __FUNCTION__);
		return -3;
	}

	/* clear the whole screen */
	memset(screen_base, 0x0, (LCD_WIDTH+8)*LCD_HEIGH*4);
#endif

	/* one frame of RGB32 pixels (4 bytes per pixel) */
	rgb_buffer = malloc(FRAME_WIDTH*FRAME_HEIGH*4);
	if(NULL == rgb_buffer)   /* BUG FIX: allocation result was never checked */
	{
		printf("%s : malloc rgb_buffer error\n", __FUNCTION__);
		return -4;
	}

	return 0;
}

/* Query the V4L2 device capabilities and verify the device supports
 * multi-planar video capture (V4L2_CAP_VIDEO_CAPTURE_MPLANE).
 * Returns 0 on success, negative on error. */
int v4l2_query_capability()
{
	struct v4l2_capability cap;

	if (ioctl(fd_camera, VIDIOC_QUERYCAP, &cap) < 0)
	{
		printf("%s : VIDIOC_QUERYCAP error\n", __FUNCTION__);
		return -1;
	}

	/* the MPLANE capture bit must be set for this driver to be usable */
	if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE))
	{
		printf("%s : no capture device\n", __FUNCTION__);
		return -2;
	}

	return 0;
}

/* Enumerate the pixel formats supported by the camera and verify that
 * NV12 (V4L2_PIX_FMT_NV12) is among them.
 * Returns 0 if NV12 is supported, -1 otherwise. */
int v4l2_enum_format()
{
	int                     found = 0;
	struct v4l2_fmtdesc     fmtdesc;

	memset(&fmtdesc, 0, sizeof(fmtdesc));
	fmtdesc.index = 0;
	fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;

	/* VIDIOC_ENUM_FMT fails (EINVAL) once the index passes the last
	 * supported format, which terminates the loop. */
	while (ioctl(fd_camera, VIDIOC_ENUM_FMT, &fmtdesc) == 0)
	{
		/* BUG FIX: the original used '=' (assignment) instead of '==',
		 * so 'found' was set regardless of the actual format; it also
		 * tested fmtdesc before checking the ioctl result. */
		if (fmtdesc.pixelformat == V4L2_PIX_FMT_NV12)
		{
			found = 1;
			break;
		}

		fmtdesc.index++;
	}

	if (found != 1)
	{
		printf("%s : device don't support V4L2_PIX_FMT_NV12\n", __FUNCTION__);
		return -1;
	}

	printf("device support V4L2_PIX_FMT_NV12\n");

	return 0;
}

/* Query and print the currently configured capture format
 * (resolution, FOURCC pixel format, field order). */
void v4l2_get_format()
{
	int                  ret;
	struct v4l2_format   format;

	memset(&format, 0, sizeof(format));   /* BUG FIX: struct was used uninitialized */
	format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;

	ret = ioctl(fd_camera, VIDIOC_G_FMT, &format);
	if(ret < 0)
	{
		/* BUG FIX: strerror(errno) was passed where the function name
		 * was expected; also stop instead of printing garbage below */
		printf("%s : VIDIOC_G_FMT error: %s\n", __FUNCTION__, strerror(errno));
		return;
	}

	printf("width:%d height:%d\n", format.fmt.pix_mp.width, format.fmt.pix_mp.height);
	printf("fmt.type:\t\t%d\n",format.type);
	/* decode the FOURCC one byte at a time */
	printf("pix.pixelformat:\t%c%c%c%c\n",format.fmt.pix_mp.pixelformat & 0xFF, \
			(format.fmt.pix_mp.pixelformat >> 8) & 0xFF,\
			(format.fmt.pix_mp.pixelformat >> 16) & 0xFF, \
			(format.fmt.pix_mp.pixelformat >> 24) & 0xFF);
	printf("pix.height:\t\t%d\n",format.fmt.pix_mp.height);
	printf("pix.width:\t\t%d\n",format.fmt.pix_mp.width);
	printf("pix.field:\t\t%d\n",format.fmt.pix_mp.field);

}

/* Set the capture format: FRAME_WIDTH x FRAME_HEIGH, NV12, one plane.
 * (The original header comment wrongly said "get".) */
void v4l2_set_format()
{
	int                   ret;
	struct v4l2_format    format;

	/* BUG FIX: the struct was used uninitialized — stack garbage in the
	 * remaining pix_mp fields (plane_fmt, flags, ...) can make S_FMT
	 * fail or misbehave depending on the driver */
	memset(&format, 0, sizeof(format));

	format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	format.fmt.pix_mp.width = FRAME_WIDTH;
	format.fmt.pix_mp.height = FRAME_HEIGH;
	format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12;
	format.fmt.pix_mp.field =  V4L2_FIELD_ANY;
	format.fmt.pix_mp.num_planes = 1;

	ret = ioctl(fd_camera, VIDIOC_S_FMT, &format);
	if(ret <  0)
	{
		/* BUG FIX: strerror(errno) was passed where the function name
		 * was expected */
		printf("%s : VIDIOC_S_FMT error: %s\n", __FUNCTION__, strerror(errno));
		return;
	}

	cout<<"Set format ok!\n"<<endl;
}

/* Ask the driver for COUNT mmap-able capture buffers.
 * Returns 0 on success, -1 on error. */
int v4l2_require_buffer()
{
	int                         ret;
	struct v4l2_requestbuffers  req;

	memset(&req, 0, sizeof(req));   /* BUG FIX: reserved fields held stack garbage */
	req.count = COUNT;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	req.memory = V4L2_MEMORY_MMAP;

	ret = ioctl(fd_camera, VIDIOC_REQBUFS, &req);
	if(ret < 0)
	{
		/* BUG FIX: strerror(errno) was passed where the function name
		 * was expected */
		printf("%s : VIDIOC_REQBUFS error: %s\n", __FUNCTION__, strerror(errno));
		return -1;
	}

	printf("require buffer ok\n");

	return 0;
}

/* Query each of the COUNT kernel buffers requested by VIDIOC_REQBUFS and
 * map them into user space, recording address/length in buffer_unit[].
 * Returns 0 on success, negative on error. */
int v4l2_query_buffer()
{
	int                  		ret; 
	int                  		count;  
	struct v4l2_buffer   		buf;
	struct v4l2_plane 		planes[VIDEO_MAX_PLANES];

	/* one descriptor (start, length) per kernel buffer */
	buffer_unit = (v4l2_buffer_unit *)calloc(COUNT, sizeof(*buffer_unit));
	if(!buffer_unit)
	{
		printf("%s : calloc buffer_unit error\n", __FUNCTION__);
		return -3;   /* BUG FIX: original kept going and dereferenced NULL below */
	}

	for(count=0; count<COUNT; count++)
	{   
		memset(&buf,0,sizeof(buf));
		memset(planes,0,sizeof(planes));

		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = count;
		buf.m.planes = planes;
		buf.length = VIDEO_MAX_PLANES;

		ret = ioctl(fd_camera, VIDIOC_QUERYBUF, &buf);
		if(ret < 0)
		{
			/* BUG FIX: strerror(errno) was passed where the function
			 * name was expected */
			printf("%s : VIDIOC_QUERYBUF error: %s\n", __FUNCTION__, strerror(errno));
			return -1;
		}

		buffer_unit[count].length = buf.m.planes[0].length;
		buffer_unit[count].start = mmap(NULL, buf.m.planes[0].length, PROT_READ | PROT_WRITE, MAP_SHARED, fd_camera, buf.m.planes[0].m.mem_offset);

		if (MAP_FAILED == buffer_unit[count].start)   /* BUG FIX: mmap returns MAP_FAILED, not NULL */
		{	
			buffer_unit[count].start = NULL;
			printf("%s : mmap buffer_unit error\n", __FUNCTION__);	 
			return -2;
		}	 

	}

	printf("mmap!\n");

	return 0;
}

/* Queue all COUNT capture buffers into the driver's incoming queue and
 * start streaming. Returns 0 on success, negative on error. */
int v4l2_stream_on()
{ 
	int                     	i;
	int                     	ret;
	enum  v4l2_buf_type     	type;
	struct v4l2_buffer      	buffer;
	struct v4l2_plane 		planes[VIDEO_MAX_PLANES];

	memset(planes,0,sizeof(planes));

	for(i=0; i<COUNT; i++)
	{
		/* BUG FIX: 'buffer' was never zeroed, so reserved fields held
		 * stack garbage that some drivers reject */
		memset(&buffer,0,sizeof(buffer));

		buffer.m.planes = planes;
		buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
		buffer.memory = V4L2_MEMORY_MMAP;  
		buffer.index = i; 
		buffer.length = 1;   /* one plane per buffer (single-plane NV12) */

		ret=ioctl(fd_camera, VIDIOC_QBUF, &buffer);
		if(ret < 0)
		{
			printf("%s : VIDIOC_QBUF error\n", __FUNCTION__);	
			return -1;
		}

	}	

	type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	ret = ioctl(fd_camera, VIDIOC_STREAMON, &type);
	if(ret < 0)
	{
		printf("%s : VIDIOC_STREAMON error\n", __FUNCTION__);	
		return -2;
	}

	printf("stream on\n");

	return 0;
}

/* Dequeue one filled capture buffer, scan the frame for QR codes with
 * zbar, blit it to the LCD framebuffer, then requeue the buffer.
 * Returns 0 on success, negative on error. */
int v4l2_dequeue_buffer()
{
	int                  		ret;
	int                  		i;
	struct v4l2_buffer   		buffer;
	char		       		*base;
	char		       		*start;

	zbar::ImageScanner              scanner;
	int                             QR_count = 0;
	int                             width = 0;
	int                             height = 0;
	uchar                           *rows = NULL;
	struct v4l2_plane 		planes[VIDEO_MAX_PLANES];
	struct timespec                 detect_startTime, detect_endTime;

	memset(&buffer,0,sizeof(buffer));   /* BUG FIX: 'buffer' was used uninitialized */
	memset(planes,0,sizeof(planes));

	buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	buffer.memory = V4L2_MEMORY_MMAP;
	buffer.m.planes = planes;
	buffer.length = 1;

	clock_gettime(CLOCK_MONOTONIC, &detect_startTime);

	ret = ioctl(fd_camera, VIDIOC_DQBUF, &buffer);
	if(ret < 0)
	{
		/* BUG FIX: the original message said "VIDIOC_STREAMON error" */
		printf("%s : VIDIOC_DQBUF error: %s\n", __FUNCTION__, strerror(errno));
		return -1;
	}

	/* only COUNT buffers (index 0..COUNT-1) were ever allocated */
	assert(buffer.index < COUNT);

	/* --- read the frame with OpenCV, detect QR codes with zbar --- */
#if 1
	cv::Size szSize(640,480);

	cv::Mat imgMat(szSize, CV_8UC3);
	/* NV12: full-resolution Y plane + half-height interleaved UV plane
	 * -> height*3/2 rows of single-channel data */
	cv::Mat srcMat(480*3/2,640, CV_8UC1, buffer_unit[buffer.index].start);
	cvtColor(srcMat, imgMat, cv::COLOR_YUV2BGR_NV12);

	scanner.set_config(zbar::ZBAR_NONE,zbar::ZBAR_CFG_ENABLE,1);
	cv::cvtColor(imgMat,imgMat,cv::COLOR_BGR2GRAY);
	width = imgMat.cols;
	height = imgMat.rows;

	rows = (uchar *)imgMat.data;

	/* "Y800" = 8-bit greyscale, the format zbar scans */
	zbar::Image QRimage(width,height,"Y800",rows,width*height);

	QR_count = scanner.scan(QRimage);

	if(QR_count < 0)
	{
		std::cout<<"ERROR:zbar::scan!"<<std::endl;

		return -1;
	}

	zbar::Image::SymbolIterator     symbol = QRimage.symbol_begin();

	clock_gettime(CLOCK_MONOTONIC, &detect_endTime);
	/* elapsed detection time in milliseconds */
	long detect_elapsedTime = (detect_endTime.tv_sec - detect_startTime.tv_sec) * 1000;
	detect_elapsedTime += (detect_endTime.tv_nsec - detect_startTime.tv_nsec) / 1000000;

	for( ;symbol != QRimage.symbol_end();++symbol)
	{
		cout<<endl<<"The number of QR codes is:"<<QR_count<<endl;
		cout<<"QR_code:"<<symbol->get_data()<<endl;
		cout<<"time:"<<detect_elapsedTime<<"ms"<<endl;
	}
#endif

	/* --- display the frame on the LCD --- */
#if 1
	/* convert NV12 to 32-bit pixels, then copy row by row: the frame
	 * stride is FRAME_WIDTH*4 bytes, the framebuffer stride (LCD_WIDTH+8)*4 */
	NV12_T_RGBA(FRAME_WIDTH,FRAME_HEIGH, ( unsigned char*)buffer_unit[buffer.index].start,(unsigned char* )rgb_buffer);

	for (i=0, base=(char *)screen_base, start=(char *)rgb_buffer; i<FRAME_HEIGH; i++) 
	{
		memcpy(base, start, FRAME_WIDTH * 4);
		base += (LCD_WIDTH+8)*4;
		start += FRAME_WIDTH*4;
	}

	memset(rgb_buffer, 0x0, FRAME_WIDTH*FRAME_HEIGH*4);
#endif

	/* hand the buffer back to the driver's queue.
	 * BUG FIX: the return value was silently ignored */
	ret = ioctl(fd_camera, VIDIOC_QBUF, &buffer);
	if(ret < 0)
	{
		printf("%s : VIDIOC_QBUF error\n", __FUNCTION__);
		return -2;
	}

	return 0;
}

/* Capture loop: wait (select, 2 s timeout) for a frame to become ready
 * on the camera fd, then dequeue and process it. Runs until select
 * fails or times out. Returns a negative code on exit. */
int v4l2_select()
{
	while(1)
	{
		int                  ret;
		struct timeval       tv;
		fd_set               fds;

		tv.tv_sec = 2;
		tv.tv_usec = 0;

		FD_ZERO(&fds);
		FD_SET(fd_camera, &fds);

		ret = select(fd_camera+1, &fds, NULL, NULL, &tv);
		if(ret < 0)
		{
			/* BUG FIX: the original wrote 'errno = EINTR' (assignment),
			 * which both clobbered errno and made the test always true.
			 * A signal interruption is not fatal — retry the select. */
			if(errno == EINTR)
			{
				continue;
			}

			printf("%s : select error: %s\n", __FUNCTION__, strerror(errno));
			return -1;
		}
		if(0 == ret)
		{
			printf("%s : select timeout\n", __FUNCTION__);
			return -2;
		}

		v4l2_dequeue_buffer();
	}

	return 0;
}

/* Stop the video capture stream.
 * Returns 0 on success, -1 on error. */
int v4l2_stream_off()
{
	enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;

	if (ioctl(fd_camera, VIDIOC_STREAMOFF, &type) < 0)
	{
		printf("%s : VIDIOC_STREAMOFF error\n", __FUNCTION__);
		return -1;
	}

	return 0;
}     

/* 解除 buffer_unit 到内核中申请 buffer 的映射 */
void v4l2_unmmap()
{
	int         i;
	int         ret;

	free(rgb_buffer);

	for(i=0; i<COUNT; i++)
	{
		ret = munmap(buffer_unit[i].start, buffer_unit[i].length);
		if (ret < 0)
		{
			printf("%s : munmap error\n", __FUNCTION__);
		}
	}
}

/* Entry point: open devices, configure the capture pipeline, run the
 * capture/scan/display loop, then tear everything down.
 * BUG FIX: the original ignored every return value and kept running the
 * whole pipeline after fatal failures (e.g. camera missing). */
int main(int argc, char **argv)
{
	if (init_camera_lcd() < 0)
	{
		return 1;
	}

	/* verify capabilities and pick the data format */
	if (v4l2_query_capability() < 0 || v4l2_enum_format() < 0)
	{
		close(fd_camera);
		close(fd_lcd);
		return 1;
	}

	v4l2_set_format();
	v4l2_get_format();

	/* allocate kernel frame buffers, map them, and start streaming */
	if (v4l2_require_buffer() < 0 || v4l2_query_buffer() < 0 || v4l2_stream_on() < 0)
	{
		close(fd_camera);
		close(fd_lcd);
		return 1;
	}

	/* capture loop: select for data, dequeue and process frames */
	v4l2_select();

	/* stop streaming, release mappings, close descriptors */
	v4l2_stream_off();
	v4l2_unmmap(); 

	close(fd_camera);
	close(fd_lcd);

	return 0;
}
