/***************************************************************
 Copyright © ALIENTEK Co., Ltd. 1998-2021. All rights reserved.
 文件名 : v4l2_camera.c
 作者 : 邓涛
 版本 : V1.0
 描述 : V4L2摄像头应用编程实战
 其他 : 无
 论坛 : www.openedv.com
 日志 : 初版 V1.0 2021/7/09 邓涛创建
 ***************************************************************/

/* C std headers */
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <signal.h>
#include <fcntl.h>
#include <unistd.h>
#include <string.h>
#include <errno.h>
#include <pthread.h>
/* sys headers */
#include <sys/mman.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/ioctl.h>
#include <sys/wait.h>
#include <sys/time.h>
/* linux os headers */
#include <linux/videodev2.h>
#include <linux/fb.h>
/* jpeglib */
#include "jpeglib.h"
#include "main.h"

#define FB_DEV              "/dev/fb0"      //LCD设备节点
#define FRAMEBUFFER_COUNT   5               //帧缓冲数量

// Dump a raw memory buffer to a file (debug helper)
void SaveRam(const char* filePath, const uint8_t* data, size_t dataLen);
// Render a YUYV frame to the display (declared here; definition not in this file — see DrawYuv below)
void DrawYUYV(const uint8_t* pBase, size_t dataLen);
// Decode a JPEG file from disk and display it
void DrawJpgFile(const char* filePath);
// Decode an in-memory JPEG (one MJPEG frame) and display it
void DrawJpg(void* pBase, size_t dataLen);
int display_logo(void);
// Provided by the LVGL glue layer (main.h / lvgl module)
extern void LvglDeployGRAM(void);
extern void LvglDrawPoint(int h, int w, uint8_t r, uint8_t g, uint8_t b);
// RGB888 frame store filled by the draw routines; CAPTURE_H/CAPTURE_W come from main.h
extern uint8_t g_lvglGram[CAPTURE_H][CAPTURE_W][3];

/*** 摄像头像素格式及其描述信息 ***/
/*** Camera pixel format and its human-readable description ***/
typedef struct camera_format {
    unsigned char description[32];  // description string (from v4l2_fmtdesc.description)
    unsigned int pixelformat;       // V4L2 fourcc pixel format code
} cam_fmt;

/*** 描述一个帧缓冲的信息 ***/
/*** One mmap'ed capture frame buffer ***/
typedef struct cam_buf_info {
    unsigned short *start;      // start address of the mapped frame buffer
    unsigned long length;       // length of the mapped frame buffer in bytes
} cam_buf_info;

static int g_lcdWidth;                       // LCD width in pixels
static int g_lcdHeight;                      // LCD height in pixels
static unsigned short *g_pGramBase = NULL;// LCD framebuffer (GRAM) base address from mmap
static int fb_fd = -1;                  // LCD device file descriptor
static int v4l2_fd = -1;                // camera device file descriptor
static cam_buf_info g_bufInfos[FRAMEBUFFER_COUNT];
static cam_fmt cam_fmts[10];            // formats enumerated from the camera (max 10)
int frm_width, frm_height, fmt_type;   // negotiated frame width, height, and pixel format

/*
 * Open the LCD framebuffer device, query its geometry and map its
 * memory into this process.
 *
 * Returns 0 on success, -1 on any failure (device left closed).
 * Side effects: sets fb_fd, g_lcdWidth, g_lcdHeight, g_pGramBase and
 * fills the screen with 0xa0 bytes.
 */
static int fb_dev_init(void)
{
    struct fb_var_screeninfo fb_var = {0};
    struct fb_fix_screeninfo fb_fix = {0};
    unsigned long screen_size;

    /* Open the framebuffer device */
    fb_fd = open(FB_DEV, O_RDWR);
    if (0 > fb_fd) {
        LOG("open error: %s: %s\n", FB_DEV, strerror(errno));
        return -1;
    }

    /* Query framebuffer geometry; check returns so we never use garbage sizes */
    if (0 > ioctl(fb_fd, FBIOGET_VSCREENINFO, &fb_var) ||
        0 > ioctl(fb_fd, FBIOGET_FSCREENINFO, &fb_fix)) {
        LOG("ioctl error: FBIOGET_xSCREENINFO: %s\n", strerror(errno));
        close(fb_fd);
        fb_fd = -1;
        return -1;
    }

    /* line_length accounts for padding, so use it rather than xres*bpp */
    screen_size = fb_fix.line_length * fb_var.yres;
    g_lcdWidth = fb_var.xres;
    g_lcdHeight = fb_var.yres;
    LOG("LCD line_length:%d\n", fb_fix.line_length);
    LOG("LCD Size:%d*%d, size=%lu\n", g_lcdWidth, g_lcdHeight, screen_size);

    /* Map the display memory */
    g_pGramBase = mmap(NULL, screen_size, PROT_READ | PROT_WRITE, MAP_SHARED, fb_fd, 0);
    if (MAP_FAILED == (void *)g_pGramBase) {
        perror("mmap error");
        close(fb_fd);
        fb_fd = -1;
        g_pGramBase = NULL;
        return -1;
    }

    /* Clear the LCD to a uniform gray background.
     * (The original code performed three consecutive memsets; only the
     * last one was visible, so the dead stores are removed.) */
    memset(g_pGramBase, 0xa0, screen_size);

    return 0;
}

/*
 * Open the camera device and verify it supports video capture.
 *
 * device: path of the V4L2 node, e.g. "/dev/video0".
 * Returns 0 on success, -1 on failure (device left closed).
 * Side effect: sets v4l2_fd.
 */
static int v4l2_dev_init(const char *device)
{
    struct v4l2_capability cap = {0};

    /* Open the camera */
    v4l2_fd = open(device, O_RDWR);
    if (0 > v4l2_fd) {
        LOG("open error: %s: %s\n", device, strerror(errno));
        return -1;
    }

    /* Query device capabilities; on failure `cap` would be undefined,
     * so the return value must be checked before reading it */
    if (0 > ioctl(v4l2_fd, VIDIOC_QUERYCAP, &cap)) {
        LOG("ioctl error: VIDIOC_QUERYCAP: %s\n", strerror(errno));
        close(v4l2_fd);
        v4l2_fd = -1;
        return -1;
    }

    /* Make sure this is a video capture device */
    if (!(V4L2_CAP_VIDEO_CAPTURE & cap.capabilities)) {
        LOG("Error: %s: No capture video device!\n", device);
        close(v4l2_fd);
        v4l2_fd = -1;
        return -1;
    }

    return 0;
}

/*
 * Enumerate the pixel formats supported by the camera and cache them
 * in cam_fmts[]. Unfilled slots stay zeroed (pixelformat == 0 acts as
 * the end-of-list sentinel for v4l2_print_formats).
 */
static void v4l2_enum_formats(void)
{
    struct v4l2_fmtdesc fmtdesc = {0};

    fmtdesc.index = 0;
    fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    /* Bound the loop by the array size: a driver reporting more than
     * 10 formats would otherwise overflow cam_fmts[] */
    while (fmtdesc.index < sizeof(cam_fmts) / sizeof(cam_fmts[0]) &&
           0 == ioctl(v4l2_fd, VIDIOC_ENUM_FMT, &fmtdesc)) {

        /* Cache the fourcc and its description */
        cam_fmts[fmtdesc.index].pixelformat = fmtdesc.pixelformat;
        /* snprintf always NUL-terminates, unlike a raw strcpy */
        snprintf((char *)cam_fmts[fmtdesc.index].description,
                 sizeof(cam_fmts[fmtdesc.index].description),
                 "%s", fmtdesc.description);
        fmtdesc.index++;
    }
}

/*
 * Print every cached pixel format together with the frame sizes and
 * frame rates the camera supports for it. Read-only diagnostics.
 */
static void v4l2_print_formats(void)
{
    struct v4l2_frmsizeenum frmsize = {0};
    struct v4l2_frmivalenum frmival = {0};
    size_t i;

    frmsize.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    frmival.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    /* Bound by array size too: if all 10 slots are filled there is no
     * zero sentinel and the original loop would read past the array */
    for (i = 0; i < sizeof(cam_fmts) / sizeof(cam_fmts[0]) &&
                cam_fmts[i].pixelformat; i++) {

        LOG("format<0x%x>, description<%s>\n", cam_fmts[i].pixelformat,
                    cam_fmts[i].description);

        /* Enumerate all capture resolutions for this format */
        frmsize.index = 0;
        frmsize.pixel_format = cam_fmts[i].pixelformat;
        frmival.pixel_format = cam_fmts[i].pixelformat;
        while (0 == ioctl(v4l2_fd, VIDIOC_ENUM_FRAMESIZES, &frmsize)) {

            /* discrete.width/height are __u32 — print with %u */
            LOG("size<%u*%u> ",
                    frmsize.discrete.width,
                    frmsize.discrete.height);
            frmsize.index++;

            /* Enumerate the frame intervals for this resolution */
            frmival.index = 0;
            frmival.width = frmsize.discrete.width;
            frmival.height = frmsize.discrete.height;
            while (0 == ioctl(v4l2_fd, VIDIOC_ENUM_FRAMEINTERVALS, &frmival)) {

                /* Guard against a malformed zero numerator (div-by-zero) */
                if (frmival.discrete.numerator) {
                    LOG("<%ufps>", frmival.discrete.denominator /
                            frmival.discrete.numerator);
                }
                frmival.index++;
            }
            LOG("\n");
        }
        LOG("\n");
    }
}

/*
 * Negotiate the capture format with the driver.
 *
 * format: requested V4L2 fourcc (e.g. V4L2_PIX_FMT_MJPEG).
 * If the driver substitutes a different format, falls back once to
 * V4L2_PIX_FMT_YUYV (recursion depth is at most 1).
 * Also requests 30 fps when the driver supports frame-rate control.
 *
 * Returns 0 on success, -1 on failure.
 * Side effects: sets frm_width, frm_height, fmt_type to the values
 * actually accepted by the driver.
 */
static int v4l2_set_format(uint32_t format)
{
    struct v4l2_format fmt = {0};
    struct v4l2_streamparm streamparm = {0};

    /* Request frame geometry and pixel format */
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = g_lcdWidth;  //requested frame width
    fmt.fmt.pix.height = g_lcdHeight;//requested frame height
    fmt.fmt.pix.pixelformat = format;
    if (0 > ioctl(v4l2_fd, VIDIOC_S_FMT, &fmt)) {
        LOG("ioctl error: VIDIOC_S_FMT: %s\n", strerror(errno));
        return -1;
    }

    /* VIDIOC_S_FMT may silently substitute a supported format; if so,
     * degrade to YUYV once, otherwise give up */
    if (format != fmt.fmt.pix.pixelformat) {
        LOG("Error: the device does not support format 0x%08x\n", format);
        if (format != V4L2_PIX_FMT_YUYV) {
            LOG("degrade to V4L2_PIX_FMT_YUYV\n");
            return v4l2_set_format(V4L2_PIX_FMT_YUYV);
        }
        return -1;
    }

    frm_width = fmt.fmt.pix.width;  //actual frame width
    frm_height = fmt.fmt.pix.height;//actual frame height
    fmt_type = fmt.fmt.pix.pixelformat;//actual pixel format
    LOG("succeed: cam capture config to resolution %u:%u,fmt = 0x%08x \r\n", frm_width, frm_height, fmt_type);

    /* Read current stream parameters */
    streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ioctl(v4l2_fd, VIDIOC_G_PARM, &streamparm);

    /* Request 30 fps only if the driver supports per-frame timing */
    if (V4L2_CAP_TIMEPERFRAME & streamparm.parm.capture.capability) {
        streamparm.parm.capture.timeperframe.numerator = 1;
        streamparm.parm.capture.timeperframe.denominator = 30;//30fps
        if (0 > ioctl(v4l2_fd, VIDIOC_S_PARM, &streamparm)) {
            LOG("ioctl error: VIDIOC_S_PARM: %s\n", strerror(errno));
            return -1;
        }

        /* Read back the rate actually applied. Zero-initialize the
         * struct: V4L2 requires reserved fields to be cleared and the
         * original left it uninitialized on the stack */
        struct v4l2_streamparm appliedParm = {0};
        appliedParm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (ioctl(v4l2_fd, VIDIOC_G_PARM, &appliedParm) == -1) {
            return -1;
        }
        LOG("Frame rate: %u/%u\n", appliedParm.parm.capture.timeperframe.numerator, appliedParm.parm.capture.timeperframe.denominator);
    }

    return 0;
}

/*
 * Request FRAMEBUFFER_COUNT driver buffers, mmap each one into this
 * process (filling g_bufInfos) and queue them all for capture.
 *
 * Returns 0 on success, -1 on failure; on failure any buffers already
 * mapped are unmapped again so nothing leaks.
 */
static int v4l2_init_buffer(void)
{
    struct v4l2_requestbuffers reqbuf = {0};
    struct v4l2_buffer buf = {0};

    /* Ask the driver for capture buffers */
    reqbuf.count = FRAMEBUFFER_COUNT;       //number of frame buffers
    reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    reqbuf.memory = V4L2_MEMORY_MMAP;
    if (0 > ioctl(v4l2_fd, VIDIOC_REQBUFS, &reqbuf)) {
        LOG("ioctl error: VIDIOC_REQBUFS: %s\n", strerror(errno));
        return -1;
    }

    /* Map every buffer into our address space */
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    for (buf.index = 0; buf.index < FRAMEBUFFER_COUNT; buf.index++) {

        /* QUERYBUF fills buf.length / buf.m.offset — must not fail */
        if (0 > ioctl(v4l2_fd, VIDIOC_QUERYBUF, &buf)) {
            LOG("ioctl error: VIDIOC_QUERYBUF: %s\n", strerror(errno));
            goto err_unmap;
        }
        g_bufInfos[buf.index].length = buf.length;
        g_bufInfos[buf.index].start = mmap(NULL, buf.length,
                PROT_READ | PROT_WRITE, MAP_SHARED,
                v4l2_fd, buf.m.offset);
        if (MAP_FAILED == g_bufInfos[buf.index].start) {
            perror("mmap error");
            goto err_unmap;
        }
    }

    /* Queue all buffers so capture can start */
    for (buf.index = 0; buf.index < FRAMEBUFFER_COUNT; buf.index++) {

        if (0 > ioctl(v4l2_fd, VIDIOC_QBUF, &buf)) {
            LOG("ioctl error: VIDIOC_QBUF: %s\n", strerror(errno));
            return -1;
        }
    }

    return 0;

err_unmap:
    /* Unmap the buffers mapped before the failure ([0, buf.index)) */
    while (buf.index > 0) {
        buf.index--;
        munmap(g_bufInfos[buf.index].start, g_bufInfos[buf.index].length);
        g_bufInfos[buf.index].start = NULL;
    }
    return -1;
}

/*
 * Start the capture stream: after this the driver begins filling the
 * queued buffers. Returns 0 on success, -1 on ioctl failure.
 */
static int v4l2_stream_on(void)
{
    enum v4l2_buf_type bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (ioctl(v4l2_fd, VIDIOC_STREAMON, &bufType) < 0) {
        LOG("ioctl error: VIDIOC_STREAMON: %s\n", strerror(errno));
        return -1;
    }

    return 0;
}

/* Clamp an int to the 0..255 range of an 8-bit color channel. */
uint8_t qRound(int r0)
{
    if (r0 < 0)
        return 0;
    if (r0 > 255)
        return 255;
    return (uint8_t)r0;
}

/* Pack three 8-bit channels into a 0x00RRGGBB pixel value. */
uint32_t RGB888(uint8_t R, uint8_t G, uint8_t B)
{
    uint32_t pixel = R;
    pixel = (pixel << 8) | G;
    pixel = (pixel << 8) | B;
    return pixel;
}

/*
 * Integer fixed-point YUV -> RGB888 conversion (shift-based
 * approximation of the usual BT.601-style coefficients).
 * Reference points (y,u,v -> r,g,b):
 *   76,84,255  -> ~red    149,43,21 -> ~green    29,255,107 -> ~blue
 * Each channel is clamped to 0..255 before packing.
 */
uint32_t yuv2rgb888(int y, int u, int v)
{
    const int du = u - 128;   /* chroma offsets are biased by 128 */
    const int dv = v - 128;

    const int r = y + dv + ((dv * 103) >> 8);
    const int g = y - ((du * 88) >> 8) - ((dv * 183) >> 8);
    const int b = y + du + ((du * 198) >> 8);

    return RGB888(qRound(r), qRound(g), qRound(b));
}

/*
 * Plot one pixel: unpack 0xRRGGBB and forward to the LVGL layer.
 * Note LvglDrawPoint takes (row, column), hence the y/x swap.
 */
void DrawPoint(int x, int y, uint32_t color)
{
	uint8_t red   = (uint8_t)(color >> 16);
	uint8_t green = (uint8_t)(color >> 8);
	uint8_t blue  = (uint8_t)color;

	LvglDrawPoint(y, x, red, green, blue);
}

/*
 * Render a V4L2_PIX_FMT_YUYV frame pixel by pixel.
 *
 * pBase: frame data viewed as 16-bit words; xSize/ySize: frame size.
 * YUYV memory layout is Y0 U Y1 V (one U/V pair shared by two pixels).
 * On a little-endian host, a 16-bit load therefore puts the luma byte
 * in the LOW half of the word and the chroma byte in the HIGH half.
 *
 * BUGFIX: the original extracted Y from the high byte and chroma from
 * the low byte, which is the UYVY layout, not YUYV — colors/luma were
 * swapped. (If the camera actually emits UYVY, revert this — confirm
 * on hardware.)
 */
void DrawYuv(uint16_t* pBase, size_t xSize, size_t ySize)
{
    for (size_t picY = 0; picY < ySize ; picY++) {
        for (size_t picX = 0; picX < xSize; picX += 2) {
            uint16_t pix0 = pBase[picY * xSize + picX];
            uint16_t pix1 = pBase[picY * xSize + picX + 1];
            uint8_t y0 = pix0 & 0xff;   /* byte 0: Y0 */
            uint8_t u  = pix0 >> 8;     /* byte 1: U  */
            uint8_t y1 = pix1 & 0xff;   /* byte 2: Y1 */
            uint8_t v  = pix1 >> 8;     /* byte 3: V  */

            /* Both pixels of the pair share the same U/V sample */
            DrawPoint(picX,     picY, yuv2rgb888(y0, u, v));
            DrawPoint(picX + 1, picY, yuv2rgb888(y1, u, v));
        }
    }
}

/*
 * Draw a plus-shaped cursor centered at (xCenter, yCenter):
 * a horizontal and a vertical bar, each extending `half` pixels
 * on either side of the center.
 */
void DrawCursor(int xCenter, int yCenter, uint32_t color)
{
    const int half = 50;

    for (int dx = -half; dx < half; dx++) {
        DrawPoint(xCenter + dx, yCenter, color);
    }
    for (int dy = -half; dy < half; dy++) {
        DrawPoint(xCenter, yCenter + dy, color);
    }
}

void DrawJpg(void* pBase, size_t dataLen)
{
    //(1)为jpeg对象分配空间并初始化
	struct jpeg_decompress_struct cinfo;	//解压jpeg的对象结构体
	struct jpeg_error_mgr jerr;				//定义错误结构体
	
	cinfo.err = jpeg_std_error(&jerr);		//错误处理结构体绑定
	jpeg_create_decompress(&cinfo);			//初始化jpeg的对象结构体

	//(2)指定解压缩数据源
	jpeg_mem_src(&cinfo, pBase, dataLen);//指定解压缩数据源

	//(3)获取文件信息
	jpeg_read_header(&cinfo, TRUE);
	
	//(4)为解压缩设定参数，包括图像大小，颜色空间
	int n = 1;			//缩小倍数

	//设定的缩小倍数
	cinfo.scale_num = 1;		//分子
	cinfo.scale_denom = n;		//分母
	cinfo.out_color_space = JCS_RGB;	//颜色空间

	//(5)开始解压缩
	jpeg_start_decompress(&cinfo);

	//(6)取出数据（做相关的应用），安装一行一行去读取的
    int h = 0;
	while(cinfo.output_scanline < cinfo.output_height) {
		char *buffer = g_lvglGram[h++];
		jpeg_read_scanlines(&cinfo, (JSAMPARRAY)&buffer, 1);
	}

	//(7)解压缩完毕
	jpeg_finish_decompress(&cinfo);

	//(8)释放资源
	jpeg_destroy_decompress(&cinfo);
}

static int v4l2_read_data(void)
{
    struct v4l2_buffer buf = {
        .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
        .memory = V4L2_MEMORY_MMAP,
    };

    while (1) {

        for(buf.index = 0; buf.index < FRAMEBUFFER_COUNT; buf.index++) {
			struct timeval logTime[3];
			gettimeofday(&logTime[0], NULL);
            ioctl(v4l2_fd, VIDIOC_DQBUF, &buf);     //出队
			gettimeofday(&logTime[1], NULL);
            // SaveRam("./test.jpg", g_bufInfos[buf.index].start, g_bufInfos[buf.index].length);
            // exit(0);
            unsigned short * pPic=g_bufInfos[buf.index].start;
			switch (fmt_type)
			{
			case V4L2_PIX_FMT_MJPEG:
				DrawJpg(pPic, g_bufInfos[buf.index].length);
				break;
			case V4L2_PIX_FMT_YUYV:
				DrawYuv(pPic, frm_width, frm_height);
				break;
			default:
    			pthread_exit(0);
			}
			gettimeofday(&logTime[2], NULL);
			LOGScroll(3, "picLen:%u  | draw in %.1fms | capture in %.3fms", 
							g_bufInfos[buf.index].length,
							TimeDiffMillSec(logTime[2], logTime[1]),
							TimeDiffMillSec(logTime[1], logTime[0]));

            // 数据处理完之后、再入队、往复
            ioctl(v4l2_fd, VIDIOC_QBUF, &buf);
            DrawCursor(g_lcdWidth / 2, g_lcdHeight / 2, RGB888(0x10, 0x83, 0xff));
        }
    }
    pthread_exit(0);
}

/*
 * SIGINT handler: terminate the process cleanly on Ctrl-C.
 * Any other signal number is ignored.
 */
static void ProcSig(int sig)
{
    if (SIGINT != sig) {
        return;
    }
    exit(EXIT_SUCCESS);
}

/*
 * Camera pipeline entry point: open the camera, negotiate MJPEG (with
 * YUYV fallback inside v4l2_set_format), set up the buffers, start
 * streaming, then loop forever reading and displaying frames.
 *
 * Returns -2 when the camera cannot be opened/configured (after
 * displaying a fallback background image); calls exit() on buffer or
 * streaming setup failure; otherwise never returns normally.
 *
 * NOTE(review): fb_dev_init() is never called here and the LCD size is
 * hard-coded to 600x480 — presumably the LVGL layer owns the real
 * framebuffer; confirm against main.h / the LVGL glue code.
 */
int v4l2_main()
{
    signal(SIGINT, ProcSig);

    /* Hard-coded capture resolution request (see NOTE above) */
    g_lcdWidth = 600;
    g_lcdHeight = 480;

    /* Initialize the camera device */
    if (v4l2_dev_init("/dev/video0")){
		DrawJpgFile("/root/app/background.jpg");
		return -2;
	}

    /* Enumerate and print the supported formats, sizes and frame rates */
    v4l2_enum_formats();
    v4l2_print_formats();

    /* Negotiate the capture format (MJPEG preferred) */
    if (v4l2_set_format(V4L2_PIX_FMT_MJPEG)) { // V4L2_PIX_FMT_MJPEG
		DrawJpgFile("/root/app/background.jpg");
		return -2;
	}

    /* Request, mmap and queue the frame buffers */
    if (v4l2_init_buffer())
        exit(EXIT_FAILURE);

    /* Start video capture */
    if (v4l2_stream_on())
        exit(EXIT_FAILURE);

    /* Read frames forever: dequeue, draw to the LCD, re-queue */
    return v4l2_read_data();
}

/*
 * Dump a raw memory buffer to a file (debug helper used to capture a
 * single camera frame to disk).
 *
 * filePath: destination path; data/dataLen: the buffer to write.
 * The file is created (0600) and truncated so no stale bytes from a
 * previous, longer dump survive.
 *
 * BUGFIX: the original logged "open succeed" before checking open()
 * — and in fact never checked it — so a failed open led to write(-1).
 * Format specifiers are also corrected (%zu/%zd for size_t/ssize_t).
 */
void SaveRam(const char* filePath, const uint8_t* data, size_t dataLen)
{
    int file = open(filePath, O_WRONLY | O_CREAT | O_TRUNC, S_IRUSR | S_IWUSR);
    if (0 > file) {
        LOG("open error: %s: %s\n", filePath, strerror(errno));
        return;
    }
    LOG("%s open succeed, writing %zu to rom\r\n", filePath, dataLen);
    ssize_t wrLen = write(file, data, dataLen);
    if (wrLen < 0 || (size_t)wrLen != dataLen) {
        LOG("Failed: %s should write %zu, but only %zd is written", filePath, dataLen, wrLen);
    } else {
        LOG("write succeed\r\n");
    }
    close(file);
}

void DrawJpgFile(const char* filePath)
{
    //(1)为jpeg对象分配空间并初始化
	struct jpeg_decompress_struct cinfo;	//解压jpeg的对象结构体
	struct jpeg_error_mgr jerr;				//定义错误结构体
	
	cinfo.err = jpeg_std_error(&jerr);		//错误处理结构体绑定
	jpeg_create_decompress(&cinfo);			//初始化jpeg的对象结构体

	//(2)指定解压缩数据源
	FILE *infile = fopen(filePath, "r+");
	if (infile == NULL)
	{
		LOG("fopen %s failed.", filePath);
		return;
	}
	jpeg_stdio_src(&cinfo, infile);//指定解压缩数据源

	//(3)获取文件信息
	jpeg_read_header(&cinfo, TRUE);

	//设定的缩小倍数
	cinfo.scale_num = 1;		//分子
	cinfo.scale_denom = 1;		//分母
	cinfo.out_color_space = JCS_RGB;	//颜色空间

	//(5)开始解压缩
	jpeg_start_decompress(&cinfo);
	frm_width = cinfo.output_width;
	frm_height = cinfo.output_height;
	LOG("displaying logo %s[%d:%d]\n", filePath, cinfo.output_width, cinfo.output_height);//设定解压缩之后的宽高

	//(6)取出数据（做相关的应用），安装一行一行去读取的
    int h = 0;
	while(cinfo.output_scanline < cinfo.output_height) {
		char *buffer = g_lvglGram[h++];
		jpeg_read_scanlines(&cinfo, (JSAMPARRAY)&buffer, 1);
	}

	//(7)解压缩完毕
	jpeg_finish_decompress(&cinfo);

	//(8)释放资源
	jpeg_destroy_decompress(&cinfo);
	fclose(infile);
}