#include "USBCamera.hpp"
/* 相关摄像头驱动库 */
#include <fcntl.h>
#include <linux/videodev2.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <unistd.h>

#include <jpeglib.h>
#include <x264.h>

#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>

/* Process-wide singleton instance, created on first use.
 * NOTE(review): lazy init is not thread-safe; confirm all callers run on one thread. */
USBCamera * usb_camera = nullptr;

/* Return the shared USBCamera instance, constructing it on the first call. */
USBCamera * USBCamera::getInstance(){
    if (usb_camera != nullptr) {
        return usb_camera;
    }
    usb_camera = new USBCamera();
    return usb_camera;
}

bool USBCamera::openCamera(){
    /*
     * Open DEVICE_NAME, configure a WIDTH x HEIGHT packed-YUYV capture,
     * mmap BUFFERS_NUMBERS kernel buffers, queue them and start streaming.
     *
     * Returns true on success. On any failure, everything acquired so far
     * (mappings, file descriptor) is released and false is returned.
     * (Previously most ioctl errors were only printed and setup continued
     * against a half-initialised device, leaking the fd and mappings.)
     */
    fd = open(DEVICE_NAME, O_RDWR);
    if (fd == -1) {
        printf("open camera device failed\r\n");
        return false;
    }

    /* Unmap the first `mapped_count` buffers, close the device, report `msg`. */
    auto fail = [&](const char *msg, int mapped_count) -> bool {
        printf("%s\r\n", msg);
        for (int i = 0; i < mapped_count; i++) {
            if (buffers[i].start != NULL && buffers[i].start != MAP_FAILED) {
                munmap(buffers[i].start, buffers[i].length);
                buffers[i].start = NULL;
            }
        }
        close(fd);
        fd = -1;
        return false;
    };

    /* Query and print the device capabilities (failure was silently ignored before). */
    if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == -1) {
        return fail("VIDIOC_QUERYCAP Error", 0);
    }
    printf("Driver: %s\n", cap.driver);
    printf("Card: %s\n", cap.card);
    printf("Bus Info: %s\n", cap.bus_info);
    printf("Version: %u\n", cap.version);
    printf("Capabilities: 0x%08x\n", cap.capabilities);

    /* Enumerate the pixel formats the device supports (informational only). */
    video_format_desc.index = 0;
    video_format_desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    while (ioctl(fd, VIDIOC_ENUM_FMT, &video_format_desc) == 0) {
        printf("Index: %d, Format: %s\n", video_format_desc.index, video_format_desc.description);
        video_format_desc.index++;
    }

    /* Request the packed YUYV 4:2:2 capture format. */
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = WIDTH;
    fmt.fmt.pix.height = HEIGHT;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    fmt.fmt.pix.field = V4L2_FIELD_NONE;
    if (ioctl(fd, VIDIOC_S_FMT, &fmt) == -1) {
        return fail("VIDIOC_S_FMT Error", 0);
    }

    /* Ask the driver for mmap-able capture buffers. */
    memset(&req_buffers, 0, sizeof(req_buffers));
    req_buffers.count = BUFFERS_NUMBERS;
    req_buffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req_buffers.memory = V4L2_MEMORY_MMAP;
    if (ioctl(fd, VIDIOC_REQBUFS, &req_buffers) == -1) {
        return fail("VIDIOC_REQBUFS Error", 0);
    }

    /* Map each driver buffer into this process. */
    for (int i = 0; i < BUFFERS_NUMBERS; i++) {
        memset(&buf, 0, sizeof(buf));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        if (ioctl(fd, VIDIOC_QUERYBUF, &buf) == -1) {
            return fail("VIDIOC_QUERYBUF Error", i);   /* buffers[0..i-1] mapped so far */
        }
        buffers[i].length = buf.length;
        buffers[i].start = mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset);
        if (buffers[i].start == MAP_FAILED) {
            return fail("MAP_FAILED", i);
        }
    }

    /* Queue every buffer so the driver can start filling them. */
    for (int i = 0; i < BUFFERS_NUMBERS; i++) {
        memset(&buf, 0, sizeof(buf));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        if (ioctl(fd, VIDIOC_QBUF, &buf) == -1) {
            return fail("VIDIOC_QBUF Error", BUFFERS_NUMBERS);
        }
    }

    /* Start the capture stream. */
    int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_STREAMON, &type) == -1) {
        return fail("VIDIOC_STREAMON Error", BUFFERS_NUMBERS);
    }
    return true;
}

bool USBCamera::closeCamera(){

    return true;   
}

int USBCamera::readFrameBuffer(void * buffer , unsigned int *size){
    /*
     * Dequeue one filled frame from the driver, copy its payload into the
     * caller-supplied `buffer`, store the payload size in `*size`, and
     * re-queue the kernel buffer. Returns 0 on success, -1 on failure.
     *
     * The caller's buffer must be large enough for one full frame
     * (WIDTH * HEIGHT * 2 bytes for the YUYV format set in openCamera()).
     */
    if (buffer == NULL || size == NULL) {
        return -1;
    }

    memset(&buf, 0, sizeof(buf));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;

    /* Dequeue a filled buffer. Previously a failure here fell through and
     * copied from a zeroed `buf` (stale index 0), then re-queued it. */
    if (ioctl(fd, VIDIOC_DQBUF, &buf) == -1) {
        printf("VIDIOC_DQBUF Error\r\n");
        return -1;
    }

    /* Copy the frame payload out to the caller. */
    if (buf.index >= BUFFERS_NUMBERS) {
        printf("Invalid buffer index: %d \r\n", buf.index);
        return -1;
    }
    memcpy(buffer, buffers[buf.index].start, buf.bytesused);
    *size = buf.bytesused;

    /* Hand the buffer back to the driver so it can be refilled. */
    if (ioctl(fd, VIDIOC_QBUF, &buf) == -1) {
        printf("VIDIOC_QBUF Error\r\n");
        return -1;
    }

    return 0;
}

void USBCamera::yuyvToJPEG(unsigned char* yuyv_data, int width, int height, int quality, const char* output_file){

    struct jpeg_compress_struct cinfo;
    struct jpeg_error_mgr jerr;
    JSAMPROW row_pointer[1];
    FILE *outfile = fopen(output_file, "wb");
    if (!outfile) {
        printf("Error opening output jpeg file\n");
        return;
    }
    
    cinfo.err = jpeg_std_error(&jerr);
    jpeg_create_compress(&cinfo);
    jpeg_stdio_dest(&cinfo, outfile);
    
    cinfo.image_width = width;
    cinfo.image_height = height;
    cinfo.input_components = 3;
    cinfo.in_color_space = JCS_RGB;
    
    jpeg_set_defaults(&cinfo);
    jpeg_set_quality(&cinfo, quality, TRUE);
    
    jpeg_start_compress(&cinfo, TRUE);
    
    JSAMPLE* rgb_data = (JSAMPLE*)malloc(width * height * 3);
    
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x += 2) {
            int yuyv_index = y * width * 2 + x * 2;
            int rgb_index = y * width * 3 + x * 3;
            
            int y1 = yuyv_data[yuyv_index];
            int u = yuyv_data[yuyv_index + 1];
            int y2 = yuyv_data[yuyv_index + 2];
            int v = yuyv_data[yuyv_index + 3];
            
            rgb_data[rgb_index] = y1 + 1.402 * (v - 128); // R
            rgb_data[rgb_index + 1] = y1 - 0.344136 * (u - 128) - 0.714136 * (v - 128); // G
            rgb_data[rgb_index + 2] = y1 + 1.772 * (u - 128); // B
            
            rgb_data[rgb_index + 3] = y2 + 1.402 * (v - 128); // R
            rgb_data[rgb_index + 4] = y2 - 0.344136 * (u - 128) - 0.714136 * (v - 128); // G
            rgb_data[rgb_index + 5] = y2 + 1.772 * (u - 128); // B
        }
        row_pointer[0] = &rgb_data[y * width * 3];
        jpeg_write_scanlines(&cinfo, row_pointer, 1);
    }
    
    jpeg_finish_compress(&cinfo);
    jpeg_destroy_compress(&cinfo);
    
    fclose(outfile);
    free(rgb_data); 
}

int USBCamera::yuyv422ToYUV420(unsigned char *input_buffer, int width, int height, unsigned char *output_buffer) {
    /*
     * Convert one packed YUYV (4:2:2) frame to planar I420 (YUV 4:2:0).
     *
     * Input layout per pixel pair: Y0 U Y1 V  (width*height*2 bytes).
     * Output layout: Y plane (w*h) + U plane (w*h/4) + V plane (w*h/4);
     * the caller must supply at least width*height*3/2 bytes.
     * Width and height are assumed even (true for the WIDTH/HEIGHT config).
     *
     * Fixes vs. the previous version:
     *  - the V sample was read one byte early (it picked up Y1 instead of V);
     *  - 4:2:2 chroma was first staged into the 4:2:0-sized U/V planes,
     *    overflowing the output buffer by width*height/4 bytes, and the
     *    in-place downsample then indexed the plane with doubled columns.
     *    Chroma is now downsampled on the fly by averaging the two source
     *    rows of each 2x2 pixel block.
     */
    unsigned char *y_plane = output_buffer;
    unsigned char *u_plane = output_buffer + (width * height);
    unsigned char *v_plane = u_plane + (width * height / 4);

    /* Y plane: every even byte of the packed stream. */
    for (int row = 0; row < height; row++) {
        const unsigned char *src = input_buffer + row * width * 2;
        unsigned char *dst = y_plane + row * width;
        for (int col = 0; col < width; col++) {
            dst[col] = src[col * 2];
        }
    }

    /* U/V planes: one sample per 2x2 pixel block. 4:2:2 already has one
     * chroma pair per horizontal pixel pair, so only the two source rows
     * of each block need to be averaged (rounded) vertically. */
    int uv_width = width / 2;
    int uv_height = height / 2;
    for (int row = 0; row < uv_height; row++) {
        const unsigned char *src0 = input_buffer + (row * 2) * width * 2;  /* even source row */
        const unsigned char *src1 = src0 + width * 2;                      /* odd source row  */
        for (int col = 0; col < uv_width; col++) {
            int base = col * 4;  /* start of this Y0 U Y1 V group */
            u_plane[row * uv_width + col] = (unsigned char)((src0[base + 1] + src1[base + 1] + 1) / 2);
            v_plane[row * uv_width + col] = (unsigned char)((src0[base + 3] + src1[base + 3] + 1) / 2);
        }
    }
    return 0;
}


int USBCamera::yuv420ToH264(unsigned char *input_buffer, unsigned int input_size, unsigned char *out_data, unsigned int *out_size) {
    int ret;
    unsigned char *encoded_data = out_data; 
    if (encoded_data == NULL) {
        printf("Memory allocation failed\n");
        return -1;
    }
    // x264 编码器相关变量
    int nal_count = 0;
    x264_nal_t *nal_units = nullptr;
    x264_t *encoder_handle = nullptr;
    x264_picture_t *pic_in = (x264_picture_t *)malloc(sizeof(x264_picture_t));
    x264_picture_t *pic_out = (x264_picture_t *)malloc(sizeof(x264_picture_t));
    x264_param_t *encoder_params = (x264_param_t *)malloc(sizeof(x264_param_t));

    // 初始化 x264 参数
    x264_param_default(encoder_params);
    encoder_params->i_width = WIDTH;
    encoder_params->i_height = HEIGHT;
    encoder_params->i_csp = X264_CSP_I420;
    encoder_params->i_fps_num = 25;  // 设置帧率为 25fps
    encoder_params->i_fps_den = 1;
    encoder_params->i_keyint_max = 10;  // 设置最大关键帧间隔
    encoder_params->i_bframe = 0;  // 设置不使用 B 帧


    encoder_params->i_log_level  = X264_LOG_DEBUG;
    encoder_params->i_threads  = X264_SYNC_LOOKAHEAD_AUTO;
    encoder_params->i_frame_total = 0;
    encoder_params->b_open_gop  = 0;
    encoder_params->i_bframe_pyramid = 0;
    encoder_params->rc.i_qp_constant=0;
    encoder_params->rc.i_qp_max=0;
    encoder_params->rc.i_qp_min=0;
    encoder_params->i_bframe_adaptive = X264_B_ADAPT_TRELLIS;
    encoder_params->i_timebase_den = encoder_params->i_fps_num;

    // 应用预定义配置文件
    x264_param_apply_profile(encoder_params, x264_profile_names[4]);

    // 打开编码器
    encoder_handle = x264_encoder_open(encoder_params);
    if (!encoder_handle) {
        printf("Error opening x264 encoder\n");
        return -1;
    }

    // 初始化帧
    x264_picture_init(pic_out);

    if (x264_picture_alloc(pic_out, X264_CSP_I420, WIDTH, HEIGHT) < 0) {
        printf("error: x264_picture_alloc failed\r\n");
        return -1;
    }

    if (x264_picture_alloc(pic_in, X264_CSP_I420, WIDTH, HEIGHT) < 0) {
        printf("error: x264_picture_alloc failed\r\n");
        return -1;
    }
    
    if (pic_in->img.plane[0] == NULL) {
        printf("Error: pic_in->img.plane[0] is NULL\n");
        return -1;
    }
    if (pic_in->img.plane[1] == NULL) {
        printf("Error: pic_in->img.plane[1] is NULL\n");
        return -1;
    }
    if (pic_in->img.plane[2] == NULL) {
        printf("Error: pic_in->img.plane[2] is NULL\n");
        return -1;
    }
    memset(pic_in->img.plane[0], 0, WIDTH * HEIGHT);  // 清零 Y 平面
    memset(pic_in->img.plane[1], 0, (WIDTH / 2) * (HEIGHT / 2));  // 清零 U 平面
    memset(pic_in->img.plane[2], 0, (WIDTH / 2) * (HEIGHT / 2));  // 清零 V 平面
    memcpy(pic_in->img.plane[0],input_buffer,WIDTH * HEIGHT);
    memcpy(pic_in->img.plane[1],input_buffer,WIDTH * HEIGHT / 4);
    memcpy(pic_in->img.plane[2],input_buffer,WIDTH * HEIGHT / 4);

    pic_in->i_pts = 0;  // 设置时间戳，通常是帧的序号
    pic_in->i_type = X264_TYPE_IDR; // 强制为I帧
    // pic_in->i_type = X264_TYPE_AUTO; // 自动
    printf("Input plane[0]: %p\n", pic_in->img.plane[0]);
    printf("Input plane[1]: %p\n", pic_in->img.plane[1]);
    printf("Input plane[2]: %p\n", pic_in->img.plane[2]);
    printf("Output plane[0]: %p\n", pic_out->img.plane[0]);
    printf("Output plane[1]: %p\n", pic_out->img.plane[1]);
    printf("Output plane[2]: %p\n", pic_out->img.plane[2]);

    // 编码当前帧
    for (int i = 0; i < 12; i++){
        ret = x264_encoder_encode(encoder_handle, &nal_units, &nal_count, pic_in, pic_out);
        pic_in->i_pts ++;
    }

    if (ret < 0) {
        printf("Error encoding frame\n");
        free(encoded_data);
        return -1;
    }

    if (nal_count == 0) {
        printf("No NAL units generated\r\n");
        printf("ret %d\r\n",ret);
        return -1;
    }else{
        printf("nal_count %d\r\n",nal_count);
    }
    
    unsigned int total_size = 0;
    for( int i = 0; i < nal_count ; i++){
        total_size += nal_units[i].i_payload;
    }

    if( * out_size < total_size ){
        printf("Out buffers is too small\r\n");
        return -1;
    }

    unsigned char *output_ptr = encoded_data;
    for (int i = 0; i < nal_count; i++) {
        memcpy(output_ptr, nal_units[i].p_payload, nal_units[i].i_payload);  
        output_ptr += nal_units[i].i_payload; 
    }

    printf("H264 Size %d\r\n",total_size);
    *out_size = total_size;

    return 0;
}
