#include "ds_v4l2_camera.h"


/* Set once the V4L2 device has been opened; guards against double init. */
static bool v4l2_dev_flag = 0;

/* Description of one camera pixel format (as enumerated from a driver). */
typedef struct camera_format
{
    unsigned char description[32]; // human-readable format name
    unsigned int pixelformat;      // V4L2 fourcc code
} cam_fmt;

/* One mmap'ed V4L2 capture buffer. */
typedef struct cam_buf_info
{
    unsigned short *start; // start of the mapped buffer
    unsigned long length;  // mapped length in bytes
} cam_buf_info;

static int width;   // framebuffer width in pixels (set by fb_dev_init)
static int height;  // framebuffer height in pixels (set by fb_dev_init)
static unsigned short *screen_base = NULL; // mmap'ed framebuffer memory
static int fb_fd = -1;   // framebuffer device file descriptor
static int v4l2_fd = -1; // camera device file descriptor
static cam_buf_info buf_infos[FRAMEBUFFER_COUNT]; // table of mapped capture buffers
static cam_fmt cam_fmts[10]; // format table (not referenced in this file)
static int frm_width, frm_height; // frame size actually granted by the driver
/* NOTE(review): comparing a pthread_t against NULL is not portable —
 * pthread_t may be a struct on some platforms. Consider tracking a
 * separate "thread running" flag instead. */
static pthread_t g_ds_v4l2_camera_thread = NULL;

int capture_image = 0; // set (by the UI, presumably) to request saving the next frame;
                       // NOTE(review): shared across threads without atomics/locks — TODO confirm
int total_photos = 0;          // number of photos saved so far
int current_photo_index = 0;   // index of the currently displayed photo

void *ds_v4l2_camera_thread(void *args);
void capture_single_image();


/**
 * @brief Initialize the framebuffer device: open FB_DEV, query the screen
 *        geometry, mmap the display memory and clear it to black.
 *
 * Side effects: sets the file-scope globals fb_fd, width, height and
 * screen_base.
 *
 * @return 0 on success, -1 on failure (fb_fd/screen_base are reset on
 *         every error path).
 */
static int fb_dev_init(void)
{
    struct fb_var_screeninfo fb_var = {0};
    struct fb_fix_screeninfo fb_fix = {0};
    unsigned long screen_size; /* total size of the mapped framebuffer, bytes */

    fb_fd = open(FB_DEV, O_RDWR);
    if (0 > fb_fd)
    {
        fprintf(stderr, "open error: %s: %s\n", FB_DEV, strerror(errno));
        return -1;
    }

    /* Variable screen info: resolution, bit depth, ... */
    if (ioctl(fb_fd, FBIOGET_VSCREENINFO, &fb_var) < 0)
    {
        perror("FBIOGET_VSCREENINFO");
        close(fb_fd);
        fb_fd = -1;
        return -1;
    }

    /* Fixed screen info: line stride, memory layout, ... */
    if (ioctl(fb_fd, FBIOGET_FSCREENINFO, &fb_fix) < 0)
    {
        perror("FBIOGET_FSCREENINFO");
        close(fb_fd);
        fb_fd = -1;
        return -1;
    }

    /* BUGFIX: the message used to say "fb_var.line_length" although the
     * value printed is fb_fix.line_length. */
    printf("fb_fix.line_length = %d\n", fb_fix.line_length);

    /* A line may carry padding, so size by the stride, not xres * bpp. */
    screen_size = fb_fix.line_length * fb_var.yres;
    width = fb_var.xres;
    height = fb_var.yres;
    printf("width = %d, height = %d\n", width, height);

    /* Map the framebuffer into this process's address space */
    screen_base = mmap(NULL, screen_size, PROT_READ | PROT_WRITE, MAP_SHARED, fb_fd, 0);
    printf("screen_base = %p\n", (void *)screen_base);
    if (MAP_FAILED == (void *)screen_base)
    {
        perror("mmap error");
        screen_base = NULL;
        close(fb_fd);
        fb_fd = -1;
        return -1;
    }

    /* Paint the whole mapped buffer black (ARGB8888).
     * NOTE(review): assumes a 32bpp framebuffer — confirm against
     * fb_var.bits_per_pixel on the target board.
     * BUGFIX: clear screen_size / 4 words instead of width * height so
     * any per-line padding is also covered (and nothing past the mapping
     * is ever written when line_length > width * 4). */
    uint32_t *base = (uint32_t *)screen_base;
    for (unsigned long i = 0; i < screen_size / 4; i++) {
        base[i] = 0xFF000000; /* opaque black */
    }

    return 0;
}

/*
 * V4L2 camera helpers: device init, format negotiation, buffer
 * management, capture and display.
 */
/**
 * @brief Open a V4L2 device and verify it supports video capture.
 * @param device device node path, e.g. "/dev/video9"
 * @return 0 on success, -1 on failure (v4l2_fd is closed and reset to -1
 *         on every error path).
 */
static int v4l2_dev_init(const char *device)
{
    struct v4l2_capability cap = {0};  /* device capability bits */

    v4l2_fd = open(device, O_RDWR);
    if (0 > v4l2_fd)
    {
        fprintf(stderr, "open error: %s: %s\n", device, strerror(errno));
        return -1;
    }

    /* BUGFIX: the QUERYCAP result used to be ignored — a failing ioctl
     * left cap zeroed and produced a misleading "no capture" error. */
    if (0 > ioctl(v4l2_fd, VIDIOC_QUERYCAP, &cap))
    {
        fprintf(stderr, "ioctl error: VIDIOC_QUERYCAP: %s\n", strerror(errno));
        close(v4l2_fd);
        v4l2_fd = -1;
        return -1;
    }

    /* The device must support the video-capture interface */
    if (!(V4L2_CAP_VIDEO_CAPTURE & cap.capabilities))
    {
        fprintf(stderr, "Error: %s: No capture video device!\n", device);
        close(v4l2_fd);
        v4l2_fd = -1;
        return -1;
    }

    return 0;
}

/**
 * @brief Negotiate the capture format: request IMAGE_WIDTH x IMAGE_HEIGHT
 *        in YUYV and record what the driver actually granted in
 *        frm_width / frm_height.
 * @return 0 on success, -1 on failure.
 */
static int v4l2_set_format(void)
{
    /* BUGFIX(minor): removed the unused local `struct v4l2_streamparm`. */
    struct v4l2_format fmt = {0};

    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = IMAGE_WIDTH;
    fmt.fmt.pix.height = IMAGE_HEIGHT;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; /* camera outputs YUYV 4:2:2 */

    if (0 > ioctl(v4l2_fd, VIDIOC_S_FMT, &fmt))
    {
        fprintf(stderr, "ioctl error: VIDIOC_S_FMT: %s\n", strerror(errno));
        return -1;
    }

    /* S_FMT may silently substitute another pixel format; verify. */
    if (V4L2_PIX_FMT_YUYV != fmt.fmt.pix.pixelformat)
    {
        fprintf(stderr, "Error: the device does not support YUYV format!\n");
        return -1;
    }

    /* The driver may also adjust the frame size; keep the real values. */
    frm_width = fmt.fmt.pix.width;
    frm_height = fmt.fmt.pix.height;
    printf("视频帧大小<%d * %d>, 格式: YUYV\n", frm_width, frm_height);

    return 0;
}

/**
 * @brief Request FRAMEBUFFER_COUNT mmap capture buffers from the driver,
 *        map them into this process and enqueue all of them.
 * @return 0 on success, -1 on failure. BUGFIX: on failure any buffers
 *         already mapped are unmapped (they used to leak), and the
 *         VIDIOC_QUERYBUF result is now checked.
 */
static int v4l2_init_buffer(void)
{
    struct v4l2_requestbuffers reqbuf = {0};
    struct v4l2_buffer buf = {0};
    unsigned int mapped = 0; /* how many buffers are currently mmap'ed */

    /* Ask the driver for the buffers */
    reqbuf.count = FRAMEBUFFER_COUNT;
    reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    reqbuf.memory = V4L2_MEMORY_MMAP;
    if (0 > ioctl(v4l2_fd, VIDIOC_REQBUFS, &reqbuf))
    {
        fprintf(stderr, "ioctl error: VIDIOC_REQBUFS: %s\n", strerror(errno));
        return -1;
    }

    /* Query each buffer's offset/length and map it */
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    for (buf.index = 0; buf.index < FRAMEBUFFER_COUNT; buf.index++)
    {
        if (0 > ioctl(v4l2_fd, VIDIOC_QUERYBUF, &buf))
        {
            fprintf(stderr, "ioctl error: VIDIOC_QUERYBUF: %s\n", strerror(errno));
            goto fail_unmap;
        }
        buf_infos[buf.index].length = buf.length;
        buf_infos[buf.index].start = mmap(NULL, buf.length,
                                          PROT_READ | PROT_WRITE, MAP_SHARED,
                                          v4l2_fd, buf.m.offset);
        if (MAP_FAILED == (void *)buf_infos[buf.index].start)
        {
            perror("mmap error");
            buf_infos[buf.index].start = NULL;
            buf_infos[buf.index].length = 0;
            goto fail_unmap;
        }
        mapped++;
    }

    /* Hand every buffer to the driver's incoming queue */
    for (buf.index = 0; buf.index < FRAMEBUFFER_COUNT; buf.index++)
    {
        if (0 > ioctl(v4l2_fd, VIDIOC_QBUF, &buf))
        {
            fprintf(stderr, "ioctl error: VIDIOC_QBUF: %s\n", strerror(errno));
            goto fail_unmap;
        }
    }

    return 0;

fail_unmap:
    /* Release everything mapped so far so a retry starts clean */
    for (unsigned int i = 0; i < mapped; i++)
    {
        munmap(buf_infos[i].start, buf_infos[i].length);
        buf_infos[i].start = NULL;
        buf_infos[i].length = 0;
    }
    return -1;
}

/**
 * @brief Start streaming on the capture device.
 * @return 0 on success, -1 if VIDIOC_STREAMON fails.
 */
static int v4l2_stream_on(void)
{
    enum v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (ioctl(v4l2_fd, VIDIOC_STREAMON, &capture_type) < 0)
    {
        fprintf(stderr, "ioctl error: VIDIOC_STREAMON: %s\n", strerror(errno));
        return -1;
    }

    return 0;
}

/**
 * @brief 关闭摄像头并释放资源
 */
void close_v4l2_camera(void)
{
    // 取消并等待摄像头线程结束
    if (g_ds_v4l2_camera_thread != NULL)
    {
        pthread_cancel(g_ds_v4l2_camera_thread);
        pthread_join(g_ds_v4l2_camera_thread, NULL);
        g_ds_v4l2_camera_thread = NULL;
    }

    // 停止视频流
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (0 > ioctl(v4l2_fd, VIDIOC_STREAMOFF, &type))
    {
        fprintf(stderr, "ioctl error: VIDIOC_STREAMOFF: %s\n", strerror(errno));
    }

    // 释放所有缓冲区的内存映射
    for (int i = 0; i < FRAMEBUFFER_COUNT; i++)
    {
        if (buf_infos[i].start != NULL && buf_infos[i].length > 0)
        {
            if (munmap(buf_infos[i].start, buf_infos[i].length) < 0)
            {
                fprintf(stderr, "munmap error: %s\n", strerror(errno));
            }
            buf_infos[i].start = NULL;
            buf_infos[i].length = 0;
        }
    }
    
    // 关闭文件描述符
    if (v4l2_fd >= 0) {
        close(v4l2_fd);
        v4l2_fd = -1;
    }
    
    if (fb_fd >= 0) {
        close(fb_fd);
        fb_fd = -1;
    }
    
    // 重置初始化标志
    v4l2_dev_flag = 0;
}



/* Forward declaration (defined earlier in this file / in the header). */
void close_v4l2_camera(void);

/**
 * @brief Full camera bring-up: framebuffer init, V4L2 device init,
 *        format/buffer setup, stream start and capture-thread creation.
 *
 * BUGFIX: every failure path after the device is opened now calls
 * close_v4l2_camera(), which releases fds/mappings and resets
 * v4l2_dev_flag — previously a failed init leaked resources and left
 * the module permanently stuck in the "already opened" state.
 *
 * @return 0 on success, -1 on failure.
 */
int ds_v4l2_camera_init(void)
{
    printf("Opening camera...\n");

    if (fb_dev_init())
    {
        fprintf(stderr, "Failed to initialize framebuffer\n");
        return -1;
    }

    printf("Screen size: %dx%d\n", width, height);
    printf("Image size: %dx%d\n", IMAGE_WIDTH, IMAGE_HEIGHT);

    /* Refuse double initialization */
    if (v4l2_dev_flag != 0)
    {
        printf("Camera already opened, cannot open again\n");
        return -1;
    }

    printf("Opening camera...\n");
    v4l2_dev_flag = 1;

    if (v4l2_dev_init("/dev/video9"))
    {
        fprintf(stderr, "Failed to initialize camera device\n");
        goto fail;
    }

    if (v4l2_set_format())
    {
        fprintf(stderr, "Failed to set camera format\n");
        goto fail;
    }

    if (v4l2_init_buffer())
    {
        fprintf(stderr, "Failed to initialize camera buffers\n");
        goto fail;
    }

    if (v4l2_stream_on())
    {
        fprintf(stderr, "Failed to start camera stream\n");
        goto fail;
    }

    int res = pthread_create(&g_ds_v4l2_camera_thread, NULL, ds_v4l2_camera_thread, NULL);
    if (res != 0)
    {
        printf("pthread_create ds_v4l2_camera_thread failed: %d\n", res);
        goto fail;
    }
    printf("ds_v4l2_camera_thread created successfully\n");
    return 0;

fail:
    /* Release whatever was acquired and reset v4l2_dev_flag so the
     * caller may retry. */
    close_v4l2_camera();
    return -1;
}


/**
 * @brief Write a raw image buffer (2 bytes per pixel, e.g. RGB565) to a file.
 *
 * BUGFIX: the success message used to be printed even when the write was
 * short, and the fclose() of the write stream (which flushes buffered
 * data) was unchecked. "Image saved" is now only printed when the full
 * payload reached the file.
 *
 * @param data     pixel data, width * height 16-bit values
 * @param width    image width in pixels
 * @param height   image height in pixels
 * @param filename destination path (opened with "wb")
 */
static void save_image(unsigned short *data, int width, int height, const char *filename)
{
    FILE *file = fopen(filename, "wb");
    if (!file)
    {
        perror("Error opening file for writing");
        return;
    }

    size_t size = (size_t)width * height * 2; /* 2 bytes per pixel */
    size_t written = fwrite(data, 1, size, file);
    if (written != size)
    {
        perror("Error writing image data to file");
        fclose(file);
        return;
    }

    /* fclose flushes; a failure here means data may not be on disk */
    if (fclose(file) != 0)
    {
        perror("Error closing image file");
        return;
    }

    printf("Image saved as %s\n", filename);
}



/**
 * @brief Convert a YUYV 4:2:2 image to XRGB8888 using BT.601 integer math.
 *
 * BUGFIX: removed the unconditional printf debug logging (this is a
 * per-frame routine; the logging spammed stdout). Also build the pixel
 * with 0xFFu so (0xFF << 24) cannot left-shift into the sign bit of a
 * signed int (undefined behavior).
 *
 * @param yuyv   input buffer, 4 bytes describe 2 pixels (Y0 U Y1 V)
 * @param xrgb   output buffer, width * height 32-bit pixels
 * @param width  image width in pixels (must be even)
 * @param height image height in pixels
 */
static void yuyv_to_xrgb8888(uint8_t *yuyv, uint32_t *xrgb, int width, int height)
{
    int i, j;
    for (j = 0; j < height; j++) {
        for (i = 0; i < width; i += 2) {
            int y0 = yuyv[0];
            int u  = yuyv[1];
            int y1 = yuyv[2];
            int v  = yuyv[3];
            yuyv += 4;

            /* Normalize (BT.601 limited range) */
            int c = y0 - 16;
            int d = u - 128;
            int e = v - 128;

            int r0 = (298 * c + 409 * e + 128) >> 8;
            int g0 = (298 * c - 100 * d - 208 * e + 128) >> 8;
            int b0 = (298 * c + 516 * d + 128) >> 8;

            int r1 = (298 * (y1 - 16) + 409 * e + 128) >> 8;
            int g1 = (298 * (y1 - 16) - 100 * d - 208 * e + 128) >> 8;
            int b1 = (298 * (y1 - 16) + 516 * d + 128) >> 8;

            /* Clamp each channel to 0..255 */
            r0 = (r0 < 0) ? 0 : (r0 > 255) ? 255 : r0;
            g0 = (g0 < 0) ? 0 : (g0 > 255) ? 255 : g0;
            b0 = (b0 < 0) ? 0 : (b0 > 255) ? 255 : b0;
            r1 = (r1 < 0) ? 0 : (r1 > 255) ? 255 : r1;
            g1 = (g1 < 0) ? 0 : (g1 > 255) ? 255 : g1;
            b1 = (b1 < 0) ? 0 : (b1 > 255) ? 255 : b1;

            xrgb[i] = (0xFFu << 24) | ((uint32_t)r0 << 16) | ((uint32_t)g0 << 8) | (uint32_t)b0;
            xrgb[i + 1] = (0xFFu << 24) | ((uint32_t)r1 << 16) | ((uint32_t)g1 << 8) | (uint32_t)b1;
        }
        xrgb += width;
    }
}


/* Clamp an int to the 0..255 range of an 8-bit channel. */
static inline int clamp255(int v)
{
    return (v < 0) ? 0 : ((v > 255) ? 255 : v);
}

/**
 * @brief Convert a YUYV 4:2:2 image to RGB565 (BT.601 integer math).
 *
 * Every 4 input bytes (Y0 U Y1 V) produce two output pixels; the
 * output pointer advances one full row per scanline.
 *
 * @param yuyv   input buffer
 * @param rgb565 output buffer, width * height 16-bit pixels
 * @param width  image width in pixels (must be even)
 * @param height image height in pixels
 */
static void yuyv_to_rgb565(uint8_t *yuyv, uint16_t *rgb565, int width, int height)
{
    for (int row = 0; row < height; row++) {
        for (int col = 0; col < width; col += 2) {
            /* Consume one 4-byte YUYV group */
            int luma0 = *yuyv++ - 16;
            int cb    = *yuyv++ - 128;
            int luma1 = *yuyv++ - 16;
            int cr    = *yuyv++ - 128;

            /* BT.601 YUV -> RGB, integer arithmetic */
            int r0 = clamp255((298 * luma0 + 409 * cr + 128) >> 8);
            int g0 = clamp255((298 * luma0 - 100 * cb - 208 * cr + 128) >> 8);
            int b0 = clamp255((298 * luma0 + 516 * cb + 128) >> 8);

            int r1 = clamp255((298 * luma1 + 409 * cr + 128) >> 8);
            int g1 = clamp255((298 * luma1 - 100 * cb - 208 * cr + 128) >> 8);
            int b1 = clamp255((298 * luma1 + 516 * cb + 128) >> 8);

            /* Pack into 5-6-5 bits */
            rgb565[col]     = ((r0 & 0xF8) << 8) | ((g0 & 0xFC) << 3) | (b0 >> 3);
            rgb565[col + 1] = ((r1 & 0xF8) << 8) | ((g1 & 0xFC) << 3) | (b1 >> 3);
        }
        rgb565 += width;
    }
}



/**
 * @brief Capture loop: dequeue frames from the V4L2 device, convert them
 *        from YUYV to XRGB8888 and blit them centered on the framebuffer;
 *        when the global capture_image flag is set, also save the current
 *        frame as raw RGB565.
 *
 * Runs forever; intended to execute on the dedicated camera thread and be
 * terminated via pthread_cancel (ioctl/usleep are cancellation points).
 */
static void v4l2_read_data(void)
{
    struct v4l2_buffer buf = {0};  // V4L2 buffer descriptor reused for DQBUF/QBUF
    
    // Top-left corner that centers an IMAGE_WIDTH x IMAGE_HEIGHT frame on screen
    const int x = (width - IMAGE_WIDTH) / 2;
    const int y = (height - IMAGE_HEIGHT) / 2;
    
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;

    while (1)
    {
        // Take one filled buffer out of the driver's outgoing queue
        if (ioctl(v4l2_fd, VIDIOC_DQBUF, &buf) < 0) {
            perror("VIDIOC_DQBUF");
            usleep(10000);
            continue;
        }

        // Write straight into the mmap'ed framebuffer
        uint32_t *base = (uint32_t *)screen_base;
        uint8_t *yuyv_data = (uint8_t *)buf_infos[buf.index].start;
        
        // Only the camera rectangle is redrawn; the rest of the screen is
        // left untouched to avoid conflicting with the LVGL UI drawing.

        // YUYV layout: Y0 U0 Y1 V0 Y2 U2 Y3 V2 ... (camera output format).
        // Every 4 bytes describe 2 pixels (two lumas + shared U/V).
        // NOTE(review): the loop walks IMAGE_WIDTH x IMAGE_HEIGHT but the
        // driver granted frm_width x frm_height — assumed equal; confirm.
        
        // Convert YUYV to XRGB8888 and draw directly (panel takes XRGB8888)
        for (int j = 0; j < IMAGE_HEIGHT; j++) {
            for (int i = 0; i < IMAGE_WIDTH; i += 2) {
                // Extract the YUYV components
                int y0 = yuyv_data[0]; // luma of first pixel
                int u  = yuyv_data[1]; // chroma U (shared by both pixels)
                int y1 = yuyv_data[2]; // luma of second pixel
                int v  = yuyv_data[3]; // chroma V (shared by both pixels)
                yuyv_data += 4;

                // Normalize (Y nominal range 16-235, UV 16-240)
                int c0 = y0 - 16;
                int c1 = y1 - 16;
                int d = u - 128;
                int e = v - 128;

                // YUV -> RGB (ITU-R BT.601), integer math to avoid floats
                int r0 = (298 * c0 + 409 * e + 128) >> 8;
                int g0 = (298 * c0 - 100 * d - 208 * e + 128) >> 8;
                int b0 = (298 * c0 + 516 * d + 128) >> 8;

                int r1 = (298 * c1 + 409 * e + 128) >> 8;
                int g1 = (298 * c1 - 100 * d - 208 * e + 128) >> 8;
                int b1 = (298 * c1 + 516 * d + 128) >> 8;

                // Clamp each channel to 0..255
                r0 = (r0 < 0) ? 0 : (r0 > 255) ? 255 : r0;
                g0 = (g0 < 0) ? 0 : (g0 > 255) ? 255 : g0;
                b0 = (b0 < 0) ? 0 : (b0 > 255) ? 255 : b0;
                r1 = (r1 < 0) ? 0 : (r1 > 255) ? 255 : r1;
                g1 = (g1 < 0) ? 0 : (g1 > 255) ? 255 : g1;
                b1 = (b1 < 0) ? 0 : (b1 > 255) ? 255 : b1;

                // Destination coordinates on the screen
                int screen_x1 = x + i;
                int screen_x2 = x + i + 1;
                int screen_y = y + j;
                
                // Bounds-check, then write both pixels to the framebuffer
                if (screen_y >= 0 && screen_y < height) {
                    if (screen_x1 >= 0 && screen_x1 < width) {
                        base[screen_y * width + screen_x1] = 
                            (0xFF << 24) | (r0 << 16) | (g0 << 8) | b0; // XRGB8888: 0xFFRRGGBB
                    }
                    if (screen_x2 >= 0 && screen_x2 < width) {
                        base[screen_y * width + screen_x2] = 
                            (0xFF << 24) | (r1 << 16) | (g1 << 8) | b1; // XRGB8888: 0xFFRRGGBB
                    }
                }
            }
        }

        // If a capture was requested, save this frame as raw RGB565
        if (capture_image)
        {
            capture_image = 0;
            printf("Capturing image...\n");
            
            static int image_count = 0; // running photo counter across calls
            char photo_path[256];
            snprintf(photo_path, sizeof(photo_path), "/mywork/img_rgb/photo_%d.rgb", image_count++);
            
            // Save as RGB565 (2 bytes per pixel)
            size_t rgb_size = IMAGE_WIDTH * IMAGE_HEIGHT * 2;
            uint16_t *rgb565_data = malloc(rgb_size);
            if (rgb565_data) {
                // Re-read the raw YUYV frame and convert it to RGB565
                uint8_t *yuyv_src = (uint8_t *)buf_infos[buf.index].start;
                yuyv_to_rgb565(yuyv_src, rgb565_data, IMAGE_WIDTH, IMAGE_HEIGHT);
                save_image(rgb565_data, IMAGE_WIDTH, IMAGE_HEIGHT, photo_path);
                free(rgb565_data);
            }
            
            // NOTE(review): total_photos is shared with other threads but
            // updated without a lock/atomic — TODO confirm acceptable.
            total_photos = image_count;
            printf("Image saved as %s, total_photos = %d\n", photo_path, total_photos);
        }
        
        // Return the buffer to the driver's incoming queue
        if (ioctl(v4l2_fd, VIDIOC_QBUF, &buf) < 0) {
            perror("VIDIOC_QBUF");
            usleep(10000);
            continue;
        }
        
        usleep(10000); // ~10 ms pacing between frames
    }
}



/**
 * @brief Load a raw RGB565 photo file and draw it centered on the
 *        framebuffer as XRGB8888.
 *
 * BUGFIX: the short-read diagnostic used %ld for size_t values, which is
 * undefined behavior on platforms where size_t is not long — use %zu.
 * Also use 0xFFu for the alpha shift so (0xFF << 24) cannot overflow a
 * signed int.
 *
 * @param photo_path path to the raw RGB565 file
 * @param pho_width  photo width in pixels
 * @param pho_height photo height in pixels
 */
void display_photo(const char *photo_path, int pho_width, int pho_height)
{
    FILE *file = fopen(photo_path, "rb");
    if (!file)
    {
        perror("Failed to open photo file");
        return;
    }

    size_t pho_size = (size_t)pho_width * pho_height * 2; /* 2 bytes per RGB565 pixel */
    uint16_t *image_data = malloc(pho_size);
    if (!image_data)
    {
        perror("Failed to allocate memory for photo");
        fclose(file);
        return;
    }

    size_t read_bytes = fread(image_data, 1, pho_size, file);
    if (read_bytes != pho_size)
    {
        perror("Failed to read photo data");
        printf("Expected: %zu, Read: %zu\n", pho_size, read_bytes);
        free(image_data);
        fclose(file);
        return;
    }
    fclose(file);

    /* Center the photo on the screen */
    const int x = (width - pho_width) / 2;
    const int y = (height - pho_height) / 2;

    uint32_t *base = (uint32_t *)screen_base;

    /* Only the photo rectangle is drawn, to avoid fighting the LVGL UI. */
    for (int j = 0; j < pho_height; j++)
    {
        for (int i = 0; i < pho_width; i++)
        {
            uint16_t rgb565 = image_data[j * pho_width + i];
            uint8_t r = (rgb565 >> 11) & 0x1F;
            uint8_t g = (rgb565 >> 5) & 0x3F;
            uint8_t b = rgb565 & 0x1F;

            /* Expand the 5/6-bit channels to full 8 bit */
            r = (r * 255) / 31;
            g = (g * 255) / 63;
            b = (b * 255) / 31;

            int screen_x = x + i;
            int screen_y = y + j;

            if (screen_x >= 0 && screen_x < width && screen_y >= 0 && screen_y < height)
            {
                base[screen_y * width + screen_x] =
                    (0xFFu << 24) | ((uint32_t)r << 16) | ((uint32_t)g << 8) | (uint32_t)b;
            }
        }
    }

    free(image_data);
    printf("Photo displayed successfully at position (%d, %d)\n", x, y);
}

/**
 * @brief Pause the camera: stop the capture thread, then turn the video
 *        stream off.
 * @return 0 on success, -1 if VIDIOC_STREAMOFF fails.
 */
int pause_v4l2_camera(void)
{
    /* Reap the capture thread before stopping the stream */
    if (g_ds_v4l2_camera_thread != NULL)
    {
        pthread_cancel(g_ds_v4l2_camera_thread);
        pthread_join(g_ds_v4l2_camera_thread, NULL);
        g_ds_v4l2_camera_thread = NULL;
    }

    enum v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(v4l2_fd, VIDIOC_STREAMOFF, &capture_type) < 0)
    {
        fprintf(stderr, "ioctl error: VIDIOC_STREAMOFF: %s\n", strerror(errno));
        return -1;
    }

    return 0;
}

/**
 * @brief Resume the camera: restart the video stream and relaunch the
 *        capture thread.
 * @return 0 on success, -1 on failure.
 */
int resume_v4l2_camera(void)
{
    if (v4l2_stream_on() != 0)
    {
        fprintf(stderr, "Failed to start camera stream\n");
        return -1;
    }

    int rc = pthread_create(&g_ds_v4l2_camera_thread, NULL, ds_v4l2_camera_thread, NULL);
    if (rc != 0)
    {
        printf("pthread_create ds_v4l2_camera_thread failed: %d\n", rc);
        return -1;
    }

    printf("ds_v4l2_camera_thread created successfully\n");
    return 0;
}

/**
 * @brief Camera thread entry point: runs the blocking capture loop
 *        until the thread is cancelled.
 * @param args unused
 * @return NULL (the loop normally never returns)
 */
void *ds_v4l2_camera_thread(void *args)
{
    (void)args; /* unused */

    printf("Camera thread started\n");
    v4l2_read_data();
    printf("Camera thread exiting\n");

    return NULL;
}