#include "IngenicVideoSource.h"
#include <chrono>
#include <random>
#include <cstring>
#include <iostream>

// Global configuration tables shared by the sample_* helpers below.
// NOTE(review): non-static globals — assumed to be declared extern in
// IngenicVideoSource.h; confirm to avoid ODR surprises.
struct chn_conf chn[FS_CHN_NUM];
IMPSensorInfo sensor_info[1];

// 君正SDK系统初始化
int sample_system_init() {
    IMP_LOG_DBG("IngenicVideoSource", "sample_system_init start\n");

    memset(&chn, 0, sizeof(chn));
    
    // 配置通道0
    chn[0].index = CH0_INDEX;
    chn[0].enable = CHN0_EN;
    chn[0].payloadType = IMP_ENC_PROFILE_AVC_MAIN;
    chn[0].fs_chn_attr.i2dattr.i2d_enable = 0;
    chn[0].fs_chn_attr.i2dattr.flip_enable = 0;
    chn[0].fs_chn_attr.i2dattr.mirr_enable = 0;
    chn[0].fs_chn_attr.i2dattr.rotate_enable = 1;
    chn[0].fs_chn_attr.i2dattr.rotate_angle = 270;
    chn[0].fs_chn_attr.pixFmt = PIX_FMT_NV12;
    chn[0].fs_chn_attr.outFrmRateNum = FIRST_SENSOR_FRAME_RATE_NUM;
    chn[0].fs_chn_attr.outFrmRateDen = FIRST_SENSOR_FRAME_RATE_DEN;
    chn[0].fs_chn_attr.nrVBs = 2;
    chn[0].fs_chn_attr.type = FS_PHY_CHANNEL;
    chn[0].fs_chn_attr.crop.enable = FIRST_CROP_EN;
    chn[0].fs_chn_attr.crop.top = 0;
    chn[0].fs_chn_attr.crop.left = 0;
    chn[0].fs_chn_attr.crop.width = FIRST_SENSOR_WIDTH;
    chn[0].fs_chn_attr.crop.height = FIRST_SENSOR_HEIGHT;
    chn[0].fs_chn_attr.scaler.enable = 1;
    chn[0].fs_chn_attr.scaler.outwidth = FIRST_SENSOR_WIDTH;
    chn[0].fs_chn_attr.scaler.outheight = FIRST_SENSOR_HEIGHT;
    chn[0].fs_chn_attr.picWidth = FIRST_SENSOR_WIDTH;
    chn[0].fs_chn_attr.picHeight = FIRST_SENSOR_HEIGHT;
    chn[0].framesource_chn = { DEV_ID_FS, CH0_INDEX, 0 };
    chn[0].imp_encoder = { DEV_ID_ENC, CH0_INDEX, 0 };

    // 配置通道1
    chn[1].index = CH1_INDEX;
    chn[1].enable = CHN1_EN;
    chn[1].payloadType = IMP_ENC_PROFILE_AVC_MAIN;
    chn[1].fs_chn_attr.i2dattr.i2d_enable = 0;
    chn[1].fs_chn_attr.i2dattr.flip_enable = 0;
    chn[1].fs_chn_attr.i2dattr.mirr_enable = 0;
    chn[1].fs_chn_attr.i2dattr.rotate_enable = 1;
    chn[1].fs_chn_attr.i2dattr.rotate_angle = 270;
    chn[1].fs_chn_attr.pixFmt = PIX_FMT_NV12;
    chn[1].fs_chn_attr.outFrmRateNum = FIRST_SENSOR_FRAME_RATE_NUM;
    chn[1].fs_chn_attr.outFrmRateDen = FIRST_SENSOR_FRAME_RATE_DEN;
    chn[1].fs_chn_attr.nrVBs = 2;
    chn[1].fs_chn_attr.type = FS_PHY_CHANNEL;
    chn[1].fs_chn_attr.crop.enable = FIRST_CROP_EN;
    chn[1].fs_chn_attr.crop.top = 0;
    chn[1].fs_chn_attr.crop.left = 0;
    chn[1].fs_chn_attr.crop.width = FIRST_SENSOR_WIDTH_SECOND;
    chn[1].fs_chn_attr.crop.height = FIRST_SENSOR_HEIGHT_SECOND;
    chn[1].fs_chn_attr.scaler.enable = 1;
    chn[1].fs_chn_attr.scaler.outwidth = FIRST_SENSOR_WIDTH_SECOND;
    chn[1].fs_chn_attr.scaler.outheight = FIRST_SENSOR_HEIGHT_SECOND;
    chn[1].fs_chn_attr.picWidth = FIRST_SENSOR_WIDTH_SECOND;
    chn[1].fs_chn_attr.picHeight = FIRST_SENSOR_HEIGHT_SECOND;
    chn[1].framesource_chn = { DEV_ID_FS, CH1_INDEX, 0 };
    chn[1].imp_encoder = { DEV_ID_ENC, CH1_INDEX, 0 };

    // 初始化IMP系统
    int ret = IMP_System_Init();
    if (ret < 0) {
        IMP_LOG_ERR("IngenicVideoSource", "IMP_System_Init failed\n");
        return -1;
    }

    // 配置传感器信息
    strcpy(sensor_info[0].name, FIRST_SNESOR_NAME);
    sensor_info[0].i2c.type = TX_SENSOR_CONTROL_INTERFACE_I2C;
    strcpy(sensor_info[0].i2c.i2c.name, FIRST_SNESOR_NAME);
    sensor_info[0].i2c.i2c.addr = FIRST_I2C_ADDR;
    sensor_info[0].i2c.i2c.i2c_adapter_id = FIRST_I2C_ADAPTER_ID;
    sensor_info[0].rst_pin = FIRST_RST_GPIO;
    sensor_info[0].pwdn_pin = FIRST_PWDN_GPIO;
    sensor_info[0].power_pin = FIRST_POWER_GPIO;
    sensor_info[0].sensor_id = FIRST_SENSOR_ID;
    sensor_info[0].mclk = FIRST_MCLK;
    sensor_info[0].default_boot = FIRST_DEFAULT_BOOT;

    // 初始化ISP
    ret = IMP_ISP_Open();
    if (ret < 0) {
        IMP_LOG_ERR("IngenicVideoSource", "IMP_ISP_Open failed\n");
        return -1;
    }

    ret = IMP_ISP_AddSensor(IMPVI_MAIN, &sensor_info[0]);
    if (ret < 0) {
        IMP_LOG_ERR("IngenicVideoSource", "IMP_ISP_AddSensor failed\n");
        return -1;
    }

    ret = IMP_ISP_EnableSensor(IMPVI_MAIN);
    if (ret < 0) {
        IMP_LOG_ERR("IngenicVideoSource", "IMP_ISP_EnableSensor failed\n");
        return -1;
    }

    ret = IMP_ISP_EnableTuning();
    if (ret < 0) {
        IMP_LOG_ERR("IngenicVideoSource", "IMP_ISP_EnableTuning failed\n");
        return -1;
    }

    // 设置传感器帧率
    IMPISPSensorFps fpsAttr;
    fpsAttr.num = FIRST_SENSOR_FRAME_RATE_NUM;
    fpsAttr.den = FIRST_SENSOR_FRAME_RATE_DEN;
    ret = IMP_ISP_Tuning_SetSensorFPS(0, &fpsAttr);
    if (ret < 0) {
        IMP_LOG_ERR("IngenicVideoSource", "IMP_ISP_Tuning_SetSensorFPS failed\n");
        return -1;
    }

    IMP_LOG_DBG("IngenicVideoSource", "sample_system_init success\n");
    return 0;
}

// Tears down the IMP system and the ISP/sensor chain.
// Call order mirrors the stock Ingenic samples:
// System_Exit -> DisableSensor -> DelSensor -> DisableTuning -> Close.
// Returns 0 on success, -1 on the first failing SDK call (later steps are
// then skipped, which can leave the ISP partially open).
int sample_system_exit() {
    IMP_LOG_DBG("IngenicVideoSource", "sample_system_exit start\n");

    int ret = 0;

    // Exit the IMP system first (modules must already be unbound).
    ret = IMP_System_Exit();
    if (ret < 0) {
        IMP_LOG_ERR("IngenicVideoSource", "IMP_System_Exit failed\n");
        return -1;
    }

    // Disable the sensor.
    ret = IMP_ISP_DisableSensor(IMPVI_MAIN);
    if (ret < 0) {
        IMP_LOG_ERR("IngenicVideoSource", "IMP_ISP_DisableSensor failed\n");
        return -1;
    }

    // Detach the sensor from the ISP.
    ret = IMP_ISP_DelSensor(IMPVI_MAIN, &sensor_info[0]);
    if (ret < 0) {
        IMP_LOG_ERR("IngenicVideoSource", "IMP_ISP_DelSensor failed\n");
        return -1;
    }

    // Disable ISP tuning.
    ret = IMP_ISP_DisableTuning();
    if (ret < 0) {
        IMP_LOG_ERR("IngenicVideoSource", "IMP_ISP_DisableTuning failed\n");
        return -1;
    }

    // Close the ISP device.
    ret = IMP_ISP_Close();
    if (ret < 0) {
        IMP_LOG_ERR("IngenicVideoSource", "IMP_ISP_Close failed\n");
        return -1;
    }

    IMP_LOG_DBG("IngenicVideoSource", "sample_system_exit success\n");
    return 0;
}

int sample_framesource_init() {
    IMP_LOG_DBG("IngenicVideoSource", "sample_framesource_init start\n");

    int i, ret;

    for (i = 0; i < FS_CHN_NUM; i++) {
        if (chn[i].enable) {
            ret = IMP_FrameSource_CreateChn(chn[i].index, &chn[i].fs_chn_attr);
            if (ret < 0) {
                IMP_LOG_ERR("IngenicVideoSource", "IMP_FrameSource_CreateChn(chn%d) error !\n", chn[i].index);
                return -1;
            }

            ret = IMP_FrameSource_SetChnAttr(chn[i].index, &chn[i].fs_chn_attr);
            if (ret < 0) {
                IMP_LOG_ERR("IngenicVideoSource", "IMP_FrameSource_SetChnAttr(chn%d) error !\n", chn[i].index);
                return -1;
            }
        }
    }

    IMP_LOG_DBG("IngenicVideoSource", "sample_framesource_init success\n");
    return 0;
}

int sample_framesource_exit() {
    IMP_LOG_DBG("IngenicVideoSource", "sample_framesource_exit start\n");

    int i, ret;

    // 销毁framesource通道
    for (i = 0; i < FS_CHN_NUM; i++) {
        if (chn[i].enable) {
            ret = IMP_FrameSource_DestroyChn(chn[i].index);
            if (ret < 0) {
                IMP_LOG_ERR("IngenicVideoSource", "IMP_FrameSource_DestroyChn(%d) error: %d\n", chn[i].index, ret);
                return -1;
            }
        }
    }

    IMP_LOG_DBG("IngenicVideoSource", "sample_framesource_exit success\n");
    return 0;
}

// Binds each enabled FrameSource output cell to its encoder input cell so
// captured frames flow into the encoder automatically.
// Returns 0 on success, -1 on the first bind failure.
int sample_encoder_bind_framesource() {
    IMP_LOG_DBG("IngenicVideoSource", "sample_encoder_bind_framesource start\n");

    for (int idx = 0; idx < FS_CHN_NUM; ++idx) {
        if (!chn[idx].enable)
            continue;

        int rc = IMP_System_Bind(&chn[idx].framesource_chn, &chn[idx].imp_encoder);
        if (rc < 0) {
            IMP_LOG_ERR("IngenicVideoSource", "IMP_System_Bind FrameSource(%d) to Encoder(%d) failed: %d\n", 
                       chn[idx].framesource_chn.devID, chn[idx].imp_encoder.devID, rc);
            return -1;
        }
        IMP_LOG_DBG("IngenicVideoSource", "Successfully bound FrameSource(%d) to Encoder(%d)\n", 
                   chn[idx].framesource_chn.devID, chn[idx].imp_encoder.devID);
    }

    IMP_LOG_DBG("IngenicVideoSource", "sample_encoder_bind_framesource success\n");
    return 0;
}

// Unbinds each enabled FrameSource cell from its encoder cell.
// Deliberately best-effort: a failed unbind is logged but does not stop the
// loop, and the function always returns 0 so teardown can proceed.
int sample_encoder_unbind_framesource() {
    IMP_LOG_DBG("IngenicVideoSource", "sample_encoder_unbind_framesource start\n");

    for (int idx = 0; idx < FS_CHN_NUM; ++idx) {
        if (!chn[idx].enable)
            continue;

        int rc = IMP_System_UnBind(&chn[idx].framesource_chn, &chn[idx].imp_encoder);
        if (rc < 0) {
            // Keep going — remaining channels should still be unbound.
            IMP_LOG_ERR("IngenicVideoSource", "IMP_System_UnBind FrameSource(%d) to Encoder(%d) failed: %d\n", 
                       chn[idx].framesource_chn.devID, chn[idx].imp_encoder.devID, rc);
        }
    }

    IMP_LOG_DBG("IngenicVideoSource", "sample_encoder_unbind_framesource success\n");
    return 0;
}

// Creates one encoder group plus an H.264 CBR channel per enabled
// FrameSource channel, applies rate control, and registers the channel.
// Returns 0 on success, -1 on the first SDK failure.
//
// Bug fixed vs. the original: rcAttr's CBR bitrate fields were filled in the
// per-channel branch and then wiped by a later memset(&rcAttr, 0, ...), so
// the encoder never saw uTargetBitRate/uMaxBitRate/uMaxPictureSize. The
// structure is now zeroed FIRST and populated afterwards.
int sample_encoder_init() {
    IMP_LOG_DBG("IngenicVideoSource", "sample_encoder_init start\n");

    int i, ret, chnNum;
    IMPEncoderChnAttr channel_attr;
    IMPEncoderRcAttr rcAttr;

    for (i = 0; i < FS_CHN_NUM; i++) {
        if (!chn[i].enable)
            continue;

        chnNum = chn[i].index;

        // One encoder group per channel (group id == channel id).
        ret = IMP_Encoder_CreateGroup(chnNum);
        if (ret < 0) {
            IMP_LOG_ERR("IngenicVideoSource", "IMP_Encoder_CreateGroup(%d) error !\n", chnNum);
            return -1;
        }

        // ---- Channel (stream) attributes ----
        memset(&channel_attr, 0, sizeof(IMPEncoderChnAttr));
        channel_attr.encoderType = IMP_ENC_TYPE_H264;
        channel_attr.attrH264.profile = chn[i].payloadType;
        channel_attr.attrH264.level = IMP_ENC_LEVEL_H264_40;
        channel_attr.attrH264.gopLength = 30;  // GOP size; tune as needed

        // ---- Rate-control attributes: zero first, then populate ----
        memset(&rcAttr, 0, sizeof(IMPEncoderRcAttr));
        rcAttr.rcMode = IMP_ENC_RC_MODE_CBR;
        rcAttr.attrCbr.iInitialQP = -1;   // -1: let the encoder pick
        rcAttr.attrCbr.iMinQP = 10;
        rcAttr.attrCbr.iMaxQP = 51;
        rcAttr.attrCbr.iIPDelta = -1;
        rcAttr.attrCbr.iPBDelta = -1;
        rcAttr.attrCbr.eRcOptions = IMP_ENC_RC_SCN_CHG_RES | IMP_ENC_RC_OPT_SC_PREVENTION;
        rcAttr.attrCbr.uMaxPSNR = 42;

        // Per-channel bitrate: main stream (1080p) vs. sub stream (720p).
        if (chnNum == 0) {
            channel_attr.attrH264.bps = 4000 * 1000;            // 4 Mbps
            channel_attr.attrH264.maxBps = 4000 * 1000 * 1.2;   // peak 4.8 Mbps
            rcAttr.attrCbr.uTargetBitRate = 4000 * 1000;
            rcAttr.attrCbr.uMaxBitRate = 4000 * 1000 * 1.2;
            rcAttr.attrCbr.uMaxPictureSize = 4000 * 1000 * 4 / 3;
        } else {
            channel_attr.attrH264.bps = BITRATE_720P_Kbs * 1000;
            channel_attr.attrH264.maxBps = BITRATE_720P_Kbs * 1000 * 1.2;
            rcAttr.attrCbr.uTargetBitRate = BITRATE_720P_Kbs * 1000;
            rcAttr.attrCbr.uMaxBitRate = BITRATE_720P_Kbs * 1000 * 1.2;
            rcAttr.attrCbr.uMaxPictureSize = BITRATE_720P_Kbs * 1000 * 4 / 3;
        }

        channel_attr.attrH264.width = chn[i].fs_chn_attr.picWidth;
        channel_attr.attrH264.height = chn[i].fs_chn_attr.picHeight;
        channel_attr.attrH264.fps = chn[i].fs_chn_attr.outFrmRateNum;
        channel_attr.attrH264.rcMode = IMP_ENC_RC_MODE_CBR;

        // NOTE(review): the RC attributes are applied before the channel is
        // created, preserving the original call order — confirm against the
        // IMP SDK whether SetChnRcAttr requires an existing channel.
        ret = IMP_Encoder_SetChnRcAttr(chnNum, &rcAttr);
        if (ret < 0) {
            IMP_LOG_ERR("IngenicVideoSource", "IMP_Encoder_SetChnRcAttr(%d) error !\n", chnNum);
            return -1;
        }

        // Create the encoder channel.
        ret = IMP_Encoder_CreateChn(chnNum, &channel_attr);
        if (ret < 0) {
            IMP_LOG_ERR("IngenicVideoSource", "IMP_Encoder_CreateChn(%d) error !\n", chnNum);
            return -1;
        }

        // Register the channel into its group.
        ret = IMP_Encoder_RegisterChn(chn[i].index, chnNum);
        if (ret < 0) {
            IMP_LOG_ERR("IngenicVideoSource", "IMP_Encoder_RegisterChn(%d, %d) error: %d\n", chn[i].index, chnNum, ret);
            return -1;
        }
    }

    IMP_LOG_DBG("IngenicVideoSource", "sample_encoder_init success\n");
    return 0;
}

// Tears down every enabled encoder channel: unregister + destroy the channel
// if it is registered, then destroy its group.
// Returns 0 on success, -1 on the first failing SDK call.
//
// Bug fixed vs. the original: IMP_Encoder_DestroyGroup() only ran inside the
// `registered` branch, so a group whose channel was created but never
// registered (or already unregistered) leaked. The group is created for
// every enabled channel in sample_encoder_init(), so it is now destroyed
// unconditionally.
int sample_encoder_exit() {
    IMP_LOG_DBG("IngenicVideoSource", "sample_encoder_exit start\n");

    int ret = 0, i = 0, chnNum = 0;
    IMPEncoderChnStat chn_stat;

    for (i = 0; i < FS_CHN_NUM; i++) {
        if (!chn[i].enable)
            continue;

        chnNum = chn[i].index;
        memset(&chn_stat, 0, sizeof(IMPEncoderChnStat));
        ret = IMP_Encoder_Query(chnNum, &chn_stat);
        if (ret < 0) {
            IMP_LOG_ERR("IngenicVideoSource", "IMP_Encoder_Query(%d) error: %d\n", chnNum, ret);
            return -1;
        }

        if (chn_stat.registered) {
            ret = IMP_Encoder_UnRegisterChn(chnNum);
            if (ret < 0) {
                IMP_LOG_ERR("IngenicVideoSource", "IMP_Encoder_UnRegisterChn(%d) error: %d\n", chnNum, ret);
                return -1;
            }

            ret = IMP_Encoder_DestroyChn(chnNum);
            if (ret < 0) {
                IMP_LOG_ERR("IngenicVideoSource", "IMP_Encoder_DestroyChn(%d) error: %d\n", chnNum, ret);
                return -1;
            }
        }

        // Destroy the group regardless of registration state (it exists for
        // every enabled channel — see sample_encoder_init).
        ret = IMP_Encoder_DestroyGroup(chnNum);
        if (ret < 0) {
            IMP_LOG_ERR("IngenicVideoSource", "IMP_Encoder_DestroyGroup(%d) error: %d\n", chnNum, ret);
            return -1;
        }
    }

    IMP_LOG_DBG("IngenicVideoSource", "sample_encoder_exit success\n");
    return 0;
}

int sample_framesource_streamon() {
    int ret = 0, i = 0;

    // 启用framesource通道
    for (i = 0; i < FS_CHN_NUM; i++) {
        if (chn[i].enable) {
            ret = IMP_FrameSource_EnableChn(chn[i].index);
            if (ret < 0) {
                IMP_LOG_ERR("IngenicVideoSource", "IMP_FrameSource_EnableChn failed, error chn index: %d\n", chn[i].index);
                return -1;
            }
        }
    }

    return 0;
}

int sample_framesource_streamoff() {
    int ret = 0, i = 0;

    // 禁用framesource通道
    for (i = 0; i < FS_CHN_NUM; i++) {
        if (chn[i].enable) {
            ret = IMP_FrameSource_DisableChn(chn[i].index);
            if (ret < 0) {
                IMP_LOG_ERR("IngenicVideoSource", "IMP_FrameSource_DisableChn failed, error chn index: %d\n", chn[i].index);
                return -1;
            }
        }
    }

    return 0;
}

// Per-channel encoder drain loop (pthread entry point).
// The channel number is smuggled through the void* argument (low 16 bits).
// Runs until g_running is cleared, forwarding every encoded stream to
// IngenicPutH264DataToBuffer(). Returns (void*)-1 if StartRecvPic fails,
// (void*)0 otherwise.
//
// Bugs fixed vs. the original:
//  - `(int)args` truncates the pointer on LP64 targets (and is ill-formed);
//    the value now round-trips through intptr_t.
//  - time(NULL) was stored in an `int`, truncating time_t.
static void* get_video_stream_thread(void* args) {
    int ret;
    int chnNum = (int)(intptr_t)args & 0xffff;

    // Thread identification for debugging.
    IMP_LOG_DBG("IngenicVideoSource", "Starting video stream thread for channel %d\n", chnNum);

    // Tell the encoder to start producing pictures for this channel.
    ret = IMP_Encoder_StartRecvPic(chnNum);
    if (ret < 0) {
        IMP_LOG_ERR("IngenicVideoSource", "IMP_Encoder_StartRecvPic(%d) failed\n", chnNum);
        return (void*)(intptr_t)-1;
    }

    int frameCount = 0;
    time_t lastSecond = 0;
    int fps = 0;

    // Loop until the global shutdown flag is cleared.
    while (g_running) {
        // Poll for encoded data with a 1000ms timeout.
        ret = IMP_Encoder_PollingStream(chnNum, 1000);
        if (ret < 0) {
            if (ret != IMP_VIDEO_ENCODER_TIME_OUT) {
                IMP_LOG_ERR("IngenicVideoSource", "IMP_Encoder_PollingStream(%d) error: %d\n", chnNum, ret);
                usleep(10000); // brief back-off before retrying
            }
            continue;
        }

        // Fetch the encoded stream (blocking=1).
        IMPEncoderStream stream;
        memset(&stream, 0, sizeof(IMPEncoderStream));
        ret = IMP_Encoder_GetStream(chnNum, &stream, 1);
        if (ret < 0) {
            IMP_LOG_ERR("IngenicVideoSource", "IMP_Encoder_GetStream(%d) failed, error: %d\n", chnNum, ret);
            continue;
        }

        // Once-per-second FPS accounting (debug only).
        frameCount++;
        time_t currentSecond = time(NULL);
        if (currentSecond != lastSecond) {
            fps = frameCount;
            frameCount = 0;
            lastSecond = currentSecond;
            IMP_LOG_DBG("IngenicVideoSource", "Channel %d FPS: %d\n", chnNum, fps);
        }

        // Hand the packets to the frame queue, tagged with the channel.
        IMP_LOG_DBG("IngenicVideoSource", "Got stream from channel %d with %d packets\n", chnNum, stream.packCount);
        IngenicPutH264DataToBuffer(&stream, chnNum);

        // Return the stream buffers to the encoder.
        ret = IMP_Encoder_ReleaseStream(chnNum, &stream);
        if (ret < 0) {
            IMP_LOG_ERR("IngenicVideoSource", "IMP_Encoder_ReleaseStream(%d) failed\n", chnNum);
        }
    }

    // Stop picture reception on the way out.
    ret = IMP_Encoder_StopRecvPic(chnNum);
    if (ret < 0) {
        IMP_LOG_ERR("IngenicVideoSource", "IMP_Encoder_StopRecvPic(%d) failed\n", chnNum);
    }

    IMP_LOG_DBG("IngenicVideoSource", "Video stream thread for channel %d exited\n", chnNum);
    return (void*)0;
}

int sample_get_video_stream() {
    unsigned int i;
    int ret;
    pthread_t tid[FS_CHN_NUM];

    for (i = 0; i < FS_CHN_NUM; i++) {
        if (chn[i].enable) {
            int arg = chn[i].index;
            ret = pthread_create(&tid[i], NULL, get_video_stream_thread, (void*)arg);
            if (ret < 0) {
                IMP_LOG_ERR("IngenicVideoSource", "Create ChnNum%d get_video_stream_thread failed\n", chn[i].index);
            }
        }
    }

    for (i = 0; i < FS_CHN_NUM; i++) {
        if (chn[i].enable) {
            pthread_join(tid[i], NULL);
        }
    }

    return 0;
}

// Cached H.264 parameter sets (SPS/PPS) captured from the encoder stream,
// re-sent ahead of each IDR frame so late-joining decoders can initialize.
struct H264HeaderInfo {
    std::vector<unsigned char> sps_data;  // latest SPS NAL payload
    std::vector<unsigned char> pps_data;  // latest PPS NAL payload
    bool valid;                           // true once either header has been captured
    std::chrono::steady_clock::time_point timestamp; // last update time
};

// Per-channel SPS/PPS cache, indexed by FrameSource channel.
// NOTE(review): std::vector is used but <vector> is not included in this
// file — presumably pulled in via IngenicVideoSource.h; confirm.
static H264HeaderInfo g_h264Headers[FS_CHN_NUM];

// Pushes every packet of an encoded IMP stream into the active
// IngenicVideoSource frame queue, tagged with the originating channel.
// Frame-type codes used downstream: 0=IDR, 1=P, 3=SPS, 4=PPS.
// Returns 0 on success, -1 when shutting down or on invalid input.
//
// Bug fixed vs. the original: stream->packCount was dereferenced (in a debug
// log) BEFORE the `!stream` null check — validation now happens first.
// NOTE(review): the default argument here assumes the header declares the
// function WITHOUT one (a default may appear in only one declaration).
int IngenicPutH264DataToBuffer(IMPEncoderStream* stream, int channelNum = 0) {
    // Validate the stream before touching any of its fields.
    if (!stream || stream->packCount == 0 || !stream->pack) {
        IMP_LOG_ERR("IngenicVideoSource", "Invalid H264 stream data\n");
        return -1;
    }

    // Bail out quickly during shutdown.
    if (!g_running) {
        return -1;
    }

    // Clamp out-of-range channel numbers to channel 0.
    if (channelNum < 0 || channelNum >= FS_CHN_NUM) {
        IMP_LOG_ERR("IngenicVideoSource", "Invalid channel number: %d\n", channelNum);
        channelNum = 0;
    }

    IMP_LOG_DBG("IngenicVideoSource", "Received H264 stream from channel %d with %d packets\n", 
               channelNum, stream->packCount);

    // A live source instance must exist to receive the frames.
    if (!g_globalVideoSource) {
        IMP_LOG_ERR("IngenicVideoSource", "No active IngenicVideoSource instance\n");
        return -1;
    }

    // Walk every packet in the stream.
    for (int i = 0; i < stream->packCount; i++) {
        IMPEncoderPack* pack = &stream->pack[i];

        if (!pack || !pack->virAddr || pack->length <= 0) {
            IMP_LOG_ERR("IngenicVideoSource", "Invalid packet %d: null address or zero length\n", i);
            continue;
        }

        unsigned char* data = reinterpret_cast<unsigned char*>(pack->virAddr);

        if (pack->isSPS) {
            // Cache the SPS and also forward it immediately (type 3).
            IMP_LOG_DBG("IngenicVideoSource", "Found SPS packet at packet %d, length: %d\n", i, pack->length);
            g_h264Headers[channelNum].sps_data.assign(data, data + pack->length);
            g_h264Headers[channelNum].valid = true;
            g_h264Headers[channelNum].timestamp = std::chrono::steady_clock::now();

            g_globalVideoSource->addFrameData(data, pack->length, 3, channelNum);
        } else if (pack->isPPS) {
            // Cache the PPS and also forward it immediately (type 4).
            IMP_LOG_DBG("IngenicVideoSource", "Found PPS packet at packet %d, length: %d\n", i, pack->length);
            g_h264Headers[channelNum].pps_data.assign(data, data + pack->length);
            g_h264Headers[channelNum].valid = true;
            g_h264Headers[channelNum].timestamp = std::chrono::steady_clock::now();

            g_globalVideoSource->addFrameData(data, pack->length, 4, channelNum);
        } else {
            unsigned frameType = 1; // default: P frame
            if (pack->isIDR) {
                frameType = 0; // IDR frame
                IMP_LOG_DBG("IngenicVideoSource", "Found IDR frame at packet %d, length: %d\n", i, pack->length);

                // Re-send the cached SPS/PPS before each IDR so decoders
                // joining mid-stream can initialize.
                if (g_h264Headers[channelNum].valid) {
                    if (!g_h264Headers[channelNum].sps_data.empty()) {
                        g_globalVideoSource->addFrameData(
                            g_h264Headers[channelNum].sps_data.data(), 
                            g_h264Headers[channelNum].sps_data.size(), 
                            3, // SPS
                            channelNum
                        );
                    }
                    if (!g_h264Headers[channelNum].pps_data.empty()) {
                        g_globalVideoSource->addFrameData(
                            g_h264Headers[channelNum].pps_data.data(), 
                            g_h264Headers[channelNum].pps_data.size(), 
                            4, // PPS
                            channelNum
                        );
                    }
                }
            }

            // Prepend the Annex-B start code (00 00 00 01) to the frame.
            // NOTE(review): SPS/PPS above are sent without a start code —
            // confirm whether IMP packets already carry one; if they do,
            // frames end up double-prefixed here.
            const unsigned char startCode[4] = {0x00, 0x00, 0x00, 0x01};
            size_t totalSize = sizeof(startCode) + pack->length;

            std::vector<unsigned char> framedData(totalSize);
            memcpy(framedData.data(), startCode, sizeof(startCode));
            memcpy(framedData.data() + sizeof(startCode), data, pack->length);

            // Queue the frame for delivery, tagged with its channel.
            g_globalVideoSource->addFrameData(
                framedData.data(), 
                framedData.size(), 
                frameType, 
                channelNum
            );
        }
    }

    return 0;
}

// Module-level instance pointer so the C-style callback
// IngenicPutH264DataToBuffer() can reach the active source object.
// NOTE(review): this definition appears AFTER its first uses above (e.g.
// get_video_stream_thread) — a prior declaration must exist, presumably in
// IngenicVideoSource.h, for this translation unit to compile; confirm.
static IngenicVideoSource* g_globalVideoSource = nullptr;

// Global run flag checked by the stream threads for graceful shutdown.
// NOTE(review): `volatile` does not provide inter-thread synchronization;
// std::atomic<bool> would be the correct type — flagged, not changed here.
static volatile bool g_running = true;

// Resets every per-channel SPS/PPS cache to its empty, invalid state.
static void initializeH264Headers() {
    for (auto& hdr : g_h264Headers) {
        hdr.valid = false;
        hdr.sps_data.clear();
        hdr.pps_data.clear();
    }
}

// Runs before main() (GCC/Clang constructor attribute): clears the SPS/PPS
// caches so stale data can never be mistaken for valid headers.
__attribute__((constructor))
static void initIngenicVideoSource() {
    initializeH264Headers();
}

// Factory following the live555 createNew convention. The new instance is
// also published through g_globalVideoSource so the C-style stream callback
// (IngenicPutH264DataToBuffer) can reach it.
IngenicVideoSource* IngenicVideoSource::createNew(UsageEnvironment& env) {
    g_globalVideoSource = new IngenicVideoSource(env);
    return g_globalVideoSource;
}

// Constructor: initializes bookkeeping, creates the live555 event trigger,
// brings up the Ingenic capture pipeline and, on success, starts the
// capture thread. On failure it only logs — the object is left constructed
// but inert (m_encoderReady stays false).
IngenicVideoSource::IngenicVideoSource(UsageEnvironment& env)
    : FramedSource(env),
    m_captureRunning(false),
    m_frameCount(0),
    m_width(1920),
    m_height(1080),
    m_fps(25),
    m_bitrate(2000),
    m_eventTriggerId(0),
    m_isCurrentlyAwaitingData(false),
    m_encoderReady(false),
    m_showTimestamp(true),
    m_showLatency(true),
    m_frameLatency(0),
    m_frameIdCounter(0),
    m_markerChangeCount(0),
    m_realEndToEndLatency(0),
    m_ingenicSystemInitialized(false),
    m_totalFrames(0),
    m_droppedFrames(0) {

    // Latency-measurement marker defaults (position/size/red color).
    // NOTE(review): RGB() assumed to be a project/SDK macro — confirm.
    m_latencyMarker.x = 100;
    m_latencyMarker.y = 100;
    m_latencyMarker.size = 40;
    m_latencyMarker.color = RGB(255, 0, 0); // red
    m_latencyMarker.changeTime = std::chrono::steady_clock::now();
    m_latencyMarker.sequence = 0;
    m_lastMarkerChange = std::chrono::steady_clock::now();

    // Record startup time for latency/fps bookkeeping.
    m_startTime = std::chrono::steady_clock::now();
    m_lastFrameTime = m_startTime;

    pthread_mutex_init(&m_cs, NULL);

    // Event trigger used to deliver queued frames on the live555 thread.
    m_eventTriggerId = envir().taskScheduler().createEventTrigger(deliverFrame0);

    // Bring up the Ingenic pipeline; only start capturing on success.
    if (initializeIngenicSystem()) {
        envir() << "IngenicVideoSource: Started - Resolution: " << m_width << "x" << m_height << "\n";
        startCapturing();
    } else {
        envir() << "IngenicVideoSource: Failed to initialize Ingenic system\n";
    }
}

// Destructor: signals the stream threads to exit, joins the capture thread,
// tears down the Ingenic pipeline, drains the frame queue and releases the
// queue mutex.
//
// Bug fixed vs. the original: m_ingenicMutex is a std::mutex (it is used
// with std::lock_guard<std::mutex> in initializeIngenicSystem /
// cleanupIngenicSystem), so passing it to pthread_mutex_destroy() was
// invalid — a std::mutex cleans itself up in its own destructor.
IngenicVideoSource::~IngenicVideoSource() {
    // Tell every per-channel stream thread to stop.
    g_running = false;

    // Join the capture thread, then shut down the SDK pipeline.
    stopCapturing();
    cleanupIngenicSystem();

    // Drop any frames still queued for live555.
    pthread_mutex_lock(&m_cs);
    m_frameQueue.clear();
    pthread_mutex_unlock(&m_cs);

    // Only the pthread queue mutex needs explicit destruction.
    pthread_mutex_destroy(&m_cs);

    // Unpublish this instance from the C-style callback.
    if (g_globalVideoSource == this) {
        g_globalVideoSource = nullptr;
    }

    // Final statistics.
    IMP_LOG_DBG("IngenicVideoSource", "Total frames processed: %d, dropped frames: %d\n", 
               m_totalFrames, m_droppedFrames);

    envir() << "IngenicVideoSource: Stopped\n";
}

// Brings up the full Ingenic pipeline in strict order:
// system -> framesource -> encoder -> bind -> stream on.
// Each failure rolls back every step already completed, in reverse order.
// Idempotent: returns true immediately if already initialized.
// Thread-safe via m_ingenicMutex.
bool IngenicVideoSource::initializeIngenicSystem() {
    std::lock_guard<std::mutex> lock(m_ingenicMutex);
    
    if (m_ingenicSystemInitialized) {
        return true;
    }
    
    // Step 1: IMP system + ISP/sensor.
    if (sample_system_init() != 0) {
        envir() << "IngenicVideoSource: Failed to initialize Ingenic system\n";
        return false;
    }
    
    // Step 2: FrameSource channels (rollback: system).
    if (sample_framesource_init() != 0) {
        envir() << "IngenicVideoSource: Failed to initialize framesource\n";
        sample_system_exit();
        return false;
    }
    
    // Step 3: encoder groups/channels (rollback: framesource, system).
    if (sample_encoder_init() != 0) {
        envir() << "IngenicVideoSource: Failed to initialize encoder\n";
        sample_framesource_exit();
        sample_system_exit();
        return false;
    }
    
    // Step 4: bind FrameSource -> encoder (rollback: encoder, framesource, system).
    if (sample_encoder_bind_framesource() != 0) {
        envir() << "IngenicVideoSource: Failed to bind framesource and encoder\n";
        sample_encoder_exit();
        sample_framesource_exit();
        sample_system_exit();
        return false;
    }
    
    // Step 5: start streaming (rollback: unbind, encoder, framesource, system).
    if (sample_framesource_streamon() != 0) {
        envir() << "IngenicVideoSource: Failed to start framesource stream\n";
        sample_encoder_unbind_framesource();
        sample_encoder_exit();
        sample_framesource_exit();
        sample_system_exit();
        return false;
    }
    
    m_ingenicSystemInitialized = true;
    m_encoderReady = true;
    
    return true;
}

// Tears down the Ingenic pipeline in reverse bring-up order:
// stream off -> unbind -> encoder -> framesource -> system.
// No-op if the pipeline was never initialized. Thread-safe via m_ingenicMutex.
void IngenicVideoSource::cleanupIngenicSystem() {
    std::lock_guard<std::mutex> lock(m_ingenicMutex);
    
    if (m_ingenicSystemInitialized) {
        // Stop the video stream.
        sample_framesource_streamoff();
        
        // Unbind FrameSource from the encoder.
        sample_encoder_unbind_framesource();
        
        // Release encoder resources.
        sample_encoder_exit();
        
        // Release FrameSource resources.
        sample_framesource_exit();
        
        // Exit the IMP system / ISP.
        sample_system_exit();
        
        m_ingenicSystemInitialized = false;
        m_encoderReady = false;
    }
}

// Launches the capture thread exactly once; later calls are no-ops.
// On pthread_create failure the running flag is rolled back.
void IngenicVideoSource::startCapturing() {
    if (m_captureRunning) return;

    m_captureRunning = true;
    if (pthread_create(&m_captureThread, NULL, captureThread, this) != 0) {
        envir() << "IngenicVideoSource: Thread failed\n";
        m_captureRunning = false;
    }
}

// Stops frame delivery and joins the capture thread.
// NOTE(review): the per-channel stream threads loop on the global g_running
// flag, not m_captureRunning — the caller (the destructor) must clear
// g_running first or the pthread_join below can block indefinitely inside
// sample_get_video_stream(); confirm all call sites honor this.
void IngenicVideoSource::stopCapturing() {
    m_captureRunning = false;

    // Remove the live555 trigger so no further deliverFrame0 events fire.
    if (m_eventTriggerId != 0) {
        envir().taskScheduler().deleteEventTrigger(m_eventTriggerId);
        m_eventTriggerId = 0;
    }

    // Wait for the capture thread to finish.
    if (m_captureThread) {
        pthread_join(m_captureThread, NULL);
        m_captureThread = 0;
    }
}

void* IngenicVideoSource::captureThread(void* clientData) {
    IngenicVideoSource* source = (IngenicVideoSource*)clientData;
    if (!source) return NULL;

    // 等待编码器初始化完成
    int retryCount = 0;
    while (!source->m_encoderReady && retryCount < 50) {
        usleep(10000); // 10ms
        retryCount++;
    }

    // 启动视频流获取线程
    sample_get_video_stream();

    return NULL;
}

// live555 callback: the downstream sink requests the next frame.
// Delivers immediately if the queue has data; otherwise deliverFrame() will
// be re-triggered later (event trigger / delayed task in deliverFrame()).
void IngenicVideoSource::doGetNextFrame() {
    if (!m_captureRunning) return;
    m_isCurrentlyAwaitingData = true;

    // NOTE(review): fMaxSize is normally set by the downstream object before
    // this call; overwriting it here assumes the sink's buffer is at least
    // 200000 bytes — confirm, otherwise memcpy in deliverFrame can overrun.
    fMaxSize = 200000; // raise the maximum frame size

    pthread_mutex_lock(&m_cs);
    bool hasData = !m_frameQueue.empty();
    pthread_mutex_unlock(&m_cs);
    if (hasData) deliverFrame();
}

void IngenicVideoSource::deliverFrame0(void* clientData) {
    IngenicVideoSource* source = (IngenicVideoSource*)clientData;
    if (source && source->m_captureRunning) {
        source->deliverFrame();
    }
}

// Pops one frame from the queue and hands it to live555 (fTo/fFrameSize/
// fPresentationTime), truncating if it exceeds fMaxSize. If the queue is
// empty, schedules a re-poll in 10ms instead.
void IngenicVideoSource::deliverFrame() {
    if (!m_captureRunning) return;
    if (!isCurrentlyAwaitingData()) return;

    pthread_mutex_lock(&m_cs);
    if (m_frameQueue.empty()) {
        pthread_mutex_unlock(&m_cs);
        // Queue empty: poll again in 10ms on the live555 scheduler.
        nextTask() = envir().taskScheduler().scheduleDelayedTask(10000, deliverFrame0, this);
        IMP_LOG_DBG("IngenicVideoSource", "Frame queue empty, scheduling next poll\n");
        return;
    }

    FrameData frameData = std::move(m_frameQueue.front());
    m_frameQueue.pop_front();

    // Drop the timestamp record of the frame being delivered.
    auto it = m_frameTimestamps.find(frameData.frameId);
    if (it != m_frameTimestamps.end()) {
        m_frameTimestamps.erase(it);
    }

    pthread_mutex_unlock(&m_cs);

    // Human-readable frame type for logging (codes set in
    // IngenicPutH264DataToBuffer: 0=IDR, 1=P, 3=SPS, 4=PPS).
    const char* frameTypeStr = "Unknown";
    switch (frameData.frameType) {
        case 0: frameTypeStr = "I-Frame";
            break;
        case 1: frameTypeStr = "P-Frame";
            break;
        case 2: frameTypeStr = "B-Frame";
            break;
        case 3: frameTypeStr = "SPS";
            break;
        case 4: frameTypeStr = "PPS";
            break;
    }

    // Copy at most fMaxSize bytes into the sink's buffer.
    unsigned maxSize = fMaxSize;
    unsigned actualCopySize = (frameData.data.size() < maxSize) ? frameData.data.size() : maxSize;

    // Verbose logging for key frames and parameter sets only.
    if (frameData.frameType == 0 || frameData.frameType == 3 || frameData.frameType == 4) {
        IMP_LOG_DBG("IngenicVideoSource", "Delivering %s from channel %d, size: %u, frameId: %llu\n", 
                   frameTypeStr, frameData.channel, actualCopySize, frameData.frameId);
    }

    if (actualCopySize > 0) {
        memcpy(fTo, frameData.data.data(), actualCopySize);
    }

    fFrameSize = actualCopySize;
    
    // Report truncation to live555 and warn.
    if (frameData.data.size() > maxSize) {
        fNumTruncatedBytes = frameData.data.size() - maxSize;
        IMP_LOG_WARN("IngenicVideoSource", "Frame truncated: %u bytes lost (frameId: %llu, channel: %d)\n", 
                    fNumTruncatedBytes, frameData.frameId, frameData.channel);
    } else {
        fNumTruncatedBytes = 0;
    }

    // Presentation time = capture time, not delivery time.
    // NOTE(review): captureTime is a steady_clock point, so tv is relative
    // to an arbitrary epoch, not wall-clock — confirm downstream RTP timing
    // tolerates this.
    struct timeval tv;
    auto duration = frameData.captureTime.time_since_epoch();
    tv.tv_sec = std::chrono::duration_cast<std::chrono::seconds>(duration).count();
    tv.tv_usec = std::chrono::duration_cast<std::chrono::microseconds>(duration).count() % 1000000;
    fPresentationTime = tv;
    
    // Nominal frame duration derived from the configured frame rate.
    fDurationInMicroseconds = 1000000 / m_fps;
    
    // Special flag for IDR frames.
    // NOTE(review): fFlags/EndOfFrameMarker are not standard FramedSource
    // members — presumably project-defined; confirm.
    if (frameData.frameType == 0) { // IDR frame
        fFlags |= EndOfFrameMarker;
    }
    
    // Record delivery time.
    m_lastFrameTime = std::chrono::steady_clock::now();
    
    m_isCurrentlyAwaitingData = false;
    FramedSource::afterGetting(this);
}

// Queue one encoded frame for delivery (multi-channel aware).
// Deep-copies `data` into an internal FrameData, enforces the queue's
// frame-count and memory caps (preferring to drop non-reference frames),
// records a capture timestamp for latency accounting, and wakes the live555
// event loop if the sink is currently waiting for data.
//
// @param data       pointer to the encoded bitstream (must be non-null)
// @param size       payload size in bytes (must be > 0)
// @param frameType  0=I, 1=P, 2=B, 3=SPS, 4=PPS (see switch below)
// @param channelNum source channel index carried through to the queue entry
void IngenicVideoSource::addFrameData(const unsigned char* data, unsigned size, unsigned frameType, int channelNum) {
    // Reject null/empty payloads up front, before taking the lock.
    if (!data || size == 0) {
        IMP_LOG_ERR("IngenicVideoSource", "Invalid data or zero size in addFrameData\n");
        return;
    }

    pthread_mutex_lock(&m_cs);

    try {
        // Current memory footprint of the queue.
        size_t currentMemoryUsage = 0;
        for (const auto& frame : m_frameQueue) {
            currentMemoryUsage += frame.data.size();
        }

        // Queue management policy: bounded frame count and bounded memory.
        const size_t MAX_QUEUE_FRAMES = 30;               // max frames held
        const size_t MAX_QUEUE_MEMORY = 50 * 1024 * 1024; // max bytes held (50MB)

        // Evict old frames while the queue is full or memory is over budget.
        while (m_frameQueue.size() >= MAX_QUEUE_FRAMES ||
               (currentMemoryUsage + size > MAX_QUEUE_MEMORY && !m_frameQueue.empty())) {

            // Prefer dropping non-reference frames: the condition matches
            // B-frames (type 2) and unknown types > 4; I-frames (0),
            // P-frames (1), SPS (3) and PPS (4) are kept when possible.
            bool removedNonKeyFrame = false;
            for (auto it = m_frameQueue.begin(); it != m_frameQueue.end(); ++it) {
                if (it->frameType > 1 && it->frameType != 3 && it->frameType != 4) {
                    currentMemoryUsage -= it->data.size();
                    m_frameQueue.erase(it);
                    m_droppedFrames++;
                    removedNonKeyFrame = true;
                    break;
                }
            }

            // No droppable frame found: discard the oldest frame instead.
            if (!removedNonKeyFrame && !m_frameQueue.empty()) {
                currentMemoryUsage -= m_frameQueue.front().data.size();
                m_frameQueue.pop_front();
                m_droppedFrames++;
            }

            // Periodic warning so sustained dropping is visible in the log.
            if (m_droppedFrames % 10 == 0) {
                IMP_LOG_WARN("IngenicVideoSource", "High memory usage: dropped %d frames\n", m_droppedFrames);
            }
        }

        // Build the new frame entry (deep copy of the bitstream).
        FrameData frameData;
        frameData.data.resize(size);
        memcpy(frameData.data.data(), data, size);
        frameData.frameId = ++m_frameIdCounter;
        frameData.captureTime = std::chrono::steady_clock::now();
        frameData.frameType = frameType;
        frameData.channel = channelNum;

        // Record the capture timestamp for latency accounting.
        m_frameTimestamps[frameData.frameId] = frameData.captureTime;

        // Cap the timestamp map so it cannot grow without bound.
        const size_t MAX_TIMESTAMP_SIZE = 60; // keep at most 60 entries
        if (m_frameTimestamps.size() > MAX_TIMESTAMP_SIZE) {
            // Frame ids are monotonically increasing, so begin() is oldest.
            auto oldest = m_frameTimestamps.begin();
            m_frameTimestamps.erase(oldest);
        }

        // Human-readable frame type for logging.
        const char* frameTypeStr = "Unknown";
        switch (frameType) {
            case 0: frameTypeStr = "I-Frame";
                break;
            case 1: frameTypeStr = "P-Frame";
                break;
            case 2: frameTypeStr = "B-Frame";
                break;
            case 3: frameTypeStr = "SPS";
                break;
            case 4: frameTypeStr = "PPS";
                break;
        }

        // Log key frames and parameter sets to ease stream debugging.
        if (frameType == 0 || frameType == 3 || frameType == 4) {
            IMP_LOG_DBG("IngenicVideoSource", "Adding %s (ID: %llu, size: %u, channel: %d) to queue\n", 
                      frameTypeStr, frameData.frameId, size, channelNum);
        }

        // Enqueue the frame.
        m_frameQueue.push_back(frameData);
        m_totalFrames++;

        // Periodic queue statistics.
        if (m_totalFrames % 100 == 0) {
            IMP_LOG_DBG("IngenicVideoSource", "Queue stats: %zu frames, %zu bytes, %d total frames, %d dropped frames\n", 
                      m_frameQueue.size(), currentMemoryUsage + size, m_totalFrames, m_droppedFrames);
        }

        // Wake the scheduler if a reader is blocked waiting for a frame.
        if (m_eventTriggerId != 0 && m_isCurrentlyAwaitingData) {
            envir().taskScheduler().triggerEvent(m_eventTriggerId, this);
        }

    } catch (const std::exception& e) {
        IMP_LOG_ERR("IngenicVideoSource", "Exception in addFrameData: %s\n", e.what());
    } catch (...) {
        // Never let an unknown exception escape while the mutex is held.
        IMP_LOG_ERR("IngenicVideoSource", "Unknown exception in addFrameData\n");
    }

    // BUG FIX: the original code used a `finally` block, which does not exist
    // in C++ and fails to compile. Unlock unconditionally after the
    // try/catch instead (the catch handlers swallow, so control always
    // reaches this point).
    pthread_mutex_unlock(&m_cs);

    // Update the delivered-frame counter (outside the lock, as before).
    m_frameCount++;
}

// Implementations of the remaining helper functions...
// Rotate the latency marker every 500 ms: cycle its color through a fixed
// red/green/blue/yellow palette, bump its sequence number, and nudge its
// position so consecutive changes are visually distinct.
void IngenicVideoSource::updateLatencyMarker() {
    const auto now = std::chrono::steady_clock::now();
    const auto sinceLastChange = std::chrono::duration_cast<std::chrono::milliseconds>(now - m_lastMarkerChange);

    if (sinceLastChange.count() < 500) {
        return; // not time to rotate the marker yet
    }

    // Fixed 4-color palette the marker cycles through.
    static COLORREF colors[] = {
        RGB(255, 0, 0),   // red
        RGB(0, 255, 0),   // green
        RGB(0, 0, 255),   // blue
        RGB(255, 255, 0)  // yellow
    };

    // Color is chosen from the pre-increment count; the sequence number
    // then advances the count (same ordering as before).
    m_latencyMarker.color = colors[m_markerChangeCount % 4];
    m_latencyMarker.changeTime = now;
    m_latencyMarker.sequence = ++m_markerChangeCount;
    m_lastMarkerChange = now;

    // Small positional offset derived from the (already incremented) count.
    const int nudge = (m_markerChangeCount % 3) * 5;
    m_latencyMarker.x = 100 + nudge;
    m_latencyMarker.y = 100 + nudge;
}

// Estimate the effective end-to-end latency in milliseconds.
// Returns the larger of (a) the time since the latency marker last changed
// and (b) the age of the oldest frame still sitting in the queue.
int64_t IngenicVideoSource::calculateRealLatency() {
    const auto now = std::chrono::steady_clock::now();

    const int64_t markerAgeMs = std::chrono::duration_cast<std::chrono::milliseconds>(
        now - m_latencyMarker.changeTime).count();

    // Age of the head-of-queue frame, read under the queue mutex.
    int64_t queueAgeMs = 0;
    pthread_mutex_lock(&m_cs);
    if (!m_frameQueue.empty()) {
        queueAgeMs = std::chrono::duration_cast<std::chrono::milliseconds>(
            now - m_frameQueue.front().captureTime).count();
    }
    pthread_mutex_unlock(&m_cs);

    return (queueAgeMs > markerAgeMs) ? queueAgeMs : markerAgeMs;
}

// Print current stream info and (optionally) latency figures to the console.
// This is a simplified console fallback instead of drawing an OSD overlay.
//
// @param width  target frame width  (unused by the console fallback; kept
//               for interface compatibility)
// @param height target frame height (unused; see above)
void IngenicVideoSource::drawTimestampAndLatency(int width, int height) {
    if (!m_showTimestamp && !m_showLatency) return;

    // NOTE: the original computed a wall-clock millisecond value here
    // (currentTime/duration/millis) that was never used — removed as dead code.
    auto now = std::chrono::steady_clock::now();

    // Time since the last delivered frame, used as a capture-latency proxy.
    auto frameTimeDiff = std::chrono::duration_cast<std::chrono::milliseconds>(now - m_lastFrameTime);
    m_frameLatency = frameTimeDiff.count();

    m_realEndToEndLatency = calculateRealLatency();

    // Console output only.
    if (m_showTimestamp || m_showLatency) {
        printf("Ingenic Video Stream - Resolution: %dx%d | FPS: %d | Bitrate: %dkbps | Frames: %lu\n",
            m_width, m_height, m_fps, m_bitrate, (unsigned long)m_frameCount);
        
        if (m_showLatency) {
            int64_t queueSize = 0;
            pthread_mutex_lock(&m_cs);
            queueSize = m_frameQueue.size();
            pthread_mutex_unlock(&m_cs);
            
            // Cast to long long so the %lld specifiers are well-defined on
            // LP64 platforms where int64_t is `long`, not `long long`.
            printf("Latency: %lldms | Capture: %lldms | Queue: %lld\n",
                (long long)m_realEndToEndLatency, (long long)m_frameLatency, (long long)queueSize);
        }
    }
}

// Report the current latency-marker state on the console.
// Simplified console fallback instead of drawing the marker into the frame.
// NOTE(review): assumes sequence is unsigned and x/y/size are int, matching
// the %u/%d format specifiers — confirm against the member declarations.
void IngenicVideoSource::drawLatencyMarker() {
    // 简化为控制台输出
    printf("Latency Marker: Sequence %u at (%d, %d) size %d\n",
        m_latencyMarker.sequence, m_latencyMarker.x, m_latencyMarker.y, m_latencyMarker.size);
}

// Convert an AVCDecoderConfigurationRecord (avcC extradata) into Annex-B
// SPS and PPS NAL units: each parameter set is emitted with a 4-byte
// 00 00 00 01 start code prepended.
// On any malformed/truncated input the function returns early, leaving
// whatever was parsed so far in the output vectors.
//
// @param avcc     pointer to the avcC record (may be null)
// @param avccSize size of the record in bytes (must be >= 7 to parse)
// @param outSps   receives start-code-prefixed SPS NALs (cleared first)
// @param outPps   receives start-code-prefixed PPS NALs (cleared first)
void IngenicVideoSource::annexBFromAvcc(const unsigned char* avcc, size_t avccSize,
    std::vector<unsigned char>& outSps, std::vector<unsigned char>& outPps) {
    outSps.clear();
    outPps.clear();
    if (avcc == nullptr || avccSize < 7) return;

    static const unsigned char startCode[4] = { 0x00, 0x00, 0x00, 0x01 };

    // Skip the 5 fixed header bytes: version, profile, compat, level,
    // lengthSizeMinusOne.
    size_t pos = 5;
    if (pos >= avccSize) return;

    // Low 5 bits hold the SPS count.
    const unsigned spsCount = avcc[pos] & 0x1F;
    ++pos;
    if (pos > avccSize) return;

    for (unsigned i = 0; i < spsCount && pos + 2 <= avccSize; ++i) {
        // Each parameter set is prefixed by a big-endian 16-bit length.
        const uint16_t spsLen = (uint16_t)((avcc[pos] << 8) | avcc[pos + 1]);
        pos += 2;
        if (pos + spsLen > avccSize) return;
        outSps.insert(outSps.end(), startCode, startCode + 4);
        outSps.insert(outSps.end(), avcc + pos, avcc + pos + spsLen);
        pos += spsLen;
    }

    if (pos >= avccSize) return;
    const unsigned ppsCount = avcc[pos];
    ++pos;

    for (unsigned i = 0; i < ppsCount && pos + 2 <= avccSize; ++i) {
        const uint16_t ppsLen = (uint16_t)((avcc[pos] << 8) | avcc[pos + 1]);
        pos += 2;
        if (pos + ppsLen > avccSize) return;
        outPps.insert(outPps.end(), startCode, startCode + 4);
        outPps.insert(outPps.end(), avcc + pos, avcc + pos + ppsLen);
        pos += ppsLen;
    }
}
