#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavutil/opt.h"

#include "ffmpeg_codec.h"

// Logging helpers: prefix every message with the function name and line number.
// Uses the GCC/Clang named-varargs extension ("argv...") and the comma operator
// to chain the prefix fprintf with the payload fprintf in one expression.
#define FFCODEC_INFO(argv...) fprintf(stdout, "[FFCODEC_INFO] %s(%d): ", __func__, __LINE__),fprintf(stdout, ##argv)
#define FFCODEC_ERR(argv...) fprintf(stderr, "[FFCODEC_ERR] %s(%d): ", __func__, __LINE__),fprintf(stderr, ##argv)

// ----- mapping between wrapper pixel-format enums and FFmpeg's -----
// One row of the translation table used by ffcodec_to_ffmpeg()/ffmpeg_to_ffcodec().
typedef struct
{
    FFCodecFormat forcodec;       // wrapper-side format id (ffmpeg_codec.h)
    enum AVPixelFormat forffmpeg; // matching FFmpeg pixel format
} FFCodec_FormatTranslate;

// Translation table between wrapper formats and FFmpeg pixel formats.
// Note: the planar YVU entries map to the corresponding YUV FFmpeg format,
// i.e. the U/V plane swap is NOT expressed here; callers that rely on exact
// chroma ordering for YVU*P / YVU422SP input must swap planes themselves.
static const FFCodec_FormatTranslate ffcodec_formatGrid[] = 
{
    {CODEC_FORMAT_RGB, AV_PIX_FMT_RGB24},
    {CODEC_FORMAT_BGR, AV_PIX_FMT_BGR24},
    {CODEC_FORMAT_ARGB, AV_PIX_FMT_ARGB},
    {CODEC_FORMAT_ABGR, AV_PIX_FMT_ABGR},
    {CODEC_FORMAT_RGBA, AV_PIX_FMT_RGBA},
    {CODEC_FORMAT_BGRA, AV_PIX_FMT_BGRA},

    {CODEC_FORMAT_YUV420P, AV_PIX_FMT_YUV420P},
    {CODEC_FORMAT_YVU420P, AV_PIX_FMT_YUV420P},

    {CODEC_FORMAT_YUV422P, AV_PIX_FMT_YUV422P},
    {CODEC_FORMAT_YVU422P, AV_PIX_FMT_YUV422P},

    {CODEC_FORMAT_YUV420SP, AV_PIX_FMT_NV12},
    {CODEC_FORMAT_YVU420SP, AV_PIX_FMT_NV21},

    {CODEC_FORMAT_YUV422SP, AV_PIX_FMT_NV16},
    {CODEC_FORMAT_YVU422SP, AV_PIX_FMT_NV16},

    // YUV400 is plain 8-bit grayscale -> AV_PIX_FMT_GRAY8.
    // (Was AV_PIX_FMT_Y400A, which is gray WITH an alpha channel and has a
    // different, two-component memory layout.)
    {CODEC_FORMAT_YUV400, AV_PIX_FMT_GRAY8},
};

// Translate a wrapper pixel format into the matching FFmpeg pixel format.
// Returns AV_PIX_FMT_NONE (and logs) when the format is not in the table.
static enum AVPixelFormat ffcodec_to_ffmpeg(FFCodecFormat format)
{
    const uint32_t count = sizeof(ffcodec_formatGrid) / sizeof(ffcodec_formatGrid[0]);
    uint32_t idx;
    for (idx = 0; idx < count; idx++)
    {
        const FFCodec_FormatTranslate *entry = &ffcodec_formatGrid[idx];
        if (entry->forcodec == format)
            return entry->forffmpeg;
    }
    FFCODEC_ERR("Unsupported format(%d) !! \n", format);
    return AV_PIX_FMT_NONE;
}

static FFCodecFormat ffmpeg_to_ffcodec(enum AVPixelFormat format)
{
    uint32_t i;
    for (i = 0; i < sizeof(ffcodec_formatGrid) / sizeof(ffcodec_formatGrid[0]); i++)
    {
        if (ffcodec_formatGrid[i].forffmpeg == format)
            return ffcodec_formatGrid[i].forcodec;
    }
    FFCODEC_ERR("Unsupported format(%d) !! \n", format);
    return CODEC_FORMAT_NONE;
}

// ----- ffmpeg编解码 -----

// char* ffcodec_info(void)
// {
//     const size_t size = 2048;
//     static char info[size] = {};
//     size_t len = 0;
//     memset(info, 0, size);
//     avcodec_register_all();
//     AVInputFormat *if_temp = av_iformat_next(NULL);
//     while (if_temp != NULL){
//         len += snprintf(&info[len], size - len, "ffInput: %s\r\n", if_temp->name);
//         if_temp = if_temp->next;
//     }
//     AVOutputFormat *of_temp = av_oformat_next(NULL);
//     while (of_temp != NULL){
//         len += snprintf(&info[len], size - len, "ffOutput: %s\r\n", of_temp->name);
//         of_temp = of_temp->next;
//     }
//     if (len == 0)
//         strcpy(info, "There's no ffmpeg working info\r\n");
//     return info;
// }

// Internal per-handle state; handed to callers as an opaque void*.
typedef struct
{
    FFCodecType type;                // CODEC_TYPE_ENCODE or CODEC_TYPE_DECODE
    FFCodecFormat format;            // compressed format (H264/H265)
    AVCodecContext *pAVCodecContext; // opened codec context
    AVFrame *frameCodec;             // raw frame: input (encode) / output (decode)
    AVFrame *frameYUV;               // allocated in decode mode only
    AVPacket packet;                 // reusable packet for compressed data
} FFCodec_Priv;

/*
 *  Video codec initialisation.
 *  Params:
 *      type: encode or decode
 *      format: the compressed format (CODEC_FORMAT_H264 / CODEC_FORMAT_H265)
 *      inputFormat: raw input pixel format; used for encoding only, pass
 *                   CODEC_FORMAT_NONE otherwise
 *      width/height/fps: stream geometry and frame rate (encode only)
 *      groupSize: frames per group of pictures; use a smaller value for fast
 *                 motion, at the cost of a larger bitstream
 *      bFrame: number of B-frames; improves compression but decoding depends
 *              on future frames (unsuitable for live streaming); 0 disables
 *  Return: handle for the other ffcodec_* calls, NULL on failure
 */
void* ffcodec_init(
    FFCodecType type,
    FFCodecFormat format,
    FFCodecFormat inputFormat,
    uint32_t width,
    uint32_t height,
    uint32_t fps,
    uint32_t groupSize,
    uint32_t bFrame)
{
    avcodec_register_all();

    // pick the matching encoder/decoder
    AVCodec *pAVCodec;
    if (type == CODEC_TYPE_DECODE && format == CODEC_FORMAT_H264)
        pAVCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
    else if (type == CODEC_TYPE_DECODE && format == CODEC_FORMAT_H265)
        pAVCodec = avcodec_find_decoder(AV_CODEC_ID_H265);
    else if (type == CODEC_TYPE_ENCODE && format == CODEC_FORMAT_H264)
        pAVCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
    else if (type == CODEC_TYPE_ENCODE && format == CODEC_FORMAT_H265)
        pAVCodec = avcodec_find_encoder(AV_CODEC_ID_H265);
    else
    {
        FFCODEC_ERR("Unsupported Encode/Decode(%d-%d) action !! \n", type, format);
        return NULL;
    }

    if (!pAVCodec)
    {
        FFCODEC_ERR("avcodec_find_en/decoder(%d-%d) failed \n", type, format);
        return NULL;
    }

    AVCodecContext *pAVCodecContext = avcodec_alloc_context3(pAVCodec);
    if (!pAVCodecContext)
    {
        // was logged with FFCODEC_INFO; this is a fatal error, log it as one
        FFCODEC_ERR("avcodec_alloc_context3(%d-%d) failed \n", type, format);
        return NULL;
    }

    if (type == CODEC_TYPE_ENCODE)
    {
        enum AVPixelFormat pixFmt = ffcodec_to_ffmpeg(inputFormat);
        if (pixFmt == AV_PIX_FMT_NONE) // unsupported raw input format
        {
            avcodec_free_context(&pAVCodecContext);
            return NULL;
        }
        pAVCodecContext->pix_fmt = pixFmt;

        pAVCodecContext->bit_rate = 0; //400000;

        pAVCodecContext->width = width;
        pAVCodecContext->height = height;

        pAVCodecContext->time_base = (AVRational){1, (int)fps};
        pAVCodecContext->framerate = (AVRational){(int)fps, 1};

        pAVCodecContext->gop_size = groupSize; // frames per group of pictures
        pAVCodecContext->max_b_frames = bFrame; // number of B-frames, 0 disables
    }

    // x264/x265 private options:
    //   preset (speed, faster == lower quality): "ultrafast" ... "placebo"
    //   tune (scenario): "film", "zerolatency" (no filtering), "fastdecode", ...
    av_opt_set(pAVCodecContext->priv_data, "preset", "superfast", 0);
    av_opt_set(pAVCodecContext->priv_data, "tune", "fastdecode", 0);

    pAVCodecContext->coder_type = FF_CODER_TYPE_AC;
    pAVCodecContext->flags |= CODEC_FLAG_LOOP_FILTER | CODEC_FLAG_LOW_DELAY;
    pAVCodecContext->me_cmp |= FF_CMP_CHROMA;
    pAVCodecContext->me_method = ME_EPZS;
    pAVCodecContext->me_subpel_quality = 4;
    pAVCodecContext->me_range = 16;
    pAVCodecContext->scenechange_threshold = 40;
    pAVCodecContext->i_quant_factor = (float)0.71;
    pAVCodecContext->b_frame_strategy = 0;
    pAVCodecContext->qcompress = (float)0.7;
    pAVCodecContext->qmin = 10;
    pAVCodecContext->qmax = 51;
    pAVCodecContext->max_qdiff = 4;
    // NOTE: the old code forced max_b_frames = 0 here, which silently
    // overrode the bFrame parameter applied above; removed so bFrame works.
    pAVCodecContext->refs = 2;
    pAVCodecContext->trellis = 1;
    pAVCodecContext->flags2 |= CODEC_FLAG2_FAST;

    if (avcodec_open2(pAVCodecContext, pAVCodec, NULL) < 0)
    {
        FFCODEC_ERR("avcodec_open2 typt(%d-%d) failed \n", type, format);
        avcodec_free_context(&pAVCodecContext);
        return NULL;
    }

    FFCodec_Priv *priv = (FFCodec_Priv*)calloc(1, sizeof(FFCodec_Priv));
    if (!priv) // previously unchecked: NULL here crashed on the writes below
    {
        FFCODEC_ERR("calloc(%d-%d) failed \n", type, format);
        avcodec_free_context(&pAVCodecContext);
        return NULL;
    }
    priv->type = type;
    priv->format = format;
    priv->pAVCodecContext = pAVCodecContext;
    priv->frameCodec = av_frame_alloc();
    if (!priv->frameCodec)
        goto err_free;
    if (type == CODEC_TYPE_ENCODE)
    {
        // Encoding borrows the caller's plane memory on every ffcodec_encode()
        // call, so no av_frame_get_buffer() here; just describe the geometry.
        priv->frameCodec->format = ffcodec_to_ffmpeg(inputFormat);
        priv->frameCodec->width = pAVCodecContext->width;
        priv->frameCodec->height = pAVCodecContext->height;
    }
    else
    {
        priv->frameYUV = av_frame_alloc();
        if (!priv->frameYUV)
            goto err_free;
    }
    av_init_packet(&priv->packet);

    return (void*)priv;

err_free:
    FFCODEC_ERR("av_frame_alloc(%d-%d) failed \n", type, format);
    if (priv->frameCodec)
        av_free(priv->frameCodec);
    free(priv);
    avcodec_free_context(&pAVCodecContext);
    return NULL;
}

// Release everything ffcodec_init() allocated. Safe to call with NULL.
void ffcodec_deinit(void *handle)
{
    FFCodec_Priv *codec = (FFCodec_Priv*)handle;
    if (codec == NULL)
        return;

    if (codec->frameCodec)
    {
        av_frame_unref(codec->frameCodec);
        av_free(codec->frameCodec);
    }
    if (codec->frameYUV)
    {
        av_frame_unref(codec->frameYUV);
        av_free(codec->frameYUV);
    }
    av_packet_unref(&codec->packet);
    if (codec->pAVCodecContext)
        avcodec_free_context(&codec->pAVCodecContext);
    free(codec);
}

/*
 *  Video decode: feed one encoded frame, get back YUV (and optionally RGB).
 *  Params:
 *      buff: one frame of encoded data
 *      buffSize: byte length of buff
 *      yuvMap: address of an FFCodec_Map[3] array used to return the YUV
 *              planes (borrowed from the decoder's frame; do not free)
 *      rgbMap: returns RGB data; caller must pre-set map, width, height and
 *              format, and owns that memory
 *  Return: 0 success, -1 failure
 */
int32_t ffcodec_decode(void *handle, uint8_t *buff, uint32_t buffSize, FFCodec_Map yuvMap[3], FFCodec_Map *rgbMap)
{
    FFCodec_Priv *priv = (FFCodec_Priv*)handle;
    int32_t ret;

    // reject obviously invalid calls instead of dereferencing NULL below
    if (!priv || !buff || buffSize == 0)
        return -1;

    av_frame_unref(priv->frameCodec);
    av_frame_unref(priv->frameYUV);

    priv->packet.data = buff;
    priv->packet.size = buffSize;

    ret = avcodec_send_packet(priv->pAVCodecContext, &priv->packet);
    if (ret != 0)
    {
        // Non-picture NAL units (SPS/PPS etc.) legitimately produce no output;
        // stay quiet here and just report failure to the caller.
        return -1;
    }

    ret = avcodec_receive_frame(priv->pAVCodecContext, priv->frameCodec);
    if (ret != 0)
    {
        // NOTE(review): dumps the first 5 bytes — assumes buffSize >= 5
        FFCODEC_ERR("avcodec_receive_frame failed: %d - %02X %02X %02X %02X %02X \n",
                  buffSize, buff[0], buff[1], buff[2], buff[3], buff[4]);
        return -1;
    }

    if (yuvMap)
    {
        yuvMap[0].pb = yuvMap[1].pb = yuvMap[2].pb = 1;
        yuvMap[0].format = yuvMap[1].format = yuvMap[2].format = 
            ffmpeg_to_ffcodec((enum AVPixelFormat)priv->frameCodec->format);
        // Y plane — width is the decoder's stride (may exceed display width)
        yuvMap[0].width = priv->frameCodec->linesize[0];
        yuvMap[0].height = priv->frameCodec->height;
        yuvMap[0].map = priv->frameCodec->data[0];
        // U plane — height scaled by the chroma/luma stride ratio
        yuvMap[1].width = priv->frameCodec->linesize[1];
        yuvMap[1].height = priv->frameCodec->height * priv->frameCodec->linesize[1] / priv->frameCodec->width;
        yuvMap[1].map = priv->frameCodec->data[1];
        // V plane
        yuvMap[2].width = priv->frameCodec->linesize[2];
        yuvMap[2].height = priv->frameCodec->height * priv->frameCodec->linesize[2] / priv->frameCodec->width;
        yuvMap[2].map = priv->frameCodec->data[2];
    }

    if (rgbMap && rgbMap->map)
    {
        // The conversion reads the YUV planes filled in above; previously a
        // NULL yuvMap with a valid rgbMap crashed inside ffcodec_YUV_to_RGB().
        if (!yuvMap)
            return -1;
        return ffcodec_YUV_to_RGB(yuvMap, rgbMap);
    }
    return 0;
}

/*
 *  Video encode: feed one raw YUV frame, get back one encoded frame.
 *  Params:
 *      yuvMap: address of an FFCodec_Map[3] array holding the YUV planes; the
 *              caller allocates the memory and must set width, height and
 *              format correctly
 *      frameBuff: returns a pointer to the encoded data; use it before the
 *              next call to this function (do not free)
 *      frameBuffSize: returns the byte length of frameBuff
 *  Return: 0 success, -1 failure
 *  Note: when encoding h264/h265, roughly the first 10 frames return no data
 *        because of encoder look-ahead/filtering; this is not a failure
 */
int32_t ffcodec_encode(
    void *handle,
    FFCodec_Map yuvMap[3],
    uint8_t **frameBuff,
    uint32_t *frameBuffSize)
{
    FFCodec_Priv *priv = (FFCodec_Priv*)handle;
    int32_t ret;
    int got_packet_ptr = 0;

    // drop the reference held from the previous call before reusing the packet
    av_packet_unref(&priv->packet);

    // Describe the input frame; the AVFrame borrows the caller's plane memory
    // directly (no copy).
    priv->frameCodec->format = ffcodec_to_ffmpeg(yuvMap[0].format);
    priv->frameCodec->width = yuvMap[0].width;
    priv->frameCodec->height = yuvMap[0].height;

    priv->frameCodec->data[0] = yuvMap[0].map;
    priv->frameCodec->data[1] = yuvMap[1].map;
    priv->frameCodec->data[2] = yuvMap[2].map;

    // NOTE(review): strides are taken straight from each plane's width —
    // assumes the planes are tightly packed with no row padding; confirm
    // against the callers.
    priv->frameCodec->linesize[0] = yuvMap[0].width;
    priv->frameCodec->linesize[1] = yuvMap[1].width;
    priv->frameCodec->linesize[2] = yuvMap[2].width;

    // one presentation-timestamp tick per submitted frame
    priv->frameCodec->pts += 1;

    ret = avcodec_encode_video2(
        priv->pAVCodecContext,
        &priv->packet,
        priv->frameCodec,
        &got_packet_ptr);
    if (ret != 0 || got_packet_ptr != 1)
    {
        if (ret != 0)
            FFCODEC_ERR("avcodec_encode_video2: failed, ret/%d got_packet_ptr/%d \n",
                ret, got_packet_ptr);
        priv->frameCodec->data[0] = NULL; // detach borrowed memory so teardown never frees it
        priv->frameCodec->data[1] = NULL;
        priv->frameCodec->data[2] = NULL;
        return -1;
    }

    if (frameBuff)
        *frameBuff = priv->packet.data;
    if (frameBuffSize)
        *frameBuffSize = priv->packet.size;

    priv->frameCodec->data[0] = NULL; // detach borrowed memory so teardown never frees it
    priv->frameCodec->data[1] = NULL;
    priv->frameCodec->data[2] = NULL;
    
    return 0;
}

/*
 *  基于ffmpeg和软件的两种YUV转RGB方法
 */
#if 1

// YUV -> RGB (with scaling) via libswscale; returns 0 on success.
// Caller must pre-set rgbMap's map, width and height; format is forced to
// CODEC_FORMAT_RGB below (only RGB24 output is supported by this helper).
int32_t ffcodec_YUV_to_RGB(FFCodec_Map yuvMap[3], FFCodec_Map *rgbMap)
{
    // source plane pointers (sws_scale expects 4-entry arrays)
    uint8_t *dataYUV[] = {yuvMap[0].map, yuvMap[1].map, yuvMap[2].map, NULL};
    // destination: RGB24 is a single packed plane
    uint8_t *dataRGB[] = {rgbMap->map, NULL, NULL, NULL};

    // bytes per row of each source plane
    int linesizeYUV[] = {(int)yuvMap[0].width, (int)yuvMap[1].width, (int)yuvMap[2].width, 0};
    // Destination stride: width * 3 bytes (R,G,B). Only entry [0] is used for
    // a packed format — the old code put height*3 into [1], which sws_scale
    // ignores for RGB24 but was wrong as a stride.
    int linesizeRGB[] = {(int)(rgbMap->width * 3), 0, 0, 0};

    if (rgbMap->format != CODEC_FORMAT_RGB)
        rgbMap->format = CODEC_FORMAT_RGB;

    struct SwsContext *sContext = sws_getContext(
        yuvMap[0].width,
        yuvMap[0].height,
        ffcodec_to_ffmpeg(yuvMap[0].format),
        rgbMap->width,
        rgbMap->height,
        ffcodec_to_ffmpeg(rgbMap->format),
        SWS_FAST_BILINEAR,
        NULL, NULL, NULL);
    if (!sContext)
    {
        FFCODEC_ERR("sws_getContext failed: %dx%d(yuv) to %dx%d(rgb) \n",
            yuvMap[0].width, yuvMap[0].height, rgbMap->width, rgbMap->height);
        return -1;
    }

    // sws_scale returns the height of the output slice, not 0, so it is not
    // treated as an error code here
    sws_scale(
        sContext,
        (const uint8_t *const *)dataYUV,
        linesizeYUV,
        0,
        yuvMap[0].height,
        dataRGB,
        linesizeRGB);

    sws_freeContext(sContext);

    return 0;
}

#else

// Clamp r/g/b to 0..255 and append them to outMap in RGB byte order.
#define YUV2RGB_WRITE_RGB \
*outMap++ = (uint8_t)(r > 255 ? 255 : (r < 0 ? 0 : r)); \
*outMap++ = (uint8_t)(g > 255 ? 255 : (g < 0 ? 0 : g)); \
*outMap++ = (uint8_t)(b > 255 ? 255 : (b < 0 ? 0 : b));

// Same, BGR byte order.
#define YUV2RGB_WRITE_BGR \
*outMap++ = (uint8_t)(b > 255 ? 255 : (b < 0 ? 0 : b)); \
*outMap++ = (uint8_t)(g > 255 ? 255 : (g < 0 ? 0 : g)); \
*outMap++ = (uint8_t)(r > 255 ? 255 : (r < 0 ? 0 : r));

// Same, with a fully-opaque alpha byte first (ARGB).
#define YUV2RGB_WRITE_ARGB \
*outMap++ = 0xFF; \
*outMap++ = (uint8_t)(r > 255 ? 255 : (r < 0 ? 0 : r)); \
*outMap++ = (uint8_t)(g > 255 ? 255 : (g < 0 ? 0 : g)); \
*outMap++ = (uint8_t)(b > 255 ? 255 : (b < 0 ? 0 : b));

// Same, ABGR.
#define YUV2RGB_WRITE_ABGR \
*outMap++ = 0xFF; \
*outMap++ = (uint8_t)(b > 255 ? 255 : (b < 0 ? 0 : b)); \
*outMap++ = (uint8_t)(g > 255 ? 255 : (g < 0 ? 0 : g)); \
*outMap++ = (uint8_t)(r > 255 ? 255 : (r < 0 ? 0 : r));

// Same, alpha byte last (RGBA).
#define YUV2RGB_WRITE_RGBA \
*outMap++ = (uint8_t)(r > 255 ? 255 : (r < 0 ? 0 : r)); \
*outMap++ = (uint8_t)(g > 255 ? 255 : (g < 0 ? 0 : g)); \
*outMap++ = (uint8_t)(b > 255 ? 255 : (b < 0 ? 0 : b)); \
*outMap++ = 0xFF;

// Same, BGRA.
#define YUV2RGB_WRITE_BGRA \
*outMap++ = (uint8_t)(b > 255 ? 255 : (b < 0 ? 0 : b)); \
*outMap++ = (uint8_t)(g > 255 ? 255 : (g < 0 ? 0 : g)); \
*outMap++ = (uint8_t)(r > 255 ? 255 : (r < 0 ? 0 : r)); \
*outMap++ = 0xFF;

// Core loop: walks the destination raster, maps each output pixel back to a
// source sample per plane via the float step/div accumulators (nearest-
// neighbour scaling), converts YUV->RGB in fixed-point integer arithmetic,
// then emits the pixel with the `action` macro (one of the WRITE_* above).
// Relies on pY/pU/pV, outMap, yuv*Div/Step/Offset, u/v/r/g/b and rgbX/rgbY
// being declared in the enclosing function.
#define YUV2RGB_BASE(action) \
for (rgbY = 0; rgbY < rgbMap->height; rgbY++) { \
    yuvOffset[0] = ((int)yuvYStep[0]) * yuvMap[0].width; \
    yuvOffset[1] = ((int)yuvYStep[1]) * yuvMap[1].width; \
    yuvOffset[2] = ((int)yuvYStep[2]) * yuvMap[2].width; \
    yuvXStep[0] = yuvXStep[1] = yuvXStep[2] = 0; \
    for (rgbX = 0; rgbX < rgbMap->width; rgbX++) { \
        u = pU[yuvOffset[1] + (int)yuvXStep[1]] - 128; \
        v = pV[yuvOffset[2] + (int)yuvXStep[2]] - 128; \
        r = pY[yuvOffset[0] + (int)yuvXStep[0]] + (v + ((v * 103) >> 8)); \
        g = pY[yuvOffset[0] + (int)yuvXStep[0]] - (((u * 88) >> 8) + ((v * 183) >> 8)); \
        b = pY[yuvOffset[0] + (int)yuvXStep[0]] + (u +((u*198) >> 8)); \
        action \
        yuvXStep[0] += yuvXDiv[0]; \
        yuvXStep[1] += yuvXDiv[1]; \
        yuvXStep[2] += yuvXDiv[2]; \
    } \
    yuvYStep[0] += yuvYDiv[0]; \
    yuvYStep[1] += yuvYDiv[1]; \
    yuvYStep[2] += yuvYDiv[2]; \
}

// Integer-arithmetic YUV->RGB with nearest-neighbour scaling (software
// alternative to the swscale path above). Returns 0 on success, -1 on an
// unsupported destination format.
int32_t ffcodec_YUV_to_RGB(FFCodec_Map yuvMap[3], FFCodec_Map *rgbMap)
{
    // per-plane scale factor: source step added for each destination pixel
    float yuvXDiv[3] = {
        (float)yuvMap[0].width / rgbMap->width,
        (float)yuvMap[1].width / rgbMap->width,
        (float)yuvMap[2].width / rgbMap->width,
    };
    float yuvYDiv[3] = {
        (float)yuvMap[0].height / rgbMap->height,
        (float)yuvMap[1].height / rgbMap->height,
        (float)yuvMap[2].height / rgbMap->height,
    };

    // accumulators; truncated to int to index the source sample
    float yuvXStep[3] = {};
    float yuvYStep[3] = {};

    // row base offset per plane
    int yuvOffset[3] = {};

    // Planes must be read as unsigned bytes (0..255). The old code read them
    // through (signed) char pointers, which mis-interprets every sample >= 128
    // and corrupts both the luma and the U/V bias removal in YUV2RGB_BASE.
    const uint8_t *pY = yuvMap[0].map;
    const uint8_t *pU = yuvMap[1].map;
    const uint8_t *pV = yuvMap[2].map;

    uint8_t *outMap = rgbMap->map;

    // conversion scratch variables used by the YUV2RGB_* macros
    int u, v, r, g, b;
    uint32_t rgbY, rgbX;

    // swap U/V plane pointers for YVU-ordered input
    // if (yuvMap[0].format == CODEC_FORMAT_YVU420P ||
    //     yuvMap[0].format == CODEC_FORMAT_YVU422P ||
    //     yuvMap[0].format == CODEC_FORMAT_YVU420SP ||
    //     yuvMap[0].format == CODEC_FORMAT_YVU422SP)
    // {
    //     const uint8_t *pTemp = pU;
    //     pU = pV;
    //     pV = pTemp;
    // }

    // scale + convert, dispatching on the requested output byte order
    if (rgbMap->format == CODEC_FORMAT_RGB)
        YUV2RGB_BASE(YUV2RGB_WRITE_RGB)
    else if (rgbMap->format == CODEC_FORMAT_BGR)
        YUV2RGB_BASE(YUV2RGB_WRITE_BGR)
    else if (rgbMap->format == CODEC_FORMAT_ARGB)
        YUV2RGB_BASE(YUV2RGB_WRITE_ARGB)
    else if (rgbMap->format == CODEC_FORMAT_ABGR)
        YUV2RGB_BASE(YUV2RGB_WRITE_ABGR)
    else if (rgbMap->format == CODEC_FORMAT_RGBA)
        YUV2RGB_BASE(YUV2RGB_WRITE_RGBA)
    else if (rgbMap->format == CODEC_FORMAT_BGRA)
        YUV2RGB_BASE(YUV2RGB_WRITE_BGRA)
    else
    {
        FFCODEC_ERR("Unsupported translate format(%d->%d) !! \n",
            yuvMap[0].format, rgbMap->format);
        return -1;
    }
    return 0;
}
#endif

// ----- 其它工具方法 -----

//查找h264/265帧位置,返回offset,-1失败
int32_t h26xFindFrame(const uint8_t *buff, uint32_t buffLen, int32_t *frameSize, char *type)
{
    uint32_t i;
    int32_t offset = -1;
    int32_t offset2 = buffLen - 1;
    uint8_t bType, bH264, bH265;

    //查找头 0,0,1 或者 0,0,0,1
    for (i = 0; i < buffLen - 3; i++)
    {
        if (buff[i] == 0 && buff[i + 1] == 0 &&
            (buff[i + 2] == 1 || (buff[i + 2] == 0 && buff[i + 3] == 1)))
        {
            offset = i;
            if (type)
            {
                bType = buff[i + 2] == 1 ? buff[i + 3] : buff[i + 4];
                bH264 = bType & 0x1F;
                bH265 = (bType & 0x7E) >> 1;

                if (bH264 == 7 || bH265 == 33)
                    *type = 'S';
                else if (bH264 == 5 || bH265 == 19)
                    *type = 'I';
                else if (bH264 == 1 || bH265 == 1)
                    *type = 'P';
                // else if (bH264 == xx || bH265 == xx)
                //     *type = 'B';
                else
                    *type = 0;
            }
            break;
        }
    }

    if (offset < 0)
        return offset;

    //查找下一个 0,0,1 或者 0,0,0,1
    for (i = offset + 3; i < buffLen - 3; i++)
    {
        if (buff[i] == 0 && buff[i + 1] == 0 &&
            (buff[i + 2] == 1 || (buff[i + 2] == 0 && buff[i + 3] == 1)))
        {
            offset2 = i;
            break;
        }
    }

    if (frameSize)
        *frameSize = offset2 - offset;
    return offset;
}

// Shift the unconsumed tail of buff to the front, discarding the
// (*buffOffset + *frameSize) bytes already processed; companion to
// h26xFindFrame(). Updates *buffLen and zeroes *buffOffset / *frameSize.
void h26xMoveBuff(uint8_t *buff, int32_t *buffLen, int32_t *buffOffset, int32_t *frameSize)
{
    int32_t consumed = (*buffOffset) + (*frameSize);
    int32_t remain = (*buffLen) - consumed;
    // memmove: source and destination overlap
    if (remain > 0)
        memmove(buff, buff + consumed, (size_t)remain);
    *buffLen = remain;
    *buffOffset = *frameSize = 0;
}
