#include <stdio.h>

#include <stdlib.h>
#include <string.h>

#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>
#include <libavutil/imgutils.h>
#include <libavformat/avformat.h>

#define INBUF_SIZE 4096

#include <libavutil/imgutils.h>
#include <libavutil/samplefmt.h>
#include <libavutil/timestamp.h>
#include <libavutil/pixfmt.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>

#include "assgl.h"


/* Handle to the ASS/OpenGL subtitle-rendering helper library (project-local). */
AssglLib* lib = NULL;

static AVFormatContext *fmt_ctx = NULL;       /* demuxer context for the input file */
static AVCodecContext *video_dec_ctx = NULL;  /* decoder context for the video stream */

/* Dimensions/format captured from the decoder when the video stream is opened;
 * output_video_frame() requires every frame to match these. */
static int width, height;
static enum AVPixelFormat pix_fmt;
static AVStream *video_stream = NULL, *audio_stream = NULL; /* audio_stream is never assigned in this file */
static const char *src_filename = NULL;       /* input path, set in main() */

/* Destination image buffer allocated with av_image_alloc() in main(). */
static uint8_t *video_dst_data[4] = {NULL};
static int      video_dst_linesize[4];
static int video_dst_bufsize;

static int video_stream_idx = -1;             /* index of the selected video stream, -1 if none */

static AVFrame *frame = NULL;                 /* reusable frame for avcodec_receive_frame() */
static AVPacket *pkt = NULL;                  /* reusable demuxed packet */
static int video_frame_count = 0;             /* NOTE(review): never incremented in this file */
static int audio_frame_count = 0;             /* NOTE(review): never incremented in this file */
unsigned int frameIndex = 0;                  /* guards output_video_frame() so it runs once */

/* Decode a single image file (e.g. PNG) into a newly allocated AVFrame. */
AVFrame* decode_image_to_avframe(const char* filename) ;
/* Dump a frame's raw pixels to "yuv-<frameIndex>.yuv" for inspection. */
void savePicture(AVFrame *frameResult,int frameIndex);

/*
 * Process one decoded video frame: on the very first frame only, render the
 * ASS/OpenGL overlay, blend it onto the frame (via the project's blendFromCV),
 * and dump the intermediate images to disk with savePicture().
 *
 * Deliberately returns -1 in all cases so the caller stops after the first
 * frame (debug/experimentation behavior, see comment at the bottom).
 */
static int output_video_frame(AVFrame *frame)
{
#if 1
    if (frame->width != width || frame->height != height ||
        frame->format != pix_fmt) {
        /* The raw-video pipeline assumes constant geometry/format; abort if
         * the stream changes mid-file. */
        fprintf(stderr, "Error: Width, height and pixel format have to be "
                "constant in a rawvideo file, but the width, height or "
                "pixel format of the input video changed:\n"
                "old: width = %d, height = %d, format = %s\n"
                "new: width = %d, height = %d, format = %s\n",
                width, height, av_get_pix_fmt_name(pix_fmt),
                frame->width, frame->height,
                av_get_pix_fmt_name(frame->format));
        return -1;
    }

    if (frameIndex == 0)
    {
        savePicture(frame,0);
        /* Read the PNG overlay image. */
        AVFrame *framePng = NULL;
        /* Decode the PNG into an AVFrame (disabled; the GL path below is used). */
#if 0
        framePng = decode_image_to_avframe("a.png");
        if (framePng == NULL){
            printf("获取单张图片失败\n");
        }
#endif

        {
            /* Render the ASS text with OpenGL and read back the RGBA pixels.
             * NOTE(review): framePng is presumably allocated by charToAVFrame;
             * a failure there would leave it NULL — confirm charToAVFrame's
             * contract before relying on framePng below. */
            AssglLib asslibS;
            assglWindowInit(frame->width,frame->height,&(asslibS.GlParamS));
            assglTextInit(frame->width,frame->height,&(asslibS.GlParamS));
            char *buf = assglReader(frame->width,frame->height,&(asslibS.GlParamS));
            /* Convert the raw RGBA buffer into an AVFrame. */
            // AVFrame newFrame;
            charToAVFrame(buf,0,frame->width,frame->height,&framePng,AV_PIX_FMT_RGBA);
            /* AVFrame blend */
            // framePng = blendFromCV(&newFrame,frame);
        }


        savePicture(framePng,1);

        /* Blend the overlay onto the decoded video frame. */
        AVFrame *frameResult = NULL;

        frameResult = blendFromCV(framePng,frame);
        if (frameResult == NULL){
            printf("blend图片失败\n");
        }

        savePicture(frameResult,2);
        frameIndex++;
    }
    return -1; /* deliberate: force the caller to stop after one frame */
#endif
    return 0;  /* unreachable while the "#if 1" branch above is active */
}

/*
 * Submit one packet to the decoder and drain all frames it produces.
 *
 * @param dec  open decoder context
 * @param pkt  packet to submit, or NULL to flush the decoder
 * @return 0 on success / end of output, negative AVERROR on failure.
 *
 * Video frames are handed to output_video_frame(), which deliberately
 * returns -1 after the first frame so only one frame is processed; that
 * negative value propagates to the caller, which then stops demuxing.
 *
 * Fixes vs. original: the confusing `if ((ret = -1) < 0)` assignment-in-
 * condition is replaced by a plain check of output_video_frame()'s return
 * value (which is always -1 by design, so behavior is unchanged), the frame
 * is unreferenced before the early return, and the dead #if/#else
 * scaffolding is removed.
 */
static int decode_packet(AVCodecContext *dec, const AVPacket *pkt)
{
    int ret = 0;

    // submit the packet to the decoder
    ret = avcodec_send_packet(dec, pkt);
    if (ret < 0) {
        fprintf(stderr, "Error submitting a packet for decoding (%s)\n", av_err2str(ret));
        return ret;
    }

    // get all the available frames from the decoder
    while (ret >= 0) {
        ret = avcodec_receive_frame(dec, frame);
        if (ret < 0) {
            // these two return values are special and mean there is no output
            // frame available, but there were no errors during decoding
            if (ret == AVERROR_EOF || ret == AVERROR(EAGAIN))
                return 0;

            fprintf(stderr, "Error during decoding (%s)\n", av_err2str(ret));
            return ret;
        }

        if (dec->codec->type == AVMEDIA_TYPE_VIDEO) {
            // output_video_frame() deliberately returns -1 so that only the
            // first frame is processed before we bail out.
            ret = output_video_frame(frame);
            if (ret < 0) {
                av_frame_unref(frame); // release the frame before returning
                return ret;
            }
        }

        av_frame_unref(frame);
    }

    return 0;
}

/*
 * Locate the best stream of the requested media type in fmt_ctx, allocate
 * and open a decoder context for it.
 *
 * On success, *dec_ctx receives a newly allocated, opened codec context
 * (caller frees with avcodec_free_context) and *stream_idx the stream index.
 * Returns 0 on success or a negative AVERROR code on failure; *dec_ctx may
 * hold an allocated context even on failure (freed by the caller's cleanup).
 */
static int open_codec_context(int *stream_idx,
                              AVCodecContext **dec_ctx, AVFormatContext *fmt_ctx, enum AVMediaType type)
{
    int err;

    /* Pick the best matching stream; a negative result means none found. */
    int best = av_find_best_stream(fmt_ctx, type, -1, -1, NULL, 0);
    if (best < 0) {
        fprintf(stderr, "Could not find %s stream in input file '%s'\n",
                av_get_media_type_string(type), src_filename);
        return best;
    }

    AVStream *stream = fmt_ctx->streams[best];

    /* find decoder for the stream */
    const AVCodec *decoder = avcodec_find_decoder(stream->codecpar->codec_id);
    if (!decoder) {
        fprintf(stderr, "Failed to find %s codec\n",
                av_get_media_type_string(type));
        return AVERROR(EINVAL);
    }

    /* Allocate a codec context for the decoder */
    *dec_ctx = avcodec_alloc_context3(decoder);
    if (!*dec_ctx) {
        fprintf(stderr, "Failed to allocate the %s codec context\n",
                av_get_media_type_string(type));
        return AVERROR(ENOMEM);
    }

    /* Copy codec parameters from input stream to output codec context */
    err = avcodec_parameters_to_context(*dec_ctx, stream->codecpar);
    if (err < 0) {
        fprintf(stderr, "Failed to copy %s codec parameters to decoder context\n",
                av_get_media_type_string(type));
        return err;
    }

    /* Init the decoders */
    err = avcodec_open2(*dec_ctx, decoder, NULL);
    if (err < 0) {
        fprintf(stderr, "Failed to open %s codec\n",
                av_get_media_type_string(type));
        return err;
    }

    *stream_idx = best;
    return 0;
}

/*
 * Translate an AVSampleFormat into the raw-audio format name suitable for
 * the host's endianness (e.g. "s16le" on little-endian machines).
 *
 * On success *fmt points at a static string and 0 is returned; for an
 * unsupported format *fmt is set to NULL, a diagnostic is printed to
 * stderr and -1 is returned.
 */
static int get_format_from_sample_fmt(const char **fmt,
                                      enum AVSampleFormat sample_fmt)
{
    /* Per-format big-endian / little-endian name pair. */
    struct sample_fmt_entry {
        enum AVSampleFormat sample_fmt; const char *fmt_be, *fmt_le;
    } sample_fmt_entries[] = {
        { AV_SAMPLE_FMT_U8,  "u8",    "u8"    },
        { AV_SAMPLE_FMT_S16, "s16be", "s16le" },
        { AV_SAMPLE_FMT_S32, "s32be", "s32le" },
        { AV_SAMPLE_FMT_FLT, "f32be", "f32le" },
        { AV_SAMPLE_FMT_DBL, "f64be", "f64le" },
    };

    *fmt = NULL;

    /* Walk the table with a pointer rather than an index. */
    struct sample_fmt_entry *entry = sample_fmt_entries;
    struct sample_fmt_entry *table_end =
        sample_fmt_entries + FF_ARRAY_ELEMS(sample_fmt_entries);
    for (; entry < table_end; entry++) {
        if (entry->sample_fmt == sample_fmt) {
            *fmt = AV_NE(entry->fmt_be, entry->fmt_le);
            return 0;
        }
    }

    fprintf(stderr,
            "sample format %s is not supported as output format\n",
            av_get_sample_fmt_name(sample_fmt));
    return -1;
}

/*
 * Entry point: open the input video, decode frames and hand them to
 * output_video_frame() (which overlays the ASS/OpenGL rendering on the
 * first frame and then deliberately stops the pipeline).
 *
 * The input file may be supplied as argv[1]; when omitted, the historical
 * hard-coded default "1280x720.mp4" is used, so existing invocations keep
 * working. Returns 0 on success, non-zero on error.
 */
int main (int argc, char **argv)
{
    int ret = 0;

    /* Generalization: honor argv[1] (it was previously commented out),
     * falling back to the old hard-coded default. */
    src_filename = (argc > 1) ? argv[1] : "1280x720.mp4";

    lib = assgl_read_file("www.ass");
    if (!lib){
        /* Non-fatal here: decoding proceeds; the overlay step may fail later. */
        printf( "assgl_read_file has err\n");    
    }

    // openglInit(lib);

    /* open input file, and allocate format context */
    if (avformat_open_input(&fmt_ctx, src_filename, NULL, NULL) < 0) {
        fprintf(stderr, "Could not open source file %s\n", src_filename);
        exit(1);
    }

    /* retrieve stream information */
    if (avformat_find_stream_info(fmt_ctx, NULL) < 0) {
        fprintf(stderr, "Could not find stream information\n");
        exit(1);
    }

    if (open_codec_context(&video_stream_idx, &video_dec_ctx, fmt_ctx, AVMEDIA_TYPE_VIDEO) >= 0) {
        video_stream = fmt_ctx->streams[video_stream_idx];

        /* allocate image where the decoded image will be put */
        width = video_dec_ctx->width;
        height = video_dec_ctx->height;
        pix_fmt = video_dec_ctx->pix_fmt;
        ret = av_image_alloc(video_dst_data, video_dst_linesize,
                             width, height, pix_fmt, 1);
        if (ret < 0) {
            fprintf(stderr, "Could not allocate raw video buffer\n");
            goto end;
        }
        video_dst_bufsize = ret;
        ret = 0; /* av_image_alloc returned the buffer size, not a status */
    }

    /* dump input information to stderr */
    av_dump_format(fmt_ctx, 0, src_filename, 0);

    if (!audio_stream && !video_stream) {
        fprintf(stderr, "Could not find audio or video stream in the input, aborting\n");
        ret = 1;
        goto end;
    }

    frame = av_frame_alloc();
    if (!frame) {
        fprintf(stderr, "Could not allocate frame\n");
        ret = AVERROR(ENOMEM);
        goto end;
    }

    pkt = av_packet_alloc();
    if (!pkt) {
        fprintf(stderr, "Could not allocate packet\n");
        ret = AVERROR(ENOMEM);
        goto end;
    }

    /* read frames from the file */
    while (av_read_frame(fmt_ctx, pkt) >= 0) {
        /* check if the packet belongs to a stream we are interested in,
         * otherwise skip it */
        if (pkt->stream_index == video_stream_idx)
            ret = decode_packet(video_dec_ctx, pkt);

        av_packet_unref(pkt);
        if (ret < 0)
            break; /* decode_packet returns <0 after the first frame, by design */
    }

    /* flush the decoders */
    if (video_dec_ctx)
        decode_packet(video_dec_ctx, NULL);

    printf("Demuxing succeeded.\n");

end:
    avcodec_free_context(&video_dec_ctx);
    avformat_close_input(&fmt_ctx);
  
    av_packet_free(&pkt);
    av_frame_free(&frame);
    av_free(video_dst_data[0]);

    return ret < 0;
}

  
/*
 * Decode the first video frame of an image file (e.g. a PNG) into a newly
 * allocated AVFrame.
 *
 * @param filename  path of the image file to decode
 * @return the decoded frame on success (caller releases it with
 *         av_frame_free), or NULL on any failure.
 *
 * Fixes vs. original: no av_register_all() (removed in FFmpeg 5.x and
 * inconsistent with this file's `const AVCodec *` usage); the codec
 * context, format context and packet are released on every path (they all
 * leaked before); packets are read in a loop until a video packet is found
 * (previously a leading non-video packet aborted the whole function); and
 * NULL — not a half-initialized frame — is returned when decoding fails.
 */
AVFrame* decode_image_to_avframe(const char* filename) {
    const AVCodec *codec = NULL;
    AVCodecContext *codec_ctx = NULL;
    AVFormatContext *format_ctx = NULL;
    AVPacket packet;
    AVFrame *frame = NULL;
    int video_stream_index = -1;
    int ret;

    /* Open the input file. */
    if (avformat_open_input(&format_ctx, filename, NULL, NULL) != 0) {
        fprintf(stderr, "Could not open file '%s'\n", filename);
        return NULL;
    }

    /* Read stream information. */
    if (avformat_find_stream_info(format_ctx, NULL) < 0) {
        fprintf(stderr, "Could not find stream information\n");
        goto fail;
    }

    /* Find the (first) video stream. */
    for (unsigned int i = 0; i < format_ctx->nb_streams; i++) {
        if (format_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            video_stream_index = (int)i;
            break;
        }
    }
    if (video_stream_index == -1) {
        fprintf(stderr, "Did not find a video stream\n");
        goto fail;
    }

    /* Look up the decoder. */
    codec = avcodec_find_decoder(format_ctx->streams[video_stream_index]->codecpar->codec_id);
    if (!codec) {
        fprintf(stderr, "Unsupported codec!\n");
        goto fail;
    }

    /* Allocate the decoder context. */
    codec_ctx = avcodec_alloc_context3(codec);
    if (!codec_ctx) {
        fprintf(stderr, "Could not allocate video codec context\n");
        goto fail;
    }

    /* Copy the codec parameters into the context. */
    if (avcodec_parameters_to_context(codec_ctx, format_ctx->streams[video_stream_index]->codecpar) < 0) {
        fprintf(stderr, "Could not copy codec parameters\n");
        goto fail;
    }

    /* Open the decoder. */
    if (avcodec_open2(codec_ctx, codec, NULL) < 0) {
        fprintf(stderr, "Could not open codec\n");
        goto fail;
    }

    /* Allocate the output frame. */
    frame = av_frame_alloc();
    if (!frame) {
        fprintf(stderr, "Could not allocate video frame\n");
        goto fail;
    }

    /* Read packets until one from the video stream appears. */
    for (;;) {
        ret = av_read_frame(format_ctx, &packet);
        if (ret < 0) {
            fprintf(stderr, "Could not read a packet\n");
            goto fail_frame;
        }
        if (packet.stream_index == video_stream_index)
            break;
        av_packet_unref(&packet); /* not a video packet: discard, keep reading */
    }

    /* Send the packet to the decoder; the decoder takes its own reference,
     * so we can release ours immediately. */
    ret = avcodec_send_packet(codec_ctx, &packet);
    av_packet_unref(&packet);
    if (ret < 0) {
        fprintf(stderr, "Error sending a packet for decoding\n");
        goto fail_frame;
    }

    /* Receive the decoded frame. */
    ret = avcodec_receive_frame(codec_ctx, frame);
    if (ret < 0) {
        fprintf(stderr, "Error during decoding\n");
        goto fail_frame;
    }

    /* Success: the frame's buffers are reference-counted and stay valid
     * after the decoder context is freed. */
    avcodec_free_context(&codec_ctx);
    avformat_close_input(&format_ctx);
    return frame;

fail_frame:
    av_frame_free(&frame);
fail:
    avcodec_free_context(&codec_ctx);
    avformat_close_input(&format_ctx);
    return NULL;
}

/*
 * Dump a frame's raw pixel data to "yuv-<frameIndex>.yuv" for inspection.
 *
 * @param frameResult  frame to dump; may be NULL (callers pass the result
 *                     of operations that can fail), in which case nothing
 *                     is written
 * @param frameIndex   suffix used to build the output file name
 *
 * Fixes vs. original: NULL-frame guard (blendFromCV may return NULL and
 * was still passed here), fopen() result checked before fwrite (previously
 * a crash on failure), the av_image_alloc buffer is released (it leaked on
 * every call), and snprintf replaces sprintf.
 */
void savePicture(AVFrame *frameResult,int frameIndex){
    uint8_t *dst_data[4] = {NULL};
    int      dst_linesize[4];

    /* Guard against frames from failed upstream steps. */
    if (frameResult == NULL || frameResult->width <= 0 || frameResult->height <= 0) {
        fprintf(stderr, "savePicture: invalid frame, nothing written\n");
        return;
    }

    int ret = av_image_alloc(dst_data, dst_linesize,
                             frameResult->width, frameResult->height,
                             frameResult->format, 1);
    if (ret < 0)
    {
        fprintf(stderr, "Could not allocate raw video buffer\n");
        return ;
    }
    int bufsize = ret; /* av_image_alloc returns the total buffer size */

    av_image_copy(dst_data, dst_linesize,
                  (const uint8_t **)(frameResult->data), frameResult->linesize,
                  frameResult->format, frameResult->width, frameResult->height);

    /* Write the packed frame data to a per-index file. */
    char tmpstr[100] = {0};
    snprintf(tmpstr, sizeof tmpstr, "yuv-%d.yuv", frameIndex);

    FILE *f1 = fopen(tmpstr, "wb");
    if (f1 == NULL) {
        fprintf(stderr, "Could not open %s for writing\n", tmpstr);
    } else {
        fwrite(dst_data[0], 1, (size_t)bufsize, f1);
        fclose(f1);
    }
    av_freep(&dst_data[0]); /* release the temporary image buffer */
}