//
// Created by asus-cp on 2017-07-06.
//


#include <stdio.h>
#include <string.h>
#include <time.h>

#include "com_example_ffmpegdemo_DecodeUtil.h"

#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/imgutils.h"
#include "libavutil/log.h"
#include "libswscale/swscale.h"

#define ANDROID
#ifdef ANDROID
#include <android/log.h>
#define LOGE(format, ...)  __android_log_print(ANDROID_LOG_ERROR, "(>_<)", format, ##__VA_ARGS__)
#define LOGI(format, ...)  __android_log_print(ANDROID_LOG_INFO,  "(^_^)", format, ##__VA_ARGS__)
#else
#define LOGE(format, ...)  printf("(>_<) " format "\n", ##__VA_ARGS__)
#define LOGI(format, ...)  printf("(^_^) " format "\n", ##__VA_ARGS__)
#endif


//Output FFmpeg's av_log()
// av_log() callback: append every FFmpeg log line to a file on external
// storage.  The file is opened and closed on each call so a crash never
// loses buffered log text.  `ptr` and `level` are accepted to satisfy the
// callback signature but are not used.
void custom_log(void *ptr, int level, const char* fmt, va_list vl){
    FILE *log_fp = fopen("/storage/emulated/0/av_log.txt", "a+");
    if (log_fp == NULL) {
        return;  // storage not writable; drop the message silently
    }
    vfprintf(log_fp, fmt, vl);
    fflush(log_fp);
    fclose(log_fp);
}

 void  Java_com_example_ffmpegdemo_DecodeUtil_decodeNative(JNIEnv *env, jobject obj, jstring input_jstr, jstring output_jstr){

            AVFormatContext *pFormatCtx = NULL;
            int             i, videoindex;
            AVCodecContext  *pCodecCtx;
            AVCodec         *pCodec;
            AVFrame *pFrame,*pFrameYUV;
            uint8_t *out_buffer;
            AVPacket *packet;
            int y_size;
            int ret, got_picture;
            struct SwsContext *img_convert_ctx;
            FILE *fp_yuv;
            int frame_cnt;
            clock_t time_start, time_finish;
            double  time_duration = 0.0;

            char input_str[500]={0};
            char output_str[500]={0};
            char info[1000]={0};

            //我定义的
            int errorCode = 99;
            int* lineSize = NULL;
            AVDictionaryEntry *m = NULL;

            sprintf(input_str,"%s",(*env)->GetStringUTFChars(env,input_jstr, NULL));
            sprintf(output_str,"%s",(*env)->GetStringUTFChars(env,output_jstr, NULL));

            LOGI("input_str =  %s, output_str = %s",input_str,output_str);

            //FFmpeg av_log() callback
          	av_log_set_callback(custom_log);

            av_register_all();//注册所有支持的编码格式和解码格式
            avformat_network_init();
            //pFormatCtx = avformat_alloc_context();

            if(errorCode = avformat_open_input(&pFormatCtx,input_str,NULL,NULL)){
                LOGE("errorCode = %d", errorCode);
                LOGE("Couldn't open input stream.\n");
                return -1;
            }
            LOGI("avformat_open_input之后");

            if(avformat_find_stream_info(pFormatCtx,NULL)<0){
                LOGE("Couldn't find stream information.\n");
                return -1;
            }
            LOGI("avformat_find_stream_info之后");

            while(m=av_dict_get(pFormatCtx->metadata,"",m,AV_DICT_IGNORE_SUFFIX)){
                LOGI("元数据信息 key = %s, value = %s", m->key, m->value);
            }

            LOGI("流的个数 = %d", pFormatCtx->nb_streams);
            videoindex=-1;
            for(i=0; i<pFormatCtx->nb_streams; i++)  //流一般也就音频和视频2个，一个音频流，一个视频流
                if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO){
                    videoindex=i;
                    break;
                }
            if(videoindex==-1){
                LOGE("Couldn't find a video stream.\n");
                return -1;
            }
            pCodecCtx=pFormatCtx->streams[videoindex]->codec;
            pCodec=avcodec_find_decoder(pCodecCtx->codec_id);//codec_id不知是不是AV_CODEC_ID_MPEG4
            if(AV_CODEC_ID_MPEG4 == pCodecCtx->codec_id){
                LOGI("编码格式是AV_CODEC_ID_MPEG4");
            }
            if(pCodec==NULL){
                LOGE("Couldn't find Codec.\n");
                return -1;
            }
            //avcodec_open2()这个函数调用之后才算解码完成 AVFormatContext里面才会有 AVPacket的数据
            if(avcodec_open2(pCodecCtx, pCodec,NULL)<0){
                LOGE("Couldn't open codec.\n");
                return -1;
            }

            pFrame=av_frame_alloc();
            pFrameYUV=av_frame_alloc();
            out_buffer=(unsigned char *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P,  pCodecCtx->width, pCodecCtx->height,1));
            av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize,out_buffer,
                AV_PIX_FMT_YUV420P,pCodecCtx->width, pCodecCtx->height,1);


            packet=(AVPacket *)av_malloc(sizeof(AVPacket));

            img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
            pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);


          sprintf(info,   "[Input     ]%s\n", input_str);
          sprintf(info, "%s[Output    ]%s\n",info,output_str);
          sprintf(info, "%s[Format    ]%s\n",info, pFormatCtx->iformat->name);
          sprintf(info, "%s[Codec     ]%s\n",info, pCodecCtx->codec->name);
          sprintf(info, "%s[Resolution]%dx%d\n",info, pCodecCtx->width,pCodecCtx->height);

          LOGI("info = %s", info);


          fp_yuv=fopen(output_str,"wb+");
          if(fp_yuv==NULL){
                printf("Cannot open output file.\n");
                return -1;
            }

            frame_cnt=0;
            time_start = clock();

            while(av_read_frame(pFormatCtx, packet)>=0){  //从AVFormatContext中读取一个packet
                if(packet->stream_index==videoindex){ //读取出来的packet里面装的是视频流
                    ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
                    if(ret < 0){
                        LOGE("Decode Error.\n");
                        return -1;
                    }
                    if(got_picture){
                        sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
                            pFrameYUV->data, pFrameYUV->linesize);

                        y_size=pCodecCtx->width*pCodecCtx->height;
                        fwrite(pFrameYUV->data[0],1,y_size,fp_yuv);    //Y
                        fwrite(pFrameYUV->data[1],1,y_size/4,fp_yuv);  //U
                        fwrite(pFrameYUV->data[2],1,y_size/4,fp_yuv);  //V
                        //Output info
                        char pictype_str[10]={0};
                        switch(pFrame->pict_type){
                            case AV_PICTURE_TYPE_I:sprintf(pictype_str,"I");break;
                          	case AV_PICTURE_TYPE_P:sprintf(pictype_str,"P");break;
                            case AV_PICTURE_TYPE_B:sprintf(pictype_str,"B");break;
                            default:sprintf(pictype_str,"Other");break;
                        }
                        LOGI("Frame Index: %5d. Type:%s",frame_cnt,pictype_str);  //注意这个方法的使用
                        lineSize = pFrame->linesize;
                        LOGI("lineSize = %d, %d, %d, %d, %d, %d, %d, %d",lineSize[0],lineSize[1],lineSize[2],lineSize[3],
                        lineSize[4],lineSize[5],lineSize[6],lineSize[7]);

                        frame_cnt++;
                    }
                }
                av_free_packet(packet);//释放packet
            }
            //flush decoder
            //FIX: Flush Frames remained in Codec
            while (1) {
                ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
                if (ret < 0)
                    break;
                if (!got_picture)
                    break;
                sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
                    pFrameYUV->data, pFrameYUV->linesize);
                int y_size=pCodecCtx->width*pCodecCtx->height;
                fwrite(pFrameYUV->data[0],1,y_size,fp_yuv);    //Y
                fwrite(pFrameYUV->data[1],1,y_size/4,fp_yuv);  //U
                fwrite(pFrameYUV->data[2],1,y_size/4,fp_yuv);  //V
                //Output info
                char pictype_str[10]={0};
                switch(pFrame->pict_type){
                    case AV_PICTURE_TYPE_I:sprintf(pictype_str,"I");break;
                  	case AV_PICTURE_TYPE_P:sprintf(pictype_str,"P");break;
                    case AV_PICTURE_TYPE_B:sprintf(pictype_str,"B");break;
                    default:sprintf(pictype_str,"Other");break;
                }
                LOGI("Frame Index: %5d. Type:%s",frame_cnt,pictype_str);
                frame_cnt++;
            }
            time_finish = clock();
            time_duration=(double)(time_finish - time_start);

            sprintf(info, "%s[Time      ]%fms\n",info,time_duration);
            sprintf(info, "%s[Count     ]%d\n",info,frame_cnt);

            sws_freeContext(img_convert_ctx);

            fclose(fp_yuv);

            av_frame_free(&pFrameYUV);
            av_frame_free(&pFrame);
            avcodec_close(pCodecCtx);
            avformat_close_input(&pFormatCtx);

            return 0;

  }

