// Created by Chen qin lang on 2018/6/3.
//
#include <jni.h>
#include <stdio.h>

#include "com_zagj_videocomparess_MainActivity.h"
#include <string>
#include <fstream>
#include <iostream>
#include <thread>
#include <memory>
#include <pthread.h>
extern "C" {
#include "libavformat/avformat.h"
#include <android/log.h>

#include <libavfilter/buffersrc.h>
#include <libavfilter/buffersink.h>
#include "libswscale/swscale.h"
#include <libavdevice/avdevice.h>
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
#include "libavutil/opt.h"
#include "libavutil/channel_layout.h"
#include "libavutil/common.h"
#include "libavutil/imgutils.h"

}
using namespace std;
// Android logcat helpers: LOGE -> error channel, LOGI -> info channel.
#define LOGE(format, ...) __android_log_print(ANDROID_LOG_ERROR, "(>_<)", format,  ##__VA_ARGS__)
#define LOGI(format, ...)  __android_log_print(ANDROID_LOG_INFO,  "(^_^)", format, ##__VA_ARGS__)
// Global pipeline state shared by every helper in this file (single-use,
// not thread-safe beyond the one decode thread spawned in addFilter).
// Demuxer contexts: [0] = main video, [1] = overlay picture/video.
AVFormatContext *inputContext[2];
// Muxer context for the MPEG-TS output file.
AVFormatContext *outputCtx;
// Decoder contexts — borrowed from inputContext[i]->streams[0]->codec
// (deprecated AVStream::codec API), not separately allocated.
AVCodecContext *decodeContext[2];
// H.264 encoder context allocated in InitEncodec.
AVCodecContext *outputCodec;
// Overlay filter graph plus the unlinked pad lists returned by
// avfilter_graph_parse2 (consumed by InitInputFilter/InitOutputFIlter).
AVFilterGraph *avFilterGraph;
AVFilterInOut *inputs;
AVFilterInOut *outputs;
// Buffer-source filter contexts: [0] = main frame, [1] = overlay frame.
AVFilterContext *pFilterContext[2];
// Buffer-sink filter context producing the composited frames.
AVFilterContext *outputFilter;
// pSrcFrame: decoded frames; pDstFrame: filtered output frame;
// inputFrame: refs pushed into the buffer sources.
AVFrame *pSrcFrame[2];
AVFrame *pDstFrame;
AVFrame *inputFrame[2];
// Filter description: draw the second input over the first at (100,100).
const char *filter_descr = "overlay=100:100";
// Opens |filename| with the demuxer and stores the context in the global
// inputContext[indexFormat]; probes stream info and dumps the format.
// |avFormatContext| is unused; kept for interface compatibility — callers
// rely on the global array, not this parameter.
// Returns 0 on success or a negative AVERROR code.
int OpenInput(AVFormatContext *avFormatContext, char *filename, int indexFormat) {
    LOGE("OpenInput index: %d \n" , indexFormat);
    inputContext[indexFormat] = avformat_alloc_context();

    // BUG FIX: the original wrote `re = avformat_open_input(...) != 0`, where
    // `!=` binds tighter than `=`, so `re` held the boolean comparison result
    // (0/1) instead of the real AVERROR code.
    int re = avformat_open_input(&inputContext[indexFormat], filename, NULL, NULL);
    if (re != 0) {
        LOGE("Couldn't avformat_open_input\n");
        return re;
    }
    LOGE("OpenInput333");
    re = avformat_find_stream_info(inputContext[indexFormat], NULL);
    av_dump_format(inputContext[indexFormat], 0, filename, 0);
    if (re < 0) {
        LOGE("Couldn't find stream information.%d\n", indexFormat);
        return re;
    }
    LOGE("OpenInput inputContext===%d ---- %d\n", indexFormat, inputContext[indexFormat]->nb_streams);
    return 0;
}
// Finds and opens a decoder for the first stream of inputContext[index],
// storing the (borrowed, deprecated AVStream::codec) context in the global
// decodeContext[index]. Returns 0/positive on success, negative on failure.
int InitDecodeCodec(int index) {
    AVCodec *decoder = avcodec_find_decoder(inputContext[index]->streams[0]->codec->codec_id);
    if (!decoder) {
        LOGE("can not found decodec \n");
        return -1;
    }
    decodeContext[index] = inputContext[index]->streams[0]->codec;
    LOGE("OpenInput index: %d \n" ,  decodeContext[index]->coded_height);
    if (!decodeContext[index]) {
        LOGE("can not alloc codecContext failed \n");
        return -1;
    }
    // BUG FIX: AV_CODEC_FLAG_TRUNCATED is a codec-context flag; the original
    // OR'ed it into AVFormatContext::flags (whose values are AVFMT_FLAG_*),
    // which both corrupted the demuxer flags and never enabled truncation.
    if (decoder->capabilities & AV_CODEC_CAP_TRUNCATED)
        decodeContext[index]->flags |= AV_CODEC_FLAG_TRUNCATED;
    return avcodec_open2(decodeContext[index], decoder, NULL);
}
// Creates the global MPEG-TS muxer context (outputCtx) for |filename|, opens
// the file for writing, mirrors every input stream using the global encoder's
// parameters, and writes the container header once.
// |pContext| and |avFormatContext| are unused; kept for interface
// compatibility. Returns >= 0 on success, negative on failure.
int OpenOutput(AVFormatContext *pContext, AVFormatContext *avFormatContext, char *filename) {
    int ret = avformat_alloc_output_context2(&outputCtx, NULL, "mpegts", filename);
    if (ret < 0) {
        LOGE("alloc output Context failed\n");
        return -1;
    }
    ret = avio_open2(&outputCtx->pb, filename, AVIO_FLAG_WRITE, NULL, NULL);
    if (ret < 0) {
        LOGE("avio open failed \n");
        return -1;
    }
    LOGE("avio open success %d \n", ret);

    // BUG FIX: the original tested `outputCodec==NULL || outputCodec==NULL`
    // (same operand twice, probably meant to also cover outputCtx) and
    // repeated the check inside the loop; one check up front suffices.
    if (outputCodec == NULL) {
        LOGE("outputCodec==NULL  ||outputCodec==NULL\n");
        return -1;
    }
    for (unsigned int i = 0; i < inputContext[0]->nb_streams; ++i) {
        LOGE("nb_streams\n");
        AVStream *avStream = avformat_new_stream(outputCtx, outputCodec->codec);
        LOGE("avformat_new_stream\n");
        // BUG FIX: avformat_new_stream can fail; the result was dereferenced
        // unconditionally.
        if (avStream == NULL) {
            ret = -1;
            goto error;
        }
        ret = avcodec_copy_context(avStream->codec, outputCodec);
        if (ret < 0) {
            LOGE(" avcodec copy context failed \n");
            goto error;
        }
    }
    // BUG FIX: avformat_write_header() was called once per stream inside the
    // loop; the container header must be written exactly once, after all
    // output streams have been created.
    ret = avformat_write_header(outputCtx, NULL);
    if (ret < 0) {
        LOGE("write header failed \n");
        goto error;
    }
    return ret;
error:
    if (outputCtx) {
        // NOTE(review): avformat_close_input() on a muxer context matches the
        // file's existing style; avio_closep() + avformat_free_context()
        // would be the canonical cleanup (see CloseOutput).
        avformat_close_input(&outputCtx);
    }
    return ret;
}
// Allocates and opens the global H.264 encoder context (outputCodec) at the
// given output dimensions; the timebase is copied from decodeContext[index].
// |pCodecContext| is unused; kept for interface compatibility.
// Returns 1 on success, a negative value on failure.
int InitEncodec(AVCodecContext *pCodecContext, int width, int height, int index) {
    AVCodec *encoder = avcodec_find_encoder(AV_CODEC_ID_H264);
    LOGE("find encoder");
    if (NULL == encoder) {
        LOGE("find decode failed");
        return -1;
    }
    outputCodec = avcodec_alloc_context3(encoder);
    // BUG FIX: the allocation result was never checked before being
    // dereferenced by every assignment below.
    if (NULL == outputCodec) {
        LOGE("alloc encode context failed");
        return -1;
    }
    outputCodec->gop_size = 30;
    // First pixel format the encoder advertises (yuv420p for libx264).
    outputCodec->pix_fmt = *encoder->pix_fmts;
    outputCodec->has_b_frames = 0;
    outputCodec->time_base.num = decodeContext[index]->time_base.num;
    outputCodec->time_base.den = decodeContext[index]->time_base.den;
    // Low-latency settings: no B-frames, single reference frame, no
    // scene-change detection, no trellis quantization, no subpel refinement.
    outputCodec->max_b_frames = 0;
    outputCodec->codec_id = encoder->id;
    outputCodec->height = height;
    outputCodec->width = width;
    outputCodec->me_subpel_quality = 0;
    outputCodec->refs = 1;
    outputCodec->scenechange_threshold = 0;
    outputCodec->trellis = 0;

    AVDictionary *options = nullptr;
    // Global header is required because the stream parameters are copied
    // into the muxer's streams in OpenOutput().
    outputCodec->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
    LOGE("open codec ");
    int ret = avcodec_open2(outputCodec, encoder, &options);
    if (ret < 0) {
        LOGE("open codec failed");
        return ret;
    }
    LOGE("open codec success %d", ret);
    return 1;
}
// Creates a "buffer" source filter named |overlayName| for input |index| and
// links it to the matching unlinked input pad of the parsed overlay graph
// (index 0 -> first pad / main frame, index 1 -> second pad / overlay frame).
// |inoutfilter|, |pFilterContext1| and |filter_graph| are unused; the globals
// inputs/pFilterContext/avFilterGraph are what this operates on.
// Returns >= 0 on success, -1 on failure.
int InitInputFilter(AVFilterInOut *inoutfilter, AVFilterContext *pFilterContext1, AVFilterGraph *filter_graph,
                string overlayName, int index) {
    char args[512];
    memset(args, 0, sizeof(args));
    auto codec = inputContext[index]->streams[0]->codec;
    // BUG FIX: "video_size=WxH" expects width first; the original passed
    // height, width. Also switched the unbounded sprintf to snprintf.
    // NOTE(review): assumes codec->ticks_per_frame is non-zero — confirm for
    // all inputs this is fed (it is 1 by default, 2 for some H.264 streams).
    snprintf(args, sizeof(args),
            "video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d",
            codec->width, codec->height, codec->pix_fmt, codec->time_base.num,
            codec->time_base.den / codec->ticks_per_frame,
            codec->sample_aspect_ratio.num, codec->sample_aspect_ratio.den);
    LOGE("codec %s", args);
    // Pick the unlinked pad this source must feed.
    int pad_idx;
    AVFilterContext *padFilterContext;
    if (index == 0) {
        padFilterContext = inputs->filter_ctx;
        pad_idx = inputs->pad_idx;
    } else {
        padFilterContext = inputs->next->filter_ctx;
        pad_idx = inputs->next->pad_idx;
    }

    AVFilter *filter = avfilter_get_by_name("buffer");
    int ret = avfilter_graph_create_filter(&pFilterContext[index], filter, overlayName.c_str(), args, NULL, avFilterGraph);
    if (ret < 0) {
        LOGE("create filter failed\n");
        return -1;
    }
    ret = avfilter_link(pFilterContext[index], 0, padFilterContext, pad_idx);
    if (ret < 0) {
        LOGE("avfilter_link filter failed\n");
        return -1;
    }
    return ret;
}
// Creates a "buffersink" filter named |filename| and links the overlay
// graph's unlinked output pad to it. |pOut|, |pContext| and |pGraph| are
// unused; the globals outputs/outputFilter/avFilterGraph are used instead.
// Returns >= 0 on success, a negative AVERROR on failure.
int InitOutputFIlter(AVFilterInOut *pOut, AVFilterContext *pContext, AVFilterGraph *pGraph,
                     const char *filename) {
    LOGE("InitOutputFIlter");
    AVFilterContext *sourcePad = outputs->filter_ctx;
    LOGE("padFilterCtx");

    AVFilter *sinkFilter = avfilter_get_by_name("buffersink");
    LOGE("avfilter_get_by_name");

    int status = avfilter_graph_create_filter(&outputFilter, sinkFilter, filename, NULL, NULL, avFilterGraph);
    if (status < 0) {
        LOGE("graph create failed \n");
        return status;
    }
    status = avfilter_link(sourcePad, outputs->pad_idx, outputFilter, 0);
    if (status < 0) {
        LOGE("link failed \n");
    }
    return status;
}
// Releases the AVFilterInOut pad lists produced by avfilter_graph_parse2.
// All three parameters are ignored; the global inputs/outputs lists are what
// gets freed (matching how the rest of the file uses globals).
void freeInput(AVFilterInOut *pOut, AVFilterInOut *pInOut, AVFilterInOut *pFilterInOut) {
    avfilter_inout_free(&outputs);
    avfilter_inout_free(&inputs->next);
    avfilter_inout_free(&inputs);
}
// Reads the next packet from inputContext[index].
// Returns a self-freeing packet on success, nullptr on EOF or read error.
shared_ptr<AVPacket> ReadPacketFromSource(int index) {
    // BUG FIX: the packet was av_malloc'ed but released with av_packet_free
    // (which expects av_packet_alloc storage) followed by a redundant
    // av_freep that only avoided a double free because av_packet_free NULLs
    // the pointer. av_packet_alloc pairs correctly with av_packet_free, and
    // the deleter no longer captures anything.
    std::shared_ptr<AVPacket> packet(av_packet_alloc(),
                                     [](AVPacket *p) { av_packet_free(&p); });
    if (!packet) {
        LOGE("read packet failed");
        return nullptr;
    }
    int ret = av_read_frame(inputContext[index], packet.get());
    if (ret >= 0) {
        LOGE("read packet success");
        return packet;
    }
    LOGE("read packet failed");
    return nullptr;
}
// Decodes one video packet with the global decodeContext[index] into |frame|.
// |codecContext| is unused; kept for interface compatibility.
// Returns true (1) when a complete frame was produced, false (0) otherwise.
int DecodeVideo(AVCodecContext *codecContext, AVFrame *frame, AVPacket *packet, int index) {
    // BUG FIX: callers hand this the raw result of ReadPacketFromSource,
    // which can be null at EOF; avcodec_decode_video2 would crash on it.
    if (packet == NULL) {
        LOGE("decodec videco failed");
        return false;
    }
    int got_frame = 0;
    int ret = avcodec_decode_video2(decodeContext[index], frame, &got_frame, packet);
    // BUG FIX: ret is the byte count consumed and 0 is still success (e.g.
    // while draining), so the success test is >= 0, not > 0.
    if (ret >= 0 && got_frame) {
        LOGE("avcodec_decode_video2");
        return true;
    }
    LOGE("decodec videco failed");
    return false;
}
// pthread-style worker entry: decodes packets from input 1 until the first
// complete overlay frame lands in the global pSrcFrame[1].
void *task(void *) {
    bool done = false;
    // BUG FIX: the original `while(ret)` with `ret=false; break;` on success
    // exited after ONE packet whether or not a frame was produced; now it
    // keeps reading until a frame is decoded or the source is drained.
    while (!done) {
        LOGE(" task");
        auto packet = ReadPacketFromSource(1);
        // Stop on EOF rather than passing a null packet to the decoder.
        if (!packet)
            break;
        done = DecodeVideo(inputContext[1]->streams[0]->codec, pSrcFrame[1], packet.get(), 1);
        LOGE(" DecodeVideo index:---- %d", done);
    }
    // BUG FIX: falling off the end of a non-void function is undefined
    // behavior; pthread entry points must return a value.
    return nullptr;
}
// Closes every stream codec of the muxer context, then closes its I/O handle
// and frees the context itself. Safe to call with nullptr.
// NOTE(review): the pointer is taken by value, so the caller's own variable
// (the global outputCtx) is left dangling afterwards.
void CloseOutput(AVFormatContext *outputContext)
{
    if (outputContext != nullptr)
    {
        for (unsigned int i = 0; i < outputContext->nb_streams; i++)
        {
            AVCodecContext *codecContext = outputContext->streams[i]->codec;
            avcodec_close(codecContext);
        }
        // BUG FIX: avformat_close_input() is for demuxer contexts opened via
        // avformat_open_input. A muxer context whose pb was opened with
        // avio_open2 must close the pb explicitly and then be released with
        // avformat_free_context.
        if (outputContext->pb != nullptr)
            avio_closep(&outputContext->pb);
        avformat_free_context(outputContext);
    }
}
// Encodes |pFrame| with the global encoder (outputCodec) into |packet|.
// |pContext| is unused; kept for interface compatibility.
// Returns 0 when a packet was produced, a negative AVERROR otherwise.
int EncodecVideo(AVFormatContext *pContext, AVFrame *pFrame, AVPacket *packet) {
    int got_packet = 0;
    // BUG FIX: the original encoded the global pDstFrame and ignored the
    // |pFrame| argument entirely, so callers could never choose the frame.
    int ret = avcodec_encode_video2(outputCodec, packet, pFrame, &got_packet);
    if (ret >= 0 && got_packet) {
        LOGE(" encode frame success\n ");
        return 0;
    }
    LOGE(" encode frame failed 237\n ");
    return ret;
}
// Closes and frees a demuxer context; a nullptr argument is a no-op.
// NOTE(review): the pointer is taken by value, so only the local copy is
// NULLed — the caller's variable dangles afterwards; confirm callers never
// reuse it.
void CloseInput(AVFormatContext *formatContext)
{
    if (formatContext == nullptr)
        return;
    avformat_close_input(&formatContext);
}
// Rescales a packet's pts/dts/duration from |src_tb| to |dst_tb| and resets
// its byte position, mirroring av_packet_rescale_ts. Always returns 1.
int av_rscal_q(AVRational src_tb, AVRational dst_tb, AVPacket *pkt) {
    const bool hasPts = pkt->pts != AV_NOPTS_VALUE;
    if (hasPts)
        pkt->pts = av_rescale_q_rnd(pkt->pts, src_tb, dst_tb, AV_ROUND_NEAR_INF);
    LOGE(" encode frame pts\n ");

    const bool hasDts = pkt->dts != AV_NOPTS_VALUE;
    if (hasDts)
        pkt->dts = av_rescale_q_rnd(pkt->dts, src_tb, dst_tb, AV_ROUND_NEAR_INF);
    LOGE(" encode frame dts\n ");

    const bool hasDuration = pkt->duration > 0;
    if (hasDuration)
        pkt->duration = av_rescale_q(pkt->duration, src_tb, dst_tb);
    LOGE(" encode frame duration\n ");

    // Byte offset is meaningless after remuxing; let the muxer recompute it.
    pkt->pos = -1;
    return 1;
}
/**
 * JNI entry point: composites the first decoded frame of |picPath|'s video
 * stream onto every frame of |videoPath| via the global "overlay=100:100"
 * filter graph, re-encodes with H.264 and muxes the result into |savaPath|
 * as MPEG-TS. Returns 0 on success, a negative AVERROR on filter-graph
 * configuration failure.
 */
JNIEXPORT jint JNICALL Java_com_zagj_videocomparess_MainActivity_addFilter
        (JNIEnv *env, jobject obj, jstring videoPath, jstring picPath, jstring savaPath) {

    char video_srt[500] = {0};
    char pic_srt[512] = {0};
    char sava_srt[512] = {0};
    // BUG FIX: GetStringUTFChars buffers were leaked (no Release call) and
    // copied with unbounded sprintf; copy bounded and release each buffer.
    const char *videoChars = env->GetStringUTFChars(videoPath, NULL);
    const char *picChars = env->GetStringUTFChars(picPath, NULL);
    const char *savaChars = env->GetStringUTFChars(savaPath, NULL);
    snprintf(video_srt, sizeof(video_srt), "%s", videoChars ? videoChars : "");
    snprintf(pic_srt, sizeof(pic_srt), "%s", picChars ? picChars : "");
    snprintf(sava_srt, sizeof(sava_srt), "%s", savaChars ? savaChars : "");
    if (videoChars) env->ReleaseStringUTFChars(videoPath, videoChars);
    if (picChars) env->ReleaseStringUTFChars(picPath, picChars);
    if (savaChars) env->ReleaseStringUTFChars(savaPath, savaChars);
    LOGE("open file %s \n", video_srt);
    LOGE("open file %s \n", pic_srt);
    LOGE("open file %s \n", sava_srt);

    // One-time global FFmpeg registration (pre-4.0 API).
    av_register_all();
    avfilter_register_all();
    avformat_network_init();
    avdevice_register_all();
    inputContext[0] = avformat_alloc_context();
    inputContext[1] = avformat_alloc_context();

    // Open both sources, set up decoders, then the H.264 encoder at the
    // main video's dimensions.
    OpenInput(inputContext[0], video_srt, 0);
    OpenInput(inputContext[1], pic_srt, 1);
    InitDecodeCodec(0);
    InitDecodeCodec(1);
    InitEncodec(outputCodec, inputContext[0]->streams[0]->codec->width,
                inputContext[0]->streams[0]->codec->height, 0);
    LOGE("Java_com_zagj_videocomparess_MainActivity_addFilter  %d\n", outputCodec->time_base.den);

    // Build the overlay graph: two buffer sources -> overlay -> buffersink.
    avFilterGraph = avfilter_graph_alloc();
    if (!avFilterGraph) {
        LOGE("graph alloc failed");
    }
    avfilter_graph_parse2(avFilterGraph, filter_descr, &inputs, &outputs);
    LOGE("avfilter_graph_parse2");
    InitInputFilter(inputs, pFilterContext[0], avFilterGraph, "MainFrame", 0);
    InitInputFilter(inputs->next, pFilterContext[1], avFilterGraph, "OverlayFrame", 1);
    InitOutputFIlter(outputs, outputFilter, avFilterGraph, "out");
    // The pad lists are no longer needed once the sources/sink are linked.
    freeInput(inputs, inputs->next, outputs);
    // Validate all links and negotiate formats across the graph.
    int ret = avfilter_graph_config(avFilterGraph, NULL);
    if (ret < 0) {
        LOGE("graph config failed \n");
        return ret;
    }

    OpenOutput(outputCtx, inputContext[0], sava_srt);
    LOGE("outputCtx  %d \n", outputCtx->streams[0]->codec->coded_height);
    pSrcFrame[0] = av_frame_alloc();
    pSrcFrame[1] = av_frame_alloc();
    inputFrame[0] = av_frame_alloc();
    inputFrame[1] = av_frame_alloc();
    pDstFrame = av_frame_alloc();
    int got_output = 0;

    // Decode the overlay image once into pSrcFrame[1] on a helper thread.
    // BUG FIX: the original loop (`while(ret)` with `if(ret) break;`) gave up
    // after the first packet whether or not a frame was produced, and could
    // pass a null packet to the decoder at EOF.
    thread overlayDecode([&] {
        bool gotOverlay = false;
        while (!gotOverlay) {
            auto packet = ReadPacketFromSource(1);
            if (!packet) break;
            gotOverlay = DecodeVideo(inputContext[1]->streams[0]->codec, pSrcFrame[1], packet.get(), 1);
        }
    });
    overlayDecode.join();

    // Main loop: decode -> push both frames into the graph -> pull the
    // composited frame -> encode -> rescale timestamps -> mux.
    while (true) {
        shared_ptr<AVPacket> packet = ReadPacketFromSource(0);
        if (!packet) break;
        // BUG FIX: the main stream (index 0) was decoded while naming input
        // 1's codec context; pass the matching context for clarity.
        if (DecodeVideo(inputContext[0]->streams[0]->codec, pSrcFrame[0], packet.get(), 0)) {
            av_frame_ref(inputFrame[1], pSrcFrame[1]);
            if (av_buffersrc_add_frame_flags(pFilterContext[1], inputFrame[1], AV_BUFFERSRC_FLAG_PUSH) >= 0) {
                // Keep the overlay timestamp in lock-step with the main frame.
                pSrcFrame[1]->pts = pSrcFrame[0]->pts;
                av_frame_ref(inputFrame[0], pSrcFrame[0]);
                if (av_buffersrc_add_frame_flags(pFilterContext[0], inputFrame[0], AV_BUFFERSRC_FLAG_PUSH) >= 0) {
                    if (av_buffersink_get_frame_flags(outputFilter, pDstFrame, AV_BUFFERSINK_FLAG_NO_REQUEST) >= 0) {
                        // BUG FIX: packet allocation/free discipline matched
                        // to av_packet_alloc/av_packet_free (see
                        // ReadPacketFromSource for the original hazard).
                        std::shared_ptr<AVPacket> pTmpPkt(av_packet_alloc(),
                                                          [](AVPacket *p) { av_packet_free(&p); });
                        pTmpPkt->data = NULL;
                        pTmpPkt->size = 0;
                        ret = avcodec_encode_video2(outputCodec, pTmpPkt.get(), pDstFrame, &got_output);
                        if (ret >= 0 && got_output) {
                            av_rscal_q(inputContext[0]->streams[pTmpPkt->stream_index]->time_base,
                                       outputCtx->streams[pTmpPkt->stream_index]->time_base, pTmpPkt.get());
                            ret = av_interleaved_write_frame(outputCtx, pTmpPkt.get());
                            if (ret < 0) {
                                continue;
                            }
                        }
                    }
                    av_frame_unref(pDstFrame);
                }
            }
        }
    }
End:
    // BUG FIX: the container trailer was never written, leaving the output
    // stream improperly terminated.
    av_write_trailer(outputCtx);
    CloseInput(inputContext[0]);
    CloseInput(inputContext[1]);
    CloseOutput(outputCtx);
    std::cout << "Transcode file end!" << endl;
    LOGE("Transcode file end!");
    // BUG FIX: removed this_thread::sleep_for(chrono::hours(10)), which kept
    // the calling thread blocked for ten hours after the work finished.
    return 0;
}


