#include<android/log.h>
#include <jni.h>
#include <string>
#include "log.h"


extern "C" {
#include "libavutil/log.h"
#include "libavformat/avformat.h"
#include "libavutil/dict.h"
#include "libavcodec/avcodec.h"
#include "libavutil/timestamp.h"
}

static void log_packet(const AVFormatContext *fmt_ctx, const AVPacket *pkt, const char *tag) {
    AVRational *time_base = &fmt_ctx->streams[pkt->stream_index]->time_base;

    printf("%s: pts:%s pts_time:%s dts:%s dts_time:%s duration:%s duration_time:%s stream_index:%d\n",
           tag,
           av_ts2str(pkt->pts), av_ts2timestr(pkt->pts, time_base),
           av_ts2str(pkt->dts), av_ts2timestr(pkt->dts, time_base),
           av_ts2str(pkt->duration), av_ts2timestr(pkt->duration, time_base),
           pkt->stream_index);
}

extern "C"
JNIEXPORT jstring JNICALL
Java_com_example_helloffmpeg_system_FfmpegHelper_getVersion(JNIEnv *env, jobject thiz) {
    // Fetch the FFmpeg build's version string once and hand it back to Java.
    const char *version = av_version_info();
    LOGD("current version is %s", version);
    return env->NewStringUTF(version);
}


extern "C"
JNIEXPORT jstring JNICALL
/**
 * Open the media file at @p file_path, log its container metadata, and
 * return a short summary string "FPS / Width / Height / Duration" to Java.
 * On any failure a human-readable error string is returned instead.
 *
 * Fixes vs. the original:
 *  - the UTF chars obtained from GetStringUTFChars were leaked on the
 *    success path and on two of the error paths; they are now released
 *    immediately after the last use of `path`;
 *  - unchecked sprintf replaced with snprintf;
 *  - int64_t printed via an explicit (long long) cast so "%lld" is correct
 *    on every ABI.
 */
Java_com_example_helloffmpeg_system_FfmpegHelper_printFileInfo(JNIEnv *env, jobject thiz,jstring file_path) {
    const char *path = env->GetStringUTFChars(file_path, nullptr);
    LOGD("the parse path is %s",  path);

    AVFormatContext *fmt_ctx = avformat_alloc_context();
    AVDictionaryEntry *dictionaryEntry = nullptr;

    if(!fmt_ctx){
        env->ReleaseStringUTFChars(file_path, path);
        return env->NewStringUTF("Could not allocate format context");
    }

    //1. Open the media file.
    int ret = avformat_open_input(&fmt_ctx, path, nullptr, nullptr);
    // `path` is not needed past this call: release it here so every later
    // return path is leak-free.
    env->ReleaseStringUTFChars(file_path, path);
    if(ret < 0){
        LOGD( "open path error %s", av_err2str(ret));
        // avformat_open_input frees a user-supplied context on failure and
        // nulls fmt_ctx, so no explicit free is needed here.
        return env->NewStringUTF("Could not open video file");
    }

    //2. Read stream information.
    ret = avformat_find_stream_info(fmt_ctx, nullptr);
    if(ret<0){
        LOGD( "avformat_find_stream_info error %s", av_err2str(ret));
        avformat_close_input(&fmt_ctx);
        return env->NewStringUTF("Could not find stream");
    }

    //3. Log all container-level metadata entries (title, encoder, ...).
    while ((dictionaryEntry = av_dict_get(fmt_ctx->metadata, "", dictionaryEntry,
                                          AV_DICT_IGNORE_SUFFIX))) {
        LOGD("%s=%s\n", dictionaryEntry->key,dictionaryEntry->value);
    }

    //4. Locate the first video stream.
    int videoStreamIndex = -1;
    for(unsigned int i=0;i< fmt_ctx->nb_streams;i++){
        if(fmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO){
            videoStreamIndex = i;
            break;
        }
    }
    if(videoStreamIndex == -1){
        avformat_close_input(&fmt_ctx);
        return env->NewStringUTF("Could not find a video stream");
    }
    //5. Grab the video stream and its codec parameters.
    AVStream *videoStream = fmt_ctx->streams[videoStreamIndex];
    AVCodecParameters *codecParams = videoStream->codecpar;

    //6. Frame rate (best guess across container / codec hints).
    AVRational frameRate =  av_guess_frame_rate(fmt_ctx,videoStream, NULL);
    double fps = av_q2d(frameRate);

    //7. Dimensions and duration (AV_TIME_BASE units -> seconds).
    int width = codecParams->width;
    int height = codecParams->height;
    int64_t duration = fmt_ctx->duration/AV_TIME_BASE;

    //8. Release the demuxer before building the result string.
    avformat_close_input(&fmt_ctx);
    char info[256];
    snprintf(info, sizeof(info), "FPS: %.2f, Width: %d, Height: %d, Duration: %lld seconds",
             fps, width, height, (long long) duration);
    LOGD("info %s\n", info);
    return env->NewStringUTF(info);
}
extern "C"
JNIEXPORT jint JNICALL
/**
 * Remux the media file at @p file_path into a new container at @p dst_path
 * (e.g. MP4 -> FLV) without re-encoding: audio, video and subtitle streams
 * are copied packet-by-packet with their timestamps rescaled to the output
 * streams' time bases. Other stream types (data, attachments) are dropped.
 *
 * Returns 0 on success, 1 on any error (details are logged via LOGD).
 * This mirrors FFmpeg's remuxing.c example, adapted for JNI.
 */
Java_com_example_helloffmpeg_system_FfmpegHelper_remux(JNIEnv *env, jobject thiz, jstring file_path,
                                                       jstring dst_path) {
    const char *srcPath = env->GetStringUTFChars(file_path, nullptr);
    const char *dstPath = env->GetStringUTFChars(dst_path, nullptr);
    const AVOutputFormat *ofmt = nullptr;
    AVFormatContext *ifmt_ctx = nullptr, *ofmt_ctx = nullptr;
    AVPacket pkt;
    int ret, i;
    int stream_index = 0;
    // stream_mapping[input stream index] = output stream index, or -1 if the
    // stream is dropped from the output.
    int *stream_mapping = nullptr;
    int stream_mapping_size = 0;


    // 1. Open the input file, read its stream info, and dump it to the log.
    ret = avformat_open_input(&ifmt_ctx, srcPath, nullptr, nullptr);
    if (ret < 0) {
        LOGD("Could not open input file %s", av_err2str(ret));
        goto end;
    }

    ret = avformat_find_stream_info(ifmt_ctx,0);
    if(ret<0){
        LOGD("Failed to retrieve input stream information");
        goto end;
    }

    av_dump_format(ifmt_ctx, 0, srcPath, 0);

    // 2. Allocate the output context; the muxer is guessed from dstPath's
    // file extension.
    avformat_alloc_output_context2(&ofmt_ctx, nullptr, nullptr,dstPath);
    if(!ofmt_ctx){
        ret = AVERROR_UNKNOWN;
        LOGD("Could not create output context");
        goto end;
    }
    ofmt = ofmt_ctx->oformat;

    stream_mapping_size = ifmt_ctx->nb_streams;
    stream_mapping = (int *)av_malloc_array(stream_mapping_size,sizeof(*stream_mapping));
    if(!stream_mapping){
        ret = AVERROR(ENOMEM);
        goto end;
    }

    // 3. Walk every input stream and mirror the ones we keep into the output.
    for(i=0;i<ifmt_ctx->nb_streams;i++){
        AVStream *out_stream;
        AVStream *in_stream = ifmt_ctx->streams[i];
        AVCodecParameters *in_codecpar = in_stream->codecpar;

        // 3.1 Only audio, video and subtitle streams are kept in the output.
        if(in_codecpar->codec_type != AVMEDIA_TYPE_AUDIO &&
                in_codecpar->codec_type != AVMEDIA_TYPE_VIDEO &&
                in_codecpar->codec_type != AVMEDIA_TYPE_SUBTITLE){
            stream_mapping[i] = -1;
            continue;
        }
        // 3.2 Create the output stream.
        // Array index: this stream's index in the input file.
        // Value: this stream's index in the output file.
        stream_mapping[i] = stream_index++;

        out_stream = avformat_new_stream(ofmt_ctx, nullptr);
        if(!out_stream){
            ret = AVERROR_UNKNOWN;
            LOGD( "Failed allocating output stream\n");
            goto end;
        }

        // Copy codec parameters verbatim — this is a remux, no transcoding.
        ret = avcodec_parameters_copy(out_stream->codecpar, in_codecpar);
        if (ret < 0) {
            LOGD("Failed to copy codec parameters\n");
            goto end;
        }
        // Zero the tag so the new muxer picks a tag valid for its container.
        out_stream->codecpar->codec_tag = 0;
    }

    av_dump_format(ofmt_ctx, 0, dstPath, 1);

    // 4. Open the output file and write the stream data.
    if (!(ofmt->flags & AVFMT_NOFILE)) {
        // Create and initialize the AVIOContext for the output file.
        ret = avio_open(&ofmt_ctx->pb, dstPath, AVIO_FLAG_WRITE);
        if (ret < 0) {
            LOGD("Could not open output file '%s'", dstPath);
            goto end;
        }
    }

    ret = avformat_write_header(ofmt_ctx, nullptr);
    if (ret < 0) {
        LOGD("Error occurred when opening output file\n");
        goto end;
    }

    while (1) {
        AVStream *in_stream, *out_stream;

        // Read the next packet; ret < 0 is EOF or a read error.
        ret = av_read_frame(ifmt_ctx, &pkt);
        if (ret < 0) break;

        in_stream = ifmt_ctx->streams[pkt.stream_index];
        // Skip packets from invalid streams or streams we chose to drop.
        if (pkt.stream_index >= stream_mapping_size ||
            stream_mapping[pkt.stream_index] < 0) {
            av_packet_unref(&pkt);
            continue;
        }

        // Retarget the packet to its index in the output file.
        pkt.stream_index = stream_mapping[pkt.stream_index];
        out_stream = ofmt_ctx->streams[pkt.stream_index];
        log_packet(ifmt_ctx, &pkt, "in");

        // Input and output time bases may differ, so pts/dts must be
        // rescaled from the input stream's time base to the output's.
        pkt.pts = av_rescale_q(pkt.pts, in_stream->time_base, out_stream->time_base);
        pkt.dts = av_rescale_q(pkt.dts, in_stream->time_base, out_stream->time_base);
        // Rescale duration with the same time-base conversion.
        pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
        // Unknown byte position in the output; let the muxer decide.
        pkt.pos = -1;
        log_packet(ofmt_ctx, &pkt, "out");

        // Write the packet (interleaved by dts across streams).
        ret = av_interleaved_write_frame(ofmt_ctx, &pkt);
        if (ret < 0) {
            LOGD("Error muxing packet\n");
            break;
        }
        av_packet_unref(&pkt);
    }

    // Write the container trailer (finalizes indexes/metadata).
    av_write_trailer(ofmt_ctx);

    // Shared cleanup for all paths; safe even when some resources were
    // never acquired (each is guarded or null-checked).
    end:
    env->ReleaseStringUTFChars(file_path, srcPath);
    env->ReleaseStringUTFChars(dst_path, dstPath);
    if (ifmt_ctx) avformat_close_input(&ifmt_ctx);
    // ofmt is always set whenever ofmt_ctx is non-null (assigned right after
    // allocation), so this dereference is safe.
    if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE)) avio_closep(&ofmt_ctx->pb);
    if (ofmt_ctx) avformat_free_context(ofmt_ctx);
    if (stream_mapping) av_freep(&stream_mapping);
    // AVERROR_EOF is the normal loop-exit condition, not a failure.
    if (ret < 0 && ret != AVERROR_EOF) {
        LOGD("Error occurred: %s\n", av_err2str(ret));
        return 1;
    }

    return 0;
}