#include <jni.h>
#include <string>


#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <math.h>

#include <android/log.h>
#define  LOG_TAG    "FfmpegWriter"
#define  LOGI(...)  __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define LOGD(...)  __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)


extern "C" {
    #include <libavutil/avassert.h>
    #include <libavutil/channel_layout.h>
    #include <libavutil/opt.h>
    #include <libavutil/mathematics.h>
    #include <libavutil/timestamp.h>
    #include <libavformat/avformat.h>
    #include <libswscale/swscale.h>
    #include <libswresample/swresample.h>
#include "EasyAACEncoderAPI.h"
}


// Muxer state shared by the JNI entry points below. No locking is done here;
// callers are expected to serialize startRecord / addVideoData / addAudioData /
// stopRecord from a single thread — TODO confirm with the Java side.
static AVStream *video_st;   // video output stream (written as oc->streams[0])
static AVStream *audio_st;   // audio output stream (written as oc->streams[1])

static int64_t video_frameIndex;  // number of video packets muxed so far (drives PTS)
static int64_t audio_frameIndex;  // number of audio packets muxed so far (drives PTS)
static  AVFormatContext *oc;      // output (muxing) context, created in startRecord
static  AVOutputFormat *fmt;      // container format of oc, cached for stopRecord
static  int fps;                  // video frame rate used to synthesize timestamps
static  int audioFps;             // audio "frame rate" used to synthesize timestamps
static  int audioFormat;          // raw audio format id from Java (0x7A25 selects u-law)

static EasyAACEncoder_Handle handle;  // G.711 -> AAC transcoder handle



extern "C" JNIEXPORT jstring JNICALL
Java_com_roy_ffmpegwriter_MainActivity_stringFromJNI(
        JNIEnv* env,
        jobject /* this */) {
    // Android Studio template stub: hands a fixed greeting back to Java so the
    // app can verify the native library loaded.
    const std::string message("Hello from C++");
    return env->NewStringUTF(message.c_str());
}


extern "C"
JNIEXPORT void JNICALL
Java_com_roy_ffmpegwriter_FfmpegWriter_startRecordWithFilePath(JNIEnv *env, jobject thiz,
                                                               jstring file_path, jint width,
                                                               jint height, jint frame_rate,
                                                               jint video_format, jint audio_format,
                                                               jint audio_sample_rate,
                                                               jint audio_channel,
                                                               jint audio_frame_rate,
                                                               jint audio_bits_rate) {
    /*
     * Opens an output container at |file_path| and creates one video stream
     * (H.264 or H.265) and one audio stream (AAC, produced by transcoding the
     * incoming G.711 through EasyAACEncoder). Frames arrive already encoded,
     * so this module only muxes — no encoding is performed here.
     *
     * Fixes vs. the previous revision:
     *  - the chars obtained with GetStringUTFChars are now released on every
     *    path (they were leaked on each call);
     *  - fatal setup failures return early instead of logging "Crash:" and
     *    then dereferencing a NULL pointer;
     *  - the AVFMT_NOFILE guard around avio_open is restored per the FFmpeg
     *    muxing pattern.
     */
    const char *info = av_version_info();
    LOGD("av version is %s",info);
    bool isHevc = video_format == 0x56565268; // magic value from Java marks an H.265 payload
    const char *filename = env->GetStringUTFChars(file_path,NULL); // must be released below
    if (filename == NULL) {
        LOGD("Crash:GetStringUTFChars failed");
        return;
    }
    LOGD("file is %s format is width:%d height:%d frameRate:%d videoFormat:0x%x audioFormat:0x%x "
         "audioSampleRate:%d audioChannel:%d audioFrameRate:%d audioBitsRate:%d",filename,width,height,
         frame_rate,video_format,audio_format,audio_sample_rate,audio_channel,audio_frame_rate,audio_bits_rate);
    fps = frame_rate;
    audioFps = audio_frame_rate;
    AVCodec *audio_codec, *video_codec;
    AVStream *audioStream, *videoStream;
    video_frameIndex = 0;
    audio_frameIndex = 0;
    AVDictionary *opt = NULL;
    av_register_all();

    /* Allocate the output media context; fall back to mpeg when the container
     * cannot be deduced from the file extension. */
    int ret = avformat_alloc_output_context2(&oc, NULL, NULL, filename);
    if (!oc) {
        LOGD("Could not deduce output format from file extension: using MPEG. ret is %d",ret);
        avformat_alloc_output_context2(&oc, NULL, "mpeg", filename);
    }
    if (!oc) {
        LOGD("Crash:can't open this file %s",filename);
        env->ReleaseStringUTFChars(file_path, filename);
        return;
    }

    /* NOTE(review): avcodec_find_decoder() kept from the original even though
     * the log text says "encoder" — the codec is only used to tag the stream
     * for muxing; confirm whether avcodec_find_encoder() was intended. */
    video_codec = avcodec_find_decoder(isHevc?AV_CODEC_ID_H265:AV_CODEC_ID_H264);
    if (!video_codec) {
        LOGD("Crash:Could not find encoder for '%s' ",
                avcodec_get_name(isHevc?AV_CODEC_ID_H265:AV_CODEC_ID_H264));
        env->ReleaseStringUTFChars(file_path, filename);
        return;
    }

    videoStream = avformat_new_stream(oc,video_codec);
    if (!videoStream) {
        LOGD("Crash:Could not allocate video stream\n");
        env->ReleaseStringUTFChars(file_path, filename);
        return;
    }
    videoStream->time_base = (AVRational){ 1, fps};
    videoStream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
    videoStream->codecpar->width = width;
    videoStream->codecpar->height = height;
    // NOTE(review): 600000 here vs 6000000 on the codec context below —
    // inconsistent; confirm which bitrate is intended.
    videoStream->codecpar->bit_rate = 600000;
    videoStream->codecpar->codec_id = isHevc?AV_CODEC_ID_H265:AV_CODEC_ID_H264;
    videoStream->codecpar->format = AV_PIX_FMT_YUV420P;

    // Mirror the parameters onto the (deprecated) per-stream AVCodecContext,
    // kept for compatibility with the FFmpeg build this project links against.
    AVCodecContext *videoContext = videoStream->codec;
    videoContext->width = width;
    videoContext->codec_type = AVMEDIA_TYPE_VIDEO;
    videoContext->height = height;
    videoContext->bit_rate = 6000000;
    videoContext->codec_id = isHevc?AV_CODEC_ID_H265:AV_CODEC_ID_H264;
    videoContext->time_base = (AVRational){ 1, fps};
    videoContext->pix_fmt = AV_PIX_FMT_YUV420P;

    ret = avcodec_open2(videoContext,video_codec,&opt);
    if(ret<0)
        LOGD("Failed to open video encoder!\n"); // non-fatal: muxing does not need an open codec

    audioFormat = audio_format;  // remembered for later (0x7A25 == u-law input)
    enum AVCodecID audioID = AV_CODEC_ID_AAC; // the muxed stream is AAC after transcoding
    audio_codec = avcodec_find_decoder(audioID);
    if (!audio_codec) {
        LOGD("Crash:Could not find encoder for '%s' ",
                avcodec_get_name(audioID));
        env->ReleaseStringUTFChars(file_path, filename);
        return;
    }
    audioStream = avformat_new_stream(oc,audio_codec);
    if (!audioStream) {
        LOGD("Crash:Could not allocate audio stream\n");
        env->ReleaseStringUTFChars(file_path, filename);
        return;
    }
    audioStream->codecpar->codec_type = AVMEDIA_TYPE_AUDIO;
    audioStream->codecpar->format =  audio_codec->sample_fmts ?audio_codec->sample_fmts[0] : AV_SAMPLE_FMT_FLTP;
    audioStream->codecpar->bit_rate = audio_bits_rate;
    audioStream->codecpar->codec_id = audioID;
    audioStream->codecpar->channel_layout = AV_CH_LAYOUT_MONO;
    // NOTE(review): audio_channel is passed where a channel *layout* is
    // expected — confirm; the codec context below hard-codes 1 channel.
    audioStream->codecpar->channels = av_get_channel_layout_nb_channels(audio_channel);
    audioStream->codecpar->sample_rate = audio_sample_rate;
    // NOTE(review): AV_CODEC_CAP_VARIABLE_FRAME_SIZE is a codec capability
    // flag, not a frame size — kept as-is from the original, but confirm.
    audioStream->codecpar->frame_size = AV_CODEC_CAP_VARIABLE_FRAME_SIZE;
    audioStream->time_base = (AVRational){ 1, audio_frame_rate};
    // Mirror onto the deprecated AVCodecContext, as with video above.
    AVCodecContext *audioContext = audioStream->codec;
    audioContext->codec_type = AVMEDIA_TYPE_AUDIO;
    audioContext->bit_rate = audio_bits_rate;
    audioContext->codec_id = audioID;
    audioContext->channel_layout = AV_CH_LAYOUT_MONO;
    audioContext->channels = 1;
    audioContext->sample_rate = audio_sample_rate;
    audioContext->time_base = (AVRational){ 1, audio_frame_rate};
    audioContext->frame_size = AV_CODEC_CAP_VARIABLE_FRAME_SIZE;
    audioContext->sample_fmt = audio_codec->sample_fmts ?audio_codec->sample_fmts[0] : AV_SAMPLE_FMT_FLTP;
    ret = avcodec_open2(audioContext,audio_codec,&opt);
    if(ret<0)
        LOGD("Failed to open audio encoder!\n"); // non-fatal, as above

    video_st = videoStream;
    audio_st = audioStream;

    av_dump_format(oc, 0, filename, 1);

    fmt = oc->oformat; // remember the container format for stopRecord
    // Only open a file when the format actually needs one (AVFMT_NOFILE
    // formats manage their own I/O) — standard FFmpeg muxing pattern.
    if (!(fmt->flags & AVFMT_NOFILE)) {
        ret = avio_open(&oc->pb, filename, AVIO_FLAG_WRITE);
        if (ret < 0) {
            LOGD("Crash:Could not open '%s': %s\n", filename,
                 av_err2str(ret));
            env->ReleaseStringUTFChars(file_path, filename);
            avformat_free_context(oc);
            oc = NULL;
            return;
        }
    }

    ret = avformat_write_header(oc, &opt);
    if (ret < 0) {
        LOGD("Crash: Error occurred when opening output file %s: %s %d\n",
                filename,av_err2str(ret),ret);
        env->ReleaseStringUTFChars(file_path, filename);
        if (!(fmt->flags & AVFMT_NOFILE))
            avio_closep(&oc->pb);
        avformat_free_context(oc);
        oc = NULL;
        return;
    }

    // Set up the G.711 -> AAC transcoder used by addAudioData.
    InitParam initParam;
    initParam.u32AudioSamplerate=audio_sample_rate;
    initParam.ucAudioChannel=1;
    initParam.u32PCMBitSize=16;
    initParam.ucAudioCodec =audio_format==0x7A25?Law_ULaw:Law_ALaw;    // Law_uLaw  Law_ALaw
    handle = Easy_AACEncoder_Init(initParam);

    env->ReleaseStringUTFChars(file_path, filename); // was leaked before
}

extern "C"
JNIEXPORT void JNICALL
Java_com_roy_ffmpegwriter_FfmpegWriter_addVideoData(JNIEnv *env, jobject thiz, jbyteArray data,
                                                    jint length, jboolean is_iframe) {
    /*
     * Muxes one already-encoded video frame into stream 0. PTS/DTS are
     * synthesized from a running frame index and the fixed frame rate given
     * to startRecordWithFilePath; no B-frames are expected, so dts == pts.
     */
    if (oc == NULL) {
        // startRecordWithFilePath was not called or failed; previously this
        // would have crashed on the oc->streams[0] dereference.
        LOGD("addVideoData called before startRecord");
        return;
    }
    uint8_t *buf = (uint8_t*)env->GetByteArrayElements(data,NULL);
    if (buf == NULL) {
        LOGD("GetByteArrayElements failed");
        return;
    }
    AVStream *pst = oc->streams[0];

    AVPacket pkt;
    av_init_packet(&pkt);
    pkt.flags |= is_iframe?AV_PKT_FLAG_KEY:0; // keyframes flagged so seeking works
    pkt.stream_index = pst->index;
    pkt.data = buf;
    pkt.size = length;

    // One frame's duration in AV_TIME_BASE units, then converted into the
    // stream's own time base.
    int64_t calc_duration = (int64_t)((double)AV_TIME_BASE / fps);
    pkt.pts = (int64_t)((double)(video_frameIndex*calc_duration) /
                        (double)(av_q2d(pst->time_base)*AV_TIME_BASE));
    pkt.dts = pkt.pts; // no B-frames
    pkt.duration = (int64_t)((double)calc_duration /
                             (double)(av_q2d(pst->time_base)*AV_TIME_BASE));
    pkt.pos = -1;
    video_frameIndex++;

    // The previous revision called av_rescale_q_rnd from pst->time_base to
    // the very same pst->time_base — an identity operation — so those calls
    // were removed.

    int ret = av_interleaved_write_frame(oc, &pkt);
    if (ret < 0) {
        LOGD("cannot write video frame,but also continue");
    }

    // JNI_ABORT: the buffer was only read, so skip the copy-back into the
    // Java array (mode 0 would needlessly copy it back).
    env->ReleaseByteArrayElements(data,(jbyte*)buf,JNI_ABORT);
}

extern "C"
JNIEXPORT void JNICALL
Java_com_roy_ffmpegwriter_FfmpegWriter_addAudioData(JNIEnv *env, jobject thiz, jbyteArray data,
                                                    jint length) {
    /*
     * Transcodes one G.711 chunk to AAC via EasyAACEncoder and muxes the
     * result into stream 1. Timestamps are synthesized from a running frame
     * index and the audio frame rate given to startRecordWithFilePath.
     */
    if (oc == NULL) {
        LOGD("addAudioData called before startRecord");
        return;
    }
    uint8_t *buf = (uint8_t*)env->GetByteArrayElements(data,NULL);
    if (buf == NULL) {
        LOGD("GetByteArrayElements failed");
        return;
    }

    // 5x head-room for the AAC output; the encoder rewrites aacSize with the
    // number of bytes actually produced.
    unsigned int aacSize = (unsigned int)(5*length);
    uint8_t *aacBuff = (uint8_t*)malloc(aacSize);
    if (aacBuff == NULL) {
        // malloc was previously unchecked — a failure would have crashed.
        LOGD("cannot allocate audio transcode buffer");
        env->ReleaseByteArrayElements(data,(jbyte*)buf,JNI_ABORT);
        return;
    }
    int encRet = Easy_AACEncoder_Encode(handle,buf,length,aacBuff,&aacSize);
    if (encRet <= 0 || aacSize == 0) {
        // NOTE(review): assuming <=0 / zero output means "nothing produced" —
        // confirm against the EasyAACEncoder API. The previous revision
        // ignored the result and muxed whatever bytes were in the buffer.
        LOGD("Easy_AACEncoder_Encode produced no data (ret=%d)", encRet);
        free(aacBuff);
        env->ReleaseByteArrayElements(data,(jbyte*)buf,JNI_ABORT);
        return;
    }
    AVStream *pst = oc->streams[1];

    AVPacket pkt;
    av_init_packet(&pkt);
    pkt.stream_index = pst->index;
    pkt.data = aacBuff;
    pkt.size = aacSize;

    // One audio "frame" duration in AV_TIME_BASE units, converted into the
    // stream's own time base; dts == pts for audio.
    int64_t calc_duration = (int64_t)((double)AV_TIME_BASE / audioFps);
    pkt.pts = (int64_t)((double)(audio_frameIndex*calc_duration) /
                        (double)(av_q2d(pst->time_base)*AV_TIME_BASE));
    pkt.dts = pkt.pts;
    pkt.duration = (int64_t)((double)calc_duration /
                             (double)(av_q2d(pst->time_base)*AV_TIME_BASE));
    pkt.pos = -1;
    audio_frameIndex++;

    // Identity av_rescale_q_rnd calls (same source and destination time base)
    // from the previous revision were removed.

    int ret = av_interleaved_write_frame(oc, &pkt);
    if (ret < 0) {
        LOGD("cannot write audio frame,but also continue");
    }

    // JNI_ABORT: input was only read, no copy-back needed.
    env->ReleaseByteArrayElements(data,(jbyte*)buf,JNI_ABORT);
    free(aacBuff);
}

extern "C"
JNIEXPORT void JNICALL
Java_com_roy_ffmpegwriter_FfmpegWriter_stopRecord(JNIEnv *env, jobject thiz) {
    /*
     * Finalizes the recording: writes the container trailer, closes the
     * output file (when the format owns one) and frees the muxer context.
     * Now safe to call when startRecord never succeeded, and resets the
     * globals so a stale oc cannot be used after free.
     */
    if (oc != NULL) {
        av_write_trailer(oc);

        if (fmt != NULL && !(fmt->flags & AVFMT_NOFILE))
            /* Close the output file. */
            avio_closep(&oc->pb);

        avformat_free_context(oc);
        oc = NULL;        // guard against double-stop / writes after stop
        fmt = NULL;
        video_st = NULL;
        audio_st = NULL;
    }
    // NOTE(review): released unconditionally as before — confirm the library
    // tolerates a handle that was never initialized.
    Easy_AACEncoder_Release(handle);
}

extern "C"
JNIEXPORT void JNICALL
Java_com_roy_ffmpegwriter_FfmpegWriter_checkLog(JNIEnv *env, jobject thiz) {
    /* Diagnostic hook: emits a fixed tag so logcat output from this library
     * can be verified from the Java side. */
    (void) env;
    (void) thiz;
    LOGD("Java_com_roy_ffmpegwriter_ffmpegWriter_checkLog");
}

extern "C"
JNIEXPORT jint JNICALL
Java_com_roy_ffmpegwriter_FfmpegWriter_checkJNI(JNIEnv *env, jobject thiz) {
    // Round-trip sanity value: the Java side checks for the constant 30 to
    // confirm the JNI binding works.
    (void) env;
    (void) thiz;
    const jint kProbeValue = 30;
    return kProbeValue;
}

