#include <jni.h>
#include <android/log.h>
#include "com_zagj_videocomparess_utils_MediaLive.h"
//
// Created by Chen qin lang on 2018/6/27.
//
extern "C"{
#include <libavutil/imgutils.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
}

#define LOGE(format, ...) __android_log_print(ANDROID_LOG_ERROR, "(>_<)", format,  ##__VA_ARGS__)
// Muxer/encoder state shared between initVideo() and onFrameCallback().
// NOTE(review): plain globals, not thread-safe — assumes all JNI calls
// arrive on a single thread; confirm against the Java caller.
AVFormatContext *ofmt_ctx;      // FLV output container (opened in initVideo)
AVCodec *pCodec;                // H.264 encoder
AVCodecContext *pCodecContext;  // encoder context
AVStream *outStream;            // the single video stream inside ofmt_ctx
AVFrame *YUVFrame;              // frame holding one YUV420P picture per callback
int width = 320;                // fixed output resolution
int height = 240;
int fps = 15;                   // fixed output frame rate
int yuv_width;                  // set in initVideo(): copy of width
int yuv_height;                 // set in initVideo(): copy of height
int y_length;                   // bytes in the Y plane (width * height)
int uv_length;                  // bytes per chroma plane (width * height / 4)
int count;                      // frames encoded so far (used for pts)

JNIEXPORT jint JNICALL
Java_com_zagj_videocomparess_utils_MediaLive_initVideo(JNIEnv *env, jobject instance,
                                                          jstring url_) {
    // Open an FLV output (typically an RTMP url), create an H.264 encoder
    // tuned for low latency, and write the container header.
    // Returns 0 on success, -1 on any failure (details are logged).
    const char *video_str = env->GetStringUTFChars(url_, nullptr);
    AVDictionary *param = NULL;
    int ret;

    // Plane sizes of one YUV420 frame at the fixed output resolution.
    yuv_width = width;
    yuv_height = height;
    y_length = width * height;
    uv_length = width * height / 4;

    av_register_all();
    avformat_network_init();
    LOGE("videoURl :%s", video_str);

    ret = avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", video_str);
    if (ret < 0) {
        LOGE("avformat_alloc_output_context2 failed %s\n", av_err2str(ret));
        goto fail;
    }

    pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!pCodec) {
        LOGE("find encode failed %p\n", pCodec);
        goto fail;
    }

    pCodecContext = avcodec_alloc_context3(pCodec);
    if (!pCodecContext) {
        LOGE("avcodec_alloc_context3 failed\n");
        goto fail;
    }
    pCodecContext->codec_id = pCodec->id;
    pCodecContext->bit_rate = 400000;
    pCodecContext->time_base = AVRational{1, fps};
    pCodecContext->framerate = AVRational{fps, 1};
    pCodecContext->width = width;
    pCodecContext->height = height;
    pCodecContext->gop_size = 40;
    // No B-frames: they add reorder delay, unacceptable for live streaming.
    pCodecContext->max_b_frames = 0;
    pCodecContext->has_b_frames = 0;
    pCodecContext->codec_type = AVMEDIA_TYPE_VIDEO;
    pCodecContext->pix_fmt = AV_PIX_FMT_YUV420P;
    // FLV needs SPS/PPS in extradata (global header), not in-band.
    // (AV_CODEC_FLAG_GLOBAL_HEADER replaces the removed CODEC_FLAG_GLOBAL_HEADER.)
    if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
        pCodecContext->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
    pCodecContext->qcompress = 0.6;
    // Quantizer bounds (quality range).
    pCodecContext->qmin = 10;
    pCodecContext->qmax = 51;

    if (pCodecContext->codec_id == AV_CODEC_ID_H264) {
        // These two options matter a lot: without them x264 buffers many
        // frames and the end-to-end latency becomes very large.
        // Presets: ultrafast, superfast, veryfast, ... placebo (speed/quality).
        ret = av_dict_set(&param, "preset", "superfast", 0);
        LOGE("av_dict_set %s", av_err2str(ret));
        ret = av_dict_set(&param, "tune", "zerolatency", 0);
        LOGE("av_dict_set %s", av_err2str(ret));
    }

    // NOTE: the original wrote `if (ret = avcodec_open2(...) < 0)`, which due
    // to precedence stored the *comparison* in ret; parenthesized here so the
    // real error code is kept and logged.
    if ((ret = avcodec_open2(pCodecContext, pCodec, &param)) < 0) {
        LOGE("open codec failed %s", av_err2str(ret));
        goto fail;
    }
    av_dict_free(&param);   // avcodec_open2 consumed what it recognized

    // Create the AVStream that will carry the encoded video.
    outStream = avformat_new_stream(ofmt_ctx, pCodec);
    if (!outStream) {
        LOGE("avformat_new_stream failed");
        goto fail;
    }
    outStream->time_base.num = 1;
    outStream->time_base.den = fps;
    outStream->codecpar->codec_tag = 0;
    // Copy the encoder parameters into the stream for the muxer.
    avcodec_parameters_from_context(outStream->codecpar, pCodecContext);

    av_dump_format(ofmt_ctx, 0, video_str, 1);

    if ((ret = avio_open2(&ofmt_ctx->pb, video_str, AVIO_FLAG_READ_WRITE, NULL, NULL)) < 0) {
        LOGE("avio open failed %s", av_err2str(ret));
        goto fail;
    }

    if ((ret = avformat_write_header(ofmt_ctx, NULL)) < 0) {
        LOGE("write head failed %s", av_err2str(ret));
        goto fail;
    }

    env->ReleaseStringUTFChars(url_, video_str);
    return 0;

fail:
    // Release the option dict (no-op if already freed) and the JNI string,
    // which the original leaked on every exit path.
    av_dict_free(&param);
    env->ReleaseStringUTFChars(url_, video_str);
    return -1;
}

JNIEXPORT jint JNICALL
Java_com_zagj_videocomparess_utils_MediaLive_onFrameCallback(JNIEnv *env, jobject instance,
                                                                jbyteArray buffer_) {
    // Encode one camera frame and mux it into the live stream.
    // The input is assumed to be semi-planar YUV (NV21: Y plane followed by
    // interleaved V/U pairs, the Android camera default) — TODO confirm
    // against the Java-side preview format.
    // Returns 0 on success (including "encoder still buffering"), <0 on error.
    jbyte *in = env->GetByteArrayElements(buffer_, NULL);
    AVPacket *packet = NULL;
    uint8_t *buffers = NULL;
    int buffer_size;
    int ret = AVERROR(ENOMEM);

    YUVFrame = av_frame_alloc();
    if (!YUVFrame) {
        LOGE("av_frame_alloc failed");
        goto end;
    }
    buffer_size = av_image_get_buffer_size(pCodecContext->pix_fmt, pCodecContext->width,
                                           pCodecContext->height, 1);
    buffers = (uint8_t *) av_malloc(buffer_size);
    if (!buffers) {
        LOGE("av_malloc failed");
        goto end;
    }
    // Point frame->data / frame->linesize into the freshly allocated buffer.
    av_image_fill_arrays(YUVFrame->data, YUVFrame->linesize, buffers, pCodecContext->pix_fmt,
                         pCodecContext->width, pCodecContext->height, 1);

    // Copy the luma plane, then de-interleave chroma into separate U/V planes.
    // BUG FIX: the original wrote 2*uv_length bytes into data[2] only,
    // overrunning the V plane (the last plane of the allocation) and never
    // filling data[1].
    memcpy(YUVFrame->data[0], in, y_length);
    for (int i = 0; i < uv_length; ++i) {
        YUVFrame->data[2][i] = (uint8_t) in[y_length + 2 * i];      // V
        YUVFrame->data[1][i] = (uint8_t) in[y_length + 2 * i + 1];  // U
    }
    YUVFrame->format = AV_PIX_FMT_YUV420P;
    YUVFrame->width = width;
    YUVFrame->height = height;
    // pts in the encoder time_base (1/fps): one tick per frame.
    YUVFrame->pts = count;

    ret = avcodec_send_frame(pCodecContext, YUVFrame);
    if (ret < 0) {
        LOGE("avcodec send frame failed %s", av_err2str(ret));
        goto end;
    }

    // BUG FIX: the original declared `AVPacket *packet;` and passed the
    // uninitialized pointer to av_init_packet (undefined behavior).
    packet = av_packet_alloc();
    if (!packet) {
        ret = AVERROR(ENOMEM);
        goto end;
    }

    // BUG FIX: the original never assigned the receive_packet result to ret,
    // so its error check re-tested the send_frame result.
    ret = avcodec_receive_packet(pCodecContext, packet);
    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
        // Encoder is still buffering; the frame was consumed, just no
        // output packet yet. Not an error.
        count++;
        ret = 0;
        goto end;
    }
    if (ret < 0) {
        LOGE("avcodec_receive_packet failed %s", av_err2str(ret));
        goto end;
    }

    packet->stream_index = outStream->index;
    // Convert timestamps from the encoder time_base to the muxer's stream
    // time_base (the FLV muxer switches the stream to 1/1000 at
    // write_header time). Replaces the original ad-hoc AV_TIME_BASE math.
    av_packet_rescale_ts(packet, pCodecContext->time_base, outStream->time_base);
    packet->pos = -1;

    ret = av_interleaved_write_frame(ofmt_ctx, packet);
    if (ret < 0) {
        LOGE("av_interleaved_write_frame failed %s", av_err2str(ret));
        goto end;
    }
    count++;
    ret = 0;

end:
    // The original leaked the packet, frame, pixel buffer and the pinned
    // Java array on every call.
    av_packet_free(&packet);
    av_frame_free(&YUVFrame);       // frees the frame struct, not `buffers`
    av_free(buffers);
    env->ReleaseByteArrayElements(buffer_, in, JNI_ABORT);  // input was read-only
    return ret;
}
