#include <jni.h>
#include <android/log.h>
#include "com_zagj_videocomparess_utils_MediaLive.h"
//
// Created by Chen qin lang on 2018/6/27.
//
extern "C" {
#include <libavutil/imgutils.h>
#include <libavformat/avformat.h>
#include "libavcodec/avcodec.h"
#include <libavdevice/avdevice.h>
#include <libswscale/swscale.h>
#include <libavutil/time.h>
#include <libavutil/mathematics.h>

}

// Error-level logcat helper; all native diagnostics go through this tag.
#define LOGE(format, ...) __android_log_print(ANDROID_LOG_ERROR, "(>_<)", format,  ##__VA_ARGS__)
// ADTS->ASC bitstream filter, required to mux AAC into FLV (init'd in initVideo).
AVBitStreamFilterContext* faacbsfc = NULL;
// Output (FLV/RTMP) muxer context shared by all JNI entry points below.
AVFormatContext *ofmt_ctx;
// Video (H.264) and audio (AAC) encoders and their contexts.
AVCodec* pCodec,*pCodec_a;
AVCodecContext* pCodecCtx,*pCodecCtx_a;
// Output streams created on ofmt_ctx for video and audio.
AVStream* video_st,*audio_st;
// Reusable encoded-packet holders for video and audio paths.
AVPacket enc_pkt,enc_pkt_a;
// pFrameYUV: raw video frame; pFrame: raw audio frame.
AVFrame *pFrameYUV,*pFrame;

char *filedir;
// Fixed capture dimensions; NOTE(review): hard-coded — confirm they match the camera preview size.
int width = 600;
int height = 800;
int framecnt = 0;      // encoded video frame counter
int framecnt_a = 0;    // encoded audio frame counter
int nb_samples = 0;    // running count of encoded audio samples
int yuv_width;
int yuv_height;
int y_length;          // bytes in the Y plane (width*height)
int uv_length;         // bytes in each chroma plane (width*height/4)
int64_t start_time;    // av_gettime() at stream start; PTS are wall-clock relative to this
int aud_pts;
int vid_pts;
int frameSize = 0;     // encoder's audio frame_size (samples per channel)
int init_video(){
    //�������ĳ�ʼ��
    pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!pCodec){
        LOGE("Can not find video encoder!\n");
        return -1;
    }
    pCodecCtx = avcodec_alloc_context3(pCodec);
    pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
    pCodecCtx->width = width;
    pCodecCtx->height = height;
    pCodecCtx->time_base.num = 1;
    pCodecCtx->time_base.den = 30;
    pCodecCtx->bit_rate = 800000;
    pCodecCtx->gop_size = 250;
    /* Some formats want stream headers to be separate. */
    if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
        pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
    pCodecCtx->qmin = 10;
    pCodecCtx->qmax = 51;
    //Optional Param
    pCodecCtx->max_b_frames = 3;
    // Set H264 preset and tune
    AVDictionary *param = 0;
    //av_dict_set(&param, "preset", "ultrafast", 0);
    av_dict_set(&param, "preset", "veryfast", 0);
    av_dict_set(&param, "tune", "zerolatency", 0);

    if (avcodec_open2(pCodecCtx, pCodec, &param) < 0){
        LOGE("Failed to open video encoder!\n");
        return -1;
    }

    //Add a new stream to output,should be called by the user before avformat_write_header() for muxing
    video_st = avformat_new_stream(ofmt_ctx, pCodec);
    if (video_st == NULL){
        return -1;
    }
    video_st->time_base.num = 1;
    video_st->time_base.den = 30;
    video_st->codec = pCodecCtx;

    return 0;
}

/**
 * Initialise the AAC audio encoder (stereo, 44.1 kHz, S16, 64 kbps) and add an
 * audio stream to the global output context.
 *
 * Requires ofmt_ctx to be allocated already. Returns 0 on success, -1 on error
 * (callers test the result with "!= 0" / "< 0").
 */
int InitAudio(){
    LOGE("InitAudio");
    int ret;
    AVCodec* pcodec_a= avcodec_find_encoder(AV_CODEC_ID_AAC);
    LOGE("avcodec_find_encoder_audio %p",pcodec_a);
    if (!pcodec_a){
        LOGE("avcodec_find_encoder_audio failed ");
        return -1;
    }
    pCodecCtx_a= avcodec_alloc_context3(pcodec_a);
    if (!pCodecCtx_a){
        LOGE("avcodec_alloc_context3_pCodecCtx_a");
        return -1;
    }
    LOGE("avcodec_alloc_context3_pCodecCtx_a %p",pCodecCtx_a);

    pCodecCtx_a->channels = 2;
    pCodecCtx_a->channel_layout = av_get_default_channel_layout(
            pCodecCtx_a->channels);
    pCodecCtx_a->sample_rate = 44100;//44100 8000
    // NOTE(review): the native FFmpeg AAC encoder usually requires FLTP input;
    // confirm S16 is accepted by the encoder built into this FFmpeg.
    pCodecCtx_a->sample_fmt = AV_SAMPLE_FMT_S16;
    pCodecCtx_a->bit_rate = 64000;
    pCodecCtx_a->time_base.num = 1;
    pCodecCtx_a->time_base.den = pCodecCtx_a->sample_rate;
    // Native AAC encoder is flagged experimental in this FFmpeg generation.
    pCodecCtx_a->strict_std_compliance = FF_COMPLIANCE_EXPERIMENTAL;
    // Some formats want stream headers to be separate.
    if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
        pCodecCtx_a->flags |= CODEC_FLAG_GLOBAL_HEADER;

    // fix: "ret=...<0" bound ret to the comparison result due to precedence,
    // so av_err2str(ret) logged garbage; parenthesize the assignment.
    if ((ret = avcodec_open2(pCodecCtx_a, pcodec_a, NULL)) < 0){
        LOGE("avcodec_open2_failed %s",av_err2str(ret) );
       return -1;
    }
    LOGE("avcodec_open2");
    audio_st=avformat_new_stream(ofmt_ctx,pcodec_a);
    if (!audio_st){
        LOGE("avformat_new_stream %p",audio_st);
        return -1;
    }
    audio_st->time_base.num = 1;
    audio_st->time_base.den = pCodecCtx_a->sample_rate;
    audio_st->codec=pCodecCtx_a;
    // fix: used to return 1, which the caller's "InitAudio()!=0" success check
    // treated as failure, aborting initialisation on every run.
    return 0;
}

/**
 * Alternative AAC encoder setup (stereo, 44.1 kHz, S16, 64 kbps); creates the
 * audio output stream on ofmt_ctx. Returns 0 on success, -1 on error.
 * Currently unreferenced except by commented-out code; kept for compatibility.
 */
int init_audio(){
    pCodec_a = avcodec_find_encoder(AV_CODEC_ID_AAC);
    if (pCodec_a == NULL){
        LOGE("Can not find audio encoder!\n");
        return -1;
    }

    pCodecCtx_a = avcodec_alloc_context3(pCodec_a);

    // Stereo, interleaved 16-bit samples at CD-quality rate.
    pCodecCtx_a->channels       = 2;
    pCodecCtx_a->channel_layout = av_get_default_channel_layout(2);
    pCodecCtx_a->sample_rate    = 44100; // 44100 or 8000
    pCodecCtx_a->sample_fmt     = AV_SAMPLE_FMT_S16;
    pCodecCtx_a->bit_rate       = 64000;
    pCodecCtx_a->time_base.num  = 1;
    pCodecCtx_a->time_base.den  = pCodecCtx_a->sample_rate;
    // Native AAC encoder requires the experimental-compliance opt-in.
    pCodecCtx_a->strict_std_compliance = FF_COMPLIANCE_EXPERIMENTAL;

    /* Some formats want stream headers to be separate. */
    if ((ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER) != 0)
        pCodecCtx_a->flags |= CODEC_FLAG_GLOBAL_HEADER;

    if (avcodec_open2(pCodecCtx_a, pCodec_a, NULL) < 0){
        LOGE("Failed to open audio encoder!\n");
        return -1;
    }

    audio_st = avformat_new_stream(ofmt_ctx, pCodec_a);
    if (audio_st == NULL)
        return -1;

    audio_st->time_base.num = 1;
    audio_st->time_base.den = pCodecCtx_a->sample_rate;
    audio_st->codec = pCodecCtx_a;

    return 0;
}
/**
 * JNI entry point: set up the FLV output for the given URL, initialise the
 * H.264 and AAC encoders, write the muxer header and record the start time.
 *
 * @param url_ output URL (e.g. an rtmp:// address or file path)
 * @return 0 on success, -1 on any failure
 */
JNIEXPORT jint JNICALL Java_com_zagj_videocomparess_utils_MediaLive_initVideo(JNIEnv *env, jobject instance,
                                                       jstring url_) {
    const char *out_path = env->GetStringUTFChars(url_, 0);
    if (!out_path) {
        LOGE("GetStringUTFChars failed");
        return -1;
    }
    LOGE("open out URl %s", out_path);

    // Pre-compute YUV420P plane sizes for the fixed capture dimensions.
    yuv_width = width;
    yuv_height = height;
    y_length = width * height;
    uv_length = width * height / 4;

    av_register_all();
    avformat_network_init();
    avdevice_register_all();
    // ADTS->ASC filter: FLV carries raw AAC, not ADTS-framed AAC.
    faacbsfc = av_bitstream_filter_init("aac_adtstoasc");

    // Initialise the FLV output format context for the target URL.
    avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
    if (!ofmt_ctx) {    // fix: result was never checked before use
        LOGE("Failed to allocate output context!\n");
        env->ReleaseStringUTFChars(url_, out_path);
        return -1;
    }
    if (init_video() != 0) {
        env->ReleaseStringUTFChars(url_, out_path);
        return -1;
    }
    // fix: check "< 0" instead of "!= 0" — InitAudio historically reported
    // success with a non-negative value, so "!= 0" always aborted here.
    if (InitAudio() < 0) {
        env->ReleaseStringUTFChars(url_, out_path);
        return -1;
    }
    //Open output URL,set before avformat_write_header() for muxing
    if (avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE) < 0) {
        LOGE("Failed to open output file!\n");
        env->ReleaseStringUTFChars(url_, out_path);
        return -1;
    }
    env->ReleaseStringUTFChars(url_, out_path);   // fix: UTF chars were leaked

    //Write File Header
    avformat_write_header(ofmt_ctx, NULL);

    // All packet PTS are computed as wall-clock offsets from this instant.
    start_time = av_gettime();
    return 0;
}
/**
 * JNI entry point: encode one PCM buffer from Java to AAC and mux it.
 *
 * @param au_data  interleaved S16 PCM samples from the Java recorder
 * @param datasize number of valid bytes in au_data
 * @return 0 (errors are logged; return value is not currently inspected)
 *
 * NOTE(review): assumes datasize <= one encoder frame's worth of samples
 * (frame_size * channels * 2 bytes) — confirm against the Java caller.
 */
JNIEXPORT jint JNICALL Java_com_zagj_videocomparess_utils_MediaLive_startAudio
        (JNIEnv * env, jobject obj,jbyteArray au_data,jint datasize){
    int ret;
    int enc_got_frame = 0;

    // Build a raw audio frame matching the encoder's expected layout.
    pFrame = av_frame_alloc();
    if (!pFrame)                     // fix: allocation was never checked
        return -1;
    pFrame->nb_samples = pCodecCtx_a->frame_size;
    frameSize = pFrame->nb_samples;
    pFrame->format = pCodecCtx_a->sample_fmt;
    pFrame->channel_layout = pCodecCtx_a->channel_layout;
    pFrame->sample_rate = pCodecCtx_a->sample_rate;

    int size = av_samples_get_buffer_size(NULL, pCodecCtx_a->channels,
                                          pCodecCtx_a->frame_size, pCodecCtx_a->sample_fmt, 1);
    LOGE("av_samples_get_buffer_size");
    uint8_t *frame_buf = (uint8_t *) av_malloc(size * 4);
    if (!frame_buf) {                // fix: allocation was never checked
        av_frame_free(&pFrame);
        return -1;
    }
    avcodec_fill_audio_frame(pFrame, pCodecCtx_a->channels, pCodecCtx_a->sample_fmt,
                             (const uint8_t *) frame_buf, size, 1);
    LOGE("avcodec_fill_audio_frame");

    jbyte *in = env->GetByteArrayElements(au_data, 0);
    if (!in) {                       // fix: missing null check before memcpy
        av_free(frame_buf);
        av_frame_free(&pFrame);
        return -1;
    }
    memcpy(frame_buf, in, static_cast<size_t>(datasize));
    LOGE("audio_memcpy");
    pFrame->data[0] = frame_buf;
    env->ReleaseByteArrayElements(au_data, in, 0);

    enc_pkt_a.data = NULL;
    enc_pkt_a.size = 0;
    av_init_packet(&enc_pkt_a);
    nb_samples += pFrame->nb_samples;

    ret = avcodec_encode_audio2(pCodecCtx_a, &enc_pkt_a, pFrame, &enc_got_frame);
    LOGE("avcodec_encode_audio2");
    av_frame_free(&pFrame);
    // fix: frame_buf was leaked on every call (av_frame_free does not free
    // externally filled buffers). The encoder owns enc_pkt_a's data, so this
    // buffer is no longer referenced here.
    av_free(frame_buf);

    if (enc_got_frame == 1){
        LOGE("Succeed to encode audio frame: %5d\tsize:%5d\t bufsize:%5d\n ", framecnt_a, enc_pkt_a.size,size);
        framecnt_a++;
        enc_pkt_a.stream_index = audio_st->index;
        // Convert the ADTS-framed AAC packet to the ASC form FLV requires.
        av_bitstream_filter_filter(faacbsfc, pCodecCtx_a, NULL, &enc_pkt_a.data, &enc_pkt_a.size,
                                   enc_pkt_a.data, enc_pkt_a.size, 0);

        // Timestamps: wall-clock offset from start_time, rescaled from
        // microseconds (AV_TIME_BASE_Q) to the stream's time base.
        AVRational time_base = ofmt_ctx->streams[audio_st->index]->time_base;
        AVRational r_framerate1 = { pCodecCtx_a->sample_rate, 1 };
        AVRational time_base_q = AV_TIME_BASE_Q;

        // Duration between two "frames" in microseconds (internal time base).
        int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1));

        int64_t timett = av_gettime();
        int64_t now_time = timett - start_time;
        enc_pkt_a.pts = av_rescale_q(now_time, time_base_q, time_base);
        enc_pkt_a.dts = enc_pkt_a.pts;
        enc_pkt_a.duration = av_rescale_q(calc_duration, time_base_q, time_base);
        enc_pkt_a.pos = -1;

        ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt_a);
        if (ret < 0)
            LOGE("audio av_interleaved_write_frame failed %s", av_err2str(ret));
        av_free_packet(&enc_pkt_a);
    }
    return 0;
}
/**
 * JNI entry point: convert one camera preview buffer to YUV420P, encode it
 * with H.264 and mux the resulting packet with wall-clock timestamps.
 *
 * @param buffer_ raw preview frame (Y plane followed by interleaved chroma)
 * @return 0 on success, -1 on bad input, or the negative mux error code
 */
JNIEXPORT jint JNICALL
Java_com_zagj_videocomparess_utils_MediaLive_onFrameCallback(JNIEnv *env, jobject instance,
                                                             jbyteArray buffer_) {
    int ret;
    int enc_got_frame = 0;

    jbyte *in = env->GetByteArrayElements(buffer_, NULL);
    if (!in) {
        LOGE("byte is null");
        return -1;
    }
    pFrameYUV = av_frame_alloc();
    // Allocate and wire up the planar YUV420P buffer (fills data/linesize).
    uint8_t *out_buffer = (uint8_t *) av_malloc(
            avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
    if (!out_buffer) {               // fix: allocation was never checked
        av_frame_free(&pFrameYUV);
        env->ReleaseByteArrayElements(buffer_, in, 0);
        return -1;
    }
    avpicture_fill((AVPicture *) pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width,
                   pCodecCtx->height);

    memcpy(pFrameYUV->data[0], in, y_length);
    // De-interleave the chroma plane into separate U/V planes.
    // NOTE(review): the even byte goes to data[2] (V) and the odd byte to
    // data[1] (U), i.e. this assumes NV21 input — confirm with the Java caller.
    for (int i = 0; i < uv_length; ++i) {
        *(pFrameYUV->data[2] + i) = *(in + y_length + i * 2);
        *(pFrameYUV->data[1] + i) = *(in + y_length + i * 2 + 1);
    }
    env->ReleaseByteArrayElements(buffer_, in, 0);
    pFrameYUV->format = AV_PIX_FMT_YUV420P;
    pFrameYUV->width = yuv_width;
    pFrameYUV->height = yuv_height;

    enc_pkt.data = NULL;
    enc_pkt.size = 0;
    av_init_packet(&enc_pkt);
    ret = avcodec_encode_video2(pCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);

    // Free the raw frame and its pixel buffer: the encoded packet's data is
    // owned by the encoder, not by out_buffer.
    av_frame_free(&pFrameYUV);
    av_free(out_buffer);   // fix: out_buffer was leaked on every frame (and on
                           // the early error return below)

    if (enc_got_frame == 1) {
        LOGE("Succeed to encode video frame: %5d\tsize:%5d\n", framecnt, enc_pkt.size);
        framecnt++;
        enc_pkt.stream_index = video_st->index;

        // Timestamps: wall-clock offset from start_time, rescaled from
        // microseconds to the output stream's time base. Assumes 30 fps.
        AVRational time_base = ofmt_ctx->streams[0]->time_base;
        AVRational r_framerate1 = {30, 1};
        AVRational time_base_q = AV_TIME_BASE_Q;

        // Duration between two frames in microseconds (internal time base).
        int64_t calc_duration = (double) (AV_TIME_BASE) * (1 / av_q2d(r_framerate1));

        int64_t timett = av_gettime();
        int64_t now_time = timett - start_time;
        vid_pts = now_time;
        enc_pkt.pts = av_rescale_q(now_time, time_base_q, time_base);
        enc_pkt.dts = enc_pkt.pts;
        enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base);
        enc_pkt.pos = -1;
        LOGE("index:%d,pts:%lld,dts:%lld,duration:%lld,time_base:%d,%d",
             framecnt,
             (long long) enc_pkt.pts,
             (long long) enc_pkt.dts,
             (long long) enc_pkt.duration,
             time_base.num, time_base.den);
        ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
        if (ret < 0) {
            LOGE("av_interleaved_write_frame failed %s", av_err2str(ret));
            return ret;
        }
    }

    av_free_packet(&enc_pkt);
    return 0;
}

/**
 * JNI entry point: finalise the stream and release all muxer/encoder state
 * created by initVideo. Safe to call once after streaming stops.
 *
 * NOTE(review): assumes avformat_write_header succeeded earlier; if init
 * failed partway, av_write_trailer on an un-headered context is undefined.
 */
JNIEXPORT jint JNICALL
Java_com_zagj_videocomparess_utils_MediaLive_stop(JNIEnv *env, jobject instance) {
    if (ofmt_ctx)
        av_write_trailer(ofmt_ctx);   // fix: trailer was never written, so the
                                      // output was never finalised
    if (video_st) {
        avcodec_close(video_st->codec);
        video_st = NULL;              // fix: stream memory is freed below
    }
    if (audio_st) {
        avcodec_close(audio_st->codec);   // fix: audio encoder was never closed
        audio_st = NULL;
    }
    if (faacbsfc) {
        av_bitstream_filter_close(faacbsfc);   // fix: filter was never released
        faacbsfc = NULL;
    }
    if (ofmt_ctx) {
        avio_close(ofmt_ctx->pb);
        avformat_free_context(ofmt_ctx);
        ofmt_ctx = NULL;
    }
    return 0;
}

/**
 * JNI entry point: remux an input file/stream to an FLV output URL (RTMP
 * push), pacing packets against the wall clock so live servers accept them.
 *
 * @param input_jstr  input media path or URL
 * @param output_jstr output URL (rtmp://...)
 * @return 0 on success (or clean EOF), negative error code / -1 on failure
 */
JNIEXPORT jint JNICALL
Java_com_zagj_videocomparess_utils_MediaLive_openUrl
        (JNIEnv *env, jobject obj, jstring input_jstr, jstring output_jstr) {
    AVOutputFormat *ofmt = NULL;
    AVFormatContext *ifmt_ctx = NULL, *ofmt_ctx = NULL;
    AVPacket pkt;

    int ret, i;
    // Declarations hoisted above the first goto to avoid crossing
    // initialisations (ill-formed in C++).
    int videoindex = -1;
    int frame_index = 0;
    int64_t stream_start = 0;
    char input_str[500] = {0};
    char output_str[500] = {0};

    // fix: the pointers returned by GetStringUTFChars were leaked, and
    // sprintf could overflow the 500-byte buffers; copy bounded, then release.
    {
        const char *in_chars = env->GetStringUTFChars(input_jstr, NULL);
        const char *out_chars = env->GetStringUTFChars(output_jstr, NULL);
        snprintf(input_str, sizeof input_str, "%s", in_chars ? in_chars : "");
        snprintf(output_str, sizeof output_str, "%s", out_chars ? out_chars : "");
        if (in_chars) env->ReleaseStringUTFChars(input_jstr, in_chars);
        if (out_chars) env->ReleaseStringUTFChars(output_jstr, out_chars);
    }

    av_register_all();
    avformat_network_init();

    //Input
    if ((ret = avformat_open_input(&ifmt_ctx, input_str, 0, 0)) < 0) {
        LOGE("Could not open input file.");
        goto End;   // fix: early returns leaked ifmt_ctx/ofmt_ctx; route all
                    // errors through the cleanup label (it was unreachable)
    }
    if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) < 0) {
        LOGE("Failed to retrieve input stream information");
        goto End;
    }

    for (i = 0; i < ifmt_ctx->nb_streams; i++)
        if (ifmt_ctx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoindex = i;
            break;
        }
    if (videoindex < 0) {   // fix: streams[-1] was dereferenced when absent
        LOGE("No video stream found in input\n");
        ret = AVERROR_UNKNOWN;
        goto End;
    }

    //Output (RTMP)
    avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", output_str);
    if (!ofmt_ctx) {
        LOGE("Could not create output context\n");
        ret = AVERROR_UNKNOWN;
        goto End;
    }
    ofmt = ofmt_ctx->oformat;
    for (i = 0; i < ifmt_ctx->nb_streams; i++) {
        //Create output AVStream according to input AVStream
        AVStream *in_stream = ifmt_ctx->streams[i];
        AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
        if (!out_stream) {
            LOGE("Failed allocating output stream\n");
            ret = AVERROR_UNKNOWN;
            goto End;
        }
        //Copy the settings of AVCodecContext
        ret = avcodec_copy_context(out_stream->codec, in_stream->codec);
        if (ret < 0) {
            LOGE("Failed to copy context from input to output stream codec context\n");
            goto End;
        }
        out_stream->codec->codec_tag = 0;
        if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
            out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
    }

    //Open output URL
    if (!(ofmt->flags & AVFMT_NOFILE)) {
        ret = avio_open(&ofmt_ctx->pb, output_str, AVIO_FLAG_WRITE);
        if (ret < 0) {
            LOGE("Could not open output URL '%s'", output_str);
            goto End;
        }
    }
    //Write file header
    ret = avformat_write_header(ofmt_ctx, NULL);
    if (ret < 0) {
        LOGE("Error occurred when opening output URL\n");
        goto End;
    }

    stream_start = av_gettime();
    while (1) {
        AVStream *in_stream, *out_stream;
        //Get an AVPacket
        ret = av_read_frame(ifmt_ctx, &pkt);
        if (ret < 0)
            break;
        // FIX: No PTS (e.g. raw H.264) — synthesise simple timestamps from
        // the frame counter and the input's frame rate.
        if (pkt.pts == AV_NOPTS_VALUE) {
            AVRational time_base1 = ifmt_ctx->streams[videoindex]->time_base;
            //Duration between 2 frames (us)
            int64_t calc_duration =
                    (double) AV_TIME_BASE / av_q2d(ifmt_ctx->streams[videoindex]->r_frame_rate);
            pkt.pts = (double) (frame_index * calc_duration) /
                      (double) (av_q2d(time_base1) * AV_TIME_BASE);
            pkt.dts = pkt.pts;
            pkt.duration = (double) calc_duration / (double) (av_q2d(time_base1) * AV_TIME_BASE);
        }
        // Important: pace video packets against the wall clock so the server
        // receives them in real time rather than as fast as we can read.
        if (pkt.stream_index == videoindex) {
            AVRational time_base = ifmt_ctx->streams[videoindex]->time_base;
            AVRational time_base_q = {1, AV_TIME_BASE};
            int64_t pts_time = av_rescale_q(pkt.dts, time_base, time_base_q);
            int64_t now_time = av_gettime() - stream_start;
            if (pts_time > now_time)
                av_usleep(pts_time - now_time);
        }

        in_stream = ifmt_ctx->streams[pkt.stream_index];
        out_stream = ofmt_ctx->streams[pkt.stream_index];
        //Convert PTS/DTS to the output stream's time base
        pkt.pts = av_rescale_q(pkt.pts, in_stream->time_base, out_stream->time_base);
        pkt.dts = av_rescale_q(pkt.dts, in_stream->time_base, out_stream->time_base);
        pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
        pkt.pos = -1;
        if (pkt.stream_index == videoindex) {
            LOGE("Send %8d video frames to output URL\n", frame_index);
            frame_index++;
        }
        ret = av_interleaved_write_frame(ofmt_ctx, &pkt);
        if (ret < 0) {
            LOGE("Error muxing packet\n");
            av_free_packet(&pkt);   // fix: packet leaked on the error break
            break;
        }
        av_free_packet(&pkt);
    }
    //Write file trailer (also after a mux error, matching previous behaviour)
    av_write_trailer(ofmt_ctx);

End:
    avformat_close_input(&ifmt_ctx);
    /* close output */
    if (ofmt_ctx && ofmt && !(ofmt->flags & AVFMT_NOFILE))
        avio_close(ofmt_ctx->pb);
    avformat_free_context(ofmt_ctx);
    if (ret < 0 && ret != AVERROR_EOF) {
        LOGE("Error occurred.\n");
        return -1;
    }
    return 0;
}

