#include <jni.h>
#include <string>
#include <stdlib.h>
#include <stdio.h>
#include <unistd.h>
#include <android/log.h>
#include <android/native_window_jni.h>
#include <android/native_window.h>

#define LOGI(format, ...) \
    __android_log_print(ANDROID_LOG_INFO, TAG,  format, ##__VA_ARGS__)

#define LOGD(format, ...) \
    __android_log_print(ANDROID_LOG_DEBUG,TAG,format,##__VA_ARGS__)

#define LOGE(format, ...) \
    __android_log_print(ANDROID_LOG_ERROR,TAG,format,##__VA_ARGS__)


// Log tag used by the LOGI/LOGD/LOGE macros. const-qualified: string literals
// are immutable, and binding one to a plain char* is ill-formed in modern C++.
const char *TAG = "ffmpeg-native";

extern "C" { // FFmpeg headers need C linkage from C++; otherwise name mangling causes link errors like: undefined reference to `avcodec_find_decoder(AVCodecID)'
//container formats (muxing/demuxing)
#include "libavformat/avformat.h"
//codecs (encoding/decoding)
#include "libavcodec/avcodec.h"
#include "libavcodec/avcodec.h"
//image scaling / pixel format conversion
#include "libswscale/swscale.h"

#include "libswresample/swresample.h"
#include "libavutil/time.h"

#include "libavutil/imgutils.h"
#include "libavutil/opt.h"

#include "libavutil/imgutils.h"
#include "libavutil/samplefmt.h"
#include "libavformat/avformat.h"
#include "libavutil/frame.h"
#include "libavutil/mem.h"
#include "libswscale/swscale.h"
#include "libswresample/swresample.h"
#include "libavutil/opt.h"
#include "libavfilter/avfilter.h"
#include "libavcodec/avcodec.h"
#include "libavfilter/buffersink.h"
#include "libavfilter/buffersrc.h"
}

#define MAX_AUDIO_FRME_SIZE 48000 * 4
// Per-session encoder state (output file, reusable frame/packet, codec
// context, frame dimensions). All pointers are initialized to NULL so that
// cleanup code can safely test which init steps actually completed — the
// original left f/frame/pkt uninitialized while c/width/height were not.
struct EnCodeBean {
    FILE *f = NULL;             // output file opened "wb"; encoded stream is written here
    AVFrame *frame = NULL;      // reusable raw YUV420P input frame
    AVPacket *pkt = NULL;       // reusable packet receiving encoded output
    AVCodecContext *c = NULL;   // encoder context
    int width = 0;              // frame width in pixels
    int height = 0;             // frame height in pixels
};

// Global encoder session; created by initEnCodec, destroyed by endClose.
EnCodeBean *videoEncodeObj = NULL;

// NOTE(review): none of the globals below are referenced anywhere in this
// file — presumably leftovers from an earlier decode/playback path. Candidates
// for removal once confirmed unused by other translation units.
AVCodecContext *pCodecCtx= NULL;
AVPacket avpkt;
FILE * video_file;
unsigned char *outbuf=NULL;
unsigned char *yuv420buf=NULL;
static int outsize=0;

//Encode one frame (forward declaration; defined below)
static void encode(AVCodecContext *enc_ctx, AVFrame *frame, AVPacket *pkt,
                   FILE *outfile);

// Initialize the MPEG-2 video encoder session.
//
//  path : output file path; the raw encoded elementary stream is written there
//  w, h : frame dimensions; must match the YUV420P data pushed via pushFrame
//         (resolution must be a multiple of two for 4:2:0 chroma subsampling)
//
// Returns 0 on success, -1 on failure. On failure every partially acquired
// resource is released and videoEncodeObj is reset to NULL (the original
// leaked the session object, codec context, packet and file on error paths).
static jint initEnCodec(JNIEnv *env, jobject jobject1, jstring path, jint w, jint h) {
    char filename[300];
    const AVCodec *codec = NULL;
    int ret = 0;
    AVCodecID videoCodec = AV_CODEC_ID_MPEG2VIDEO; // encoder type

    // Copy the Java path into a local buffer — bounded and NUL-terminated
    // (the original used an unbounded sprintf).
    const char *_path = env->GetStringUTFChars(path, 0);
    if (_path == NULL) {
        LOGE("GetStringUTFChars returned NULL\n");
        return -1;
    }
    snprintf(filename, sizeof(filename), "%s", _path);
    env->ReleaseStringUTFChars(path, _path);

    /* look up the encoder */
    codec = avcodec_find_encoder(videoCodec);
    LOGE("%s\n", avcodec_get_name(videoCodec));
    if (!codec) {
        LOGE("Codec '%s' not found\n", avcodec_get_name(videoCodec));
        return -1;
    }

    // Allocate the session object only after the codec lookup succeeded, and
    // explicitly NULL all resource pointers so the fail path can test them.
    videoEncodeObj = new EnCodeBean;
    videoEncodeObj->width = w;
    videoEncodeObj->height = h;
    videoEncodeObj->f = NULL;
    videoEncodeObj->frame = NULL;
    videoEncodeObj->pkt = NULL;

    // encoder context
    videoEncodeObj->c = avcodec_alloc_context3(codec);
    if (!videoEncodeObj->c) {
        LOGE("Could not allocate video codec context\n");
        goto fail;
    }

    // packet that receives encoded output
    videoEncodeObj->pkt = av_packet_alloc();
    if (!videoEncodeObj->pkt) {
        LOGE("packet init error\n");
        goto fail;
    }

    /* encoder parameters */
    videoEncodeObj->c->bit_rate = 639 * 1000;              // target bitrate (quality)
    /* resolution must be a multiple of two */
    videoEncodeObj->c->width = w;
    videoEncodeObj->c->height = h;
    /* frames per second: pts is expressed in 1/25 s ticks */
    videoEncodeObj->c->time_base = (AVRational) {1, 25};
    videoEncodeObj->c->framerate = (AVRational) {25, 1};
    videoEncodeObj->c->gop_size = 10;                      // one intra frame every 10 frames
    videoEncodeObj->c->max_b_frames = 1;
    videoEncodeObj->c->pix_fmt = AV_PIX_FMT_YUV420P;       // input pixel format

    // dead branch today (codec is MPEG-2), kept for when videoCodec changes
    if (codec->id == AV_CODEC_ID_H264)
        av_opt_set(videoEncodeObj->c->priv_data, "preset", "slow", 0);

    /* open the encoder */
    ret = avcodec_open2(videoEncodeObj->c, codec, NULL);
    if (ret < 0) {
        LOGE("Could not open codec: %s\n", av_err2str(ret));
        goto fail;
    }

    // output file
    videoEncodeObj->f = fopen(filename, "wb");
    if (!videoEncodeObj->f) {
        LOGE("Could not open %s\n", filename);
        goto fail;
    }

    // reusable input frame
    videoEncodeObj->frame = av_frame_alloc();
    if (!videoEncodeObj->frame) {
        LOGE("Could not allocate video frame\n");
        goto fail;
    }
    videoEncodeObj->frame->format = videoEncodeObj->c->pix_fmt;
    videoEncodeObj->frame->width = videoEncodeObj->c->width;
    videoEncodeObj->frame->height = videoEncodeObj->c->height;

    // allocate the frame's plane buffers (16-byte alignment, as before; note
    // pushFrame must honor frame->linesize, which may exceed width)
    ret = av_frame_get_buffer(videoEncodeObj->frame, 16);
    if (ret < 0) {
        LOGE("Could not allocate the video frame data\n");
        goto fail;
    }
    LOGE("init ok %p", videoEncodeObj);
    return 0;

fail:
    // Release everything acquired so far; pointers never set are still NULL.
    if (videoEncodeObj->f)
        fclose(videoEncodeObj->f);
    av_frame_free(&videoEncodeObj->frame);      // safe on NULL
    av_packet_free(&videoEncodeObj->pkt);       // safe on NULL
    avcodec_free_context(&videoEncodeObj->c);   // safe on NULL
    delete videoEncodeObj;
    videoEncodeObj = NULL;
    return -1;
}

// Drive one encode step: submit `frame` (or NULL to drain the encoder) to
// enc_ctx and write every packet it produces to outfile.
static void encode(AVCodecContext *enc_ctx, AVFrame *frame, AVPacket *pkt,
                   FILE *outfile) {
    int ret;

    /* send the frame to the encoder */
    if (frame)
        // fixed: the original passed the real format string as the %s argument,
        // so the pts value was never printed
        LOGE("Send frame %lld\n", (long long) frame->pts);

    // submit the frame (frame == NULL enters drain mode)
    ret = avcodec_send_frame(enc_ctx, frame);
    if (ret < 0) {
        LOGE("%s", "Error sending a frame for encoding\n");
        return;
    }

    while (ret >= 0) {
        // pull all packets the encoder has ready
        ret = avcodec_receive_packet(enc_ctx, pkt);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            return; // needs more input / fully drained — not an error
        else if (ret < 0) {
            LOGE("%s", "Error during encoding\n");
            return;
        }

        LOGE("Write packet %lld (size=%5d)\n", (long long) pkt->pts, pkt->size);
        // write the encoded bytes; detect short writes instead of ignoring them
        if (fwrite(pkt->data, 1, pkt->size, outfile) != (size_t) pkt->size)
            LOGE("%s", "short write to output file\n");
        av_packet_unref(pkt);
    }
}

// Receive one raw frame pushed from Java and encode it.
//
//  pts         : presentation timestamp for this frame, in c->time_base units
//                (1/25 s); assumed monotonically increasing — TODO confirm with
//                the Java caller
//  jbyteArray1 : tightly packed YUV420P data: w*h luma bytes, then w*h/4 U
//                bytes, then w*h/4 V bytes
//
// Returns 0 on success, -1 on error.
static jint pushFrame(JNIEnv *env, jobject jobject1, jlong pts, jbyteArray jbyteArray1) {
    if (videoEncodeObj == NULL || videoEncodeObj->frame == NULL) {
        LOGE("pushFrame called before initEnCodec");
        return -1;
    }

    AVFrame *frame = videoEncodeObj->frame;
    int w = videoEncodeObj->width;
    int h = videoEncodeObj->height;
    int lumaSize = w * h;

    // Validate the incoming buffer before touching it (Y + U + V planes).
    jsize len = env->GetArrayLength(jbyteArray1);
    if (len < lumaSize + lumaSize / 2) {
        LOGE("pushFrame: buffer too small (%d < %d)", (int) len, lumaSize + lumaSize / 2);
        return -1;
    }

    // Copy the Java array into a buffer we own. Because GetByteArrayRegion
    // copies into our own new[] allocation, it must be paired with delete[] —
    // the original passed this pointer to ReleaseByteArrayElements, which is
    // undefined behavior (that API only accepts GetByteArrayElements results).
    unsigned char *buf = new unsigned char[len];
    env->GetByteArrayRegion(jbyteArray1, 0, len, reinterpret_cast<jbyte *>(buf));

    /* make sure the frame data is writable */
    int ret = av_frame_make_writable(frame);
    if (ret < 0) {
        LOGE("av_frame_make_writable is error %d!!!", ret);
        delete[] buf; // original leaked buf on this path
        return -1;
    }

    // Copy plane by plane, honoring each plane's linesize: the frame's rows
    // may be padded for alignment, so one flat memcpy per plane is only valid
    // when linesize == width. Row-wise copying is always correct.
    for (int row = 0; row < h; row++)
        memcpy(frame->data[0] + row * frame->linesize[0],
               buf + row * w, w);
    for (int row = 0; row < h / 2; row++)
        memcpy(frame->data[1] + row * frame->linesize[1],
               buf + lumaSize + row * (w / 2), w / 2);
    for (int row = 0; row < h / 2; row++)
        memcpy(frame->data[2] + row * frame->linesize[2],
               buf + lumaSize + lumaSize / 4 + row * (w / 2), w / 2);

    // Use the caller's pts directly as int64 — the original routed it through
    // a plain int (truncation) and had dead += adjustments overwritten below.
    frame->pts = (int64_t) pts;

    /* encode */
    encode(videoEncodeObj->c, frame, videoEncodeObj->pkt, videoEncodeObj->f);

    delete[] buf;
    return 0;
}

// Flush the encoder, finalize the output file and release all resources.
// Call once, after a successful initEnCodec. Returns 0 on success, -1 if
// there is no active session.
static jint endClose(JNIEnv *env, jobject jobject1) {
    uint8_t endcode[] = {0, 0, 1, 0xb7}; // MPEG-2 sequence end code

    if (videoEncodeObj == NULL) {
        LOGE("endClose called without an active encoder");
        return -1;
    }

    /* drain delayed frames out of the encoder (NULL frame = flush) */
    encode(videoEncodeObj->c, NULL, videoEncodeObj->pkt, videoEncodeObj->f);

    /* append the sequence end code and close the file (fclose also flushes,
       so check its result to catch deferred write errors) */
    fwrite(endcode, 1, sizeof(endcode), videoEncodeObj->f);
    if (fclose(videoEncodeObj->f) != 0)
        LOGE("fclose failed");

    // release FFmpeg objects
    avcodec_free_context(&videoEncodeObj->c);
    av_frame_free(&videoEncodeObj->frame);
    av_packet_free(&videoEncodeObj->pkt);
    LOGE("is end ok");

    delete videoEncodeObj;
    videoEncodeObj = NULL;

    LOGE("free out is ok");
    return 0;
}

// Debug hook exposed to Java; currently a no-op (the log call is disabled).
// Returns 0 always.
static jint showMsg(JNIEnv *env, jobject jobject1, jstring jstring1) {
    const char *instr = env->GetStringUTFChars(jstring1, 0);
    if (instr != NULL) { // GetStringUTFChars may return NULL on OOM
        // If re-enabled, always log via a format string — LOGE(instr) would be
        // a format-string vulnerability for strings containing '%':
        // LOGE("%s", instr);
        env->ReleaseStringUTFChars(jstring1, instr);
    }
    return 0;
}

//----------------------------------jni 动态注册方法-----------------------------------------

// Registration table for RegisterNatives: maps the Java native methods of
// com.senseauto.camera2record.VideoEnCodec to the implementations above.
// Signatures must stay in sync with the Java declarations.
static JNINativeMethod javaMethods[] = {
        {"initEnCodec", "(Ljava/lang/String;II)I", (void *) initEnCodec}, // (path, width, height) -> status
        {"pushFrame",   "(J[B)I",                  (void *) pushFrame},   // (pts, yuv420pBytes) -> status
        {"showMsg",     "(Ljava/lang/String;)I",   (void *) showMsg},     // (message) -> status
        {"endClose",    "()I",                     (void *) endClose}     // () -> status
};

// Called by the JVM when this shared library is loaded: registers the native
// methods dynamically. Returns the supported JNI version on success; -1 on
// failure, which makes System.loadLibrary throw.
jint JNI_OnLoad(JavaVM *vm, void *unused) {
    JNIEnv *env = NULL;
    if (vm->GetEnv((void **) &env, JNI_VERSION_1_4) != JNI_OK) {
        LOGE("GetEnv failed"); // original message was garbled mixed-language text
        return -1;
    }

    const char *className = "com/senseauto/camera2record/VideoEnCodec";
    int methodNum = sizeof(javaMethods) / sizeof(JNINativeMethod);

    jclass jclass1 = env->FindClass(className);
    if (jclass1 == NULL) {
        LOGE("find class error");
        return -1;
    }

    int ret = env->RegisterNatives(jclass1, javaMethods, methodNum);
    env->DeleteLocalRef(jclass1); // single release point for the local ref
    if (ret < 0) {
        LOGE("RegisterNatives failed: %d", ret); // original failed silently
        return -1;
    }
    return JNI_VERSION_1_4;
}