#include <jni.h>
#include <string>
#include <android/log.h>
#include <android/native_window_jni.h>
#include <android/native_window.h>
#include "AacCodec.h"
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN,"testff",__VA_ARGS__)

extern "C" {
#include <libavcodec/avcodec.h>
#include <libx264/x264.h>
#include <libfaac/faac.h>
#include <libfaac/faaccfg.h>
}

// x264 encoder input picture (YUV420P) and the picture x264 fills on output
x264_picture_t pic_in;
x264_picture_t pic_out;
// Byte counts of the Y/U/V planes for the configured frame size (set in setVideoOptions)
int y_len, u_len, v_len;
// x264 encoder handle (NULL until setVideoOptions succeeds)
x264_t *video_encode_handle;
// Lazily-opened debug dump files for the encoded H.264 / AAC streams
FILE *f = NULL;
FILE *fAudio = NULL;

// FAAC audio encoder handle
faacEncHandle audio_encode_handle;
unsigned long nInputSamples; // number of input samples FAAC consumes per encode call
unsigned long nMaxOutputBytes; // maximum bytes one FAAC encode call can emit

// Alternative AAC encoder backend used by the *2 entry points below
AacCodec *aacCodec = NULL;

extern "C"
JNIEXPORT void JNICALL
Java_com_dongnaoedu_live_jni_PushNative_startPush(JNIEnv *env, jobject instance, jstring url_) {
    // Pin the Java string so the native side can read the push URL.
    const char *pushUrl = env->GetStringUTFChars(url_, NULL);

    // TODO: connect to the streaming server at pushUrl and start pushing.

    env->ReleaseStringUTFChars(url_, pushUrl);
}


/**
 * 设置视频参数
 * @param env
 * @param instance
 * @param width
 * @param height
 * @param bitrate
 * @param fps
 */
extern "C"
JNIEXPORT void JNICALL
Java_com_dongnaoedu_live_jni_PushNative_setVideoOptions(JNIEnv *env, jobject instance, jint width,
                                                        jint height, jint bitrate, jint fps) {
    x264_param_t param;
    // Fastest preset with zero-latency tuning — appropriate for live pushing.
    x264_param_default_preset(&param, "ultrafast", "zerolatency");
    // Encoder input pixel format: planar YUV 4:2:0.
    param.i_csp = X264_CSP_I420;
    param.i_width = width;
    param.i_height = height;

    // Plane sizes for one YUV420P frame: Y = w*h, U = V = w*h/4.
    y_len = width * height;
    u_len = y_len / 4;
    v_len = u_len;

    // Rate control method: CQP (constant QP), CRF (constant rate factor,
    // i.e. constant perceptual quality), ABR (average bitrate).
    param.rc.i_rc_method = X264_RC_CRF;
    param.rc.i_bitrate = bitrate / 1000; // target bitrate in kbps
    param.rc.i_vbv_max_bitrate = (int) (bitrate / 1000 * 1.2); // instantaneous peak, kbps
    // x264 ignores i_vbv_max_bitrate unless a VBV buffer size is also set.
    param.rc.i_vbv_buffer_size = bitrate / 1000;

    // Drive rate control from a fixed fps instead of timebase/timestamps.
    param.b_vfr_input = 0;
    param.i_fps_num = fps; // frame-rate numerator
    param.i_fps_den = 1;   // frame-rate denominator
    param.i_timebase_den = param.i_fps_num;
    param.i_timebase_num = param.i_fps_den;
    param.i_threads = 1; // encoding threads; 0 would mean automatic multi-threading

    // Repeat SPS/PPS in front of every keyframe so a client joining mid-stream
    // (or recovering from corruption) can resume decoding.
    // SPS: Sequence Parameter Set, PPS: Picture Parameter Set.
    param.b_repeat_headers = 1;
    // H.264 level
    param.i_level_idc = 51;
    // baseline profile: no B-frames, lowest decode complexity
    x264_param_apply_profile(&param, "baseline");

    // Allocate the reusable input picture.
    // NOTE(review): calling setVideoOptions twice leaks the previous planes
    // (no x264_picture_clean) — confirm callers invoke this once per session.
    x264_picture_alloc(&pic_in, param.i_csp, param.i_width, param.i_height);

    // Open the encoder; leave video_encode_handle NULL on failure so
    // fireVideo can detect the unusable state.
    video_encode_handle = x264_encoder_open(&param);
    if (video_encode_handle) {
        LOGW("打开编码器成功...");
    } else {
        LOGW("Failed to open x264 encoder");
    }
}


extern "C"
JNIEXPORT void JNICALL
Java_com_dongnaoedu_live_jni_PushNative_setAudioOptions(JNIEnv *env, jobject instance,
                                                        jint sampleRateInHz, jint numChannels) {
    // Open the FAAC encoder; it reports how many input samples it wants per
    // call (nInputSamples) and the worst-case output size (nMaxOutputBytes).
    audio_encode_handle = faacEncOpen(sampleRateInHz, numChannels, &nInputSamples,
                                      &nMaxOutputBytes);
    if (!audio_encode_handle) {
        LOGW("音频编码器打开失败");
        return;
    }
    // Configure the encoder starting from its current defaults.
    faacEncConfigurationPtr p_config = faacEncGetCurrentConfiguration(audio_encode_handle);
    p_config->mpegVersion = MPEG4;
    p_config->allowMidside = 1;
    p_config->aacObjectType = LOW; // AAC-LC
    // 0 = raw AAC bitstream, no ADTS headers.
    // NOTE(review): fireAudio dumps this raw stream to encode_aac.aac; without
    // ADTS headers most players cannot parse that file — confirm intended.
    p_config->outputFormat = 0;
    p_config->useTns = 1; // temporal noise shaping (reduces pre-echo artifacts)
    p_config->useLfe = 0;
//	p_config->inputFormat = FAAC_INPUT_16BIT;
    p_config->quantqual = 100;
    p_config->bandWidth = 0; // 0 lets FAAC choose the cutoff bandwidth
    p_config->shortctl = SHORTCTL_NORMAL;

    if (!faacEncSetConfiguration(audio_encode_handle, p_config)) {
        LOGW("%s", "音频编码器配置失败..");
        // Don't leak a half-configured encoder; fireAudio checks for NULL.
        faacEncClose(audio_encode_handle);
        audio_encode_handle = NULL;
        return;
    }

    LOGW("%s", "音频编码器配置成功");
}



extern "C"
JNIEXPORT void JNICALL
Java_com_dongnaoedu_live_jni_PushNative_fireVideo(JNIEnv *env, jobject instance,
                                                  jbyteArray buffer) {
    // Refuse to encode before setVideoOptions has opened the encoder.
    if (!video_encode_handle) {
        LOGW("fireVideo called before the encoder was opened");
        return;
    }
    // Pin the NV21 camera frame delivered from Java.
    jbyte *nv21_buffer = env->GetByteArrayElements(buffer, NULL);
    if (!nv21_buffer) {
        return; // OutOfMemoryError is already pending in the JVM
    }
    jbyte *u = reinterpret_cast<jbyte *>(pic_in.img.plane[1]);
    jbyte *v = reinterpret_cast<jbyte *>(pic_in.img.plane[2]);
    // Convert NV21 -> YUV420P. Both are 4:2:0 (12 bpp) with identical Y planes;
    // NV21 interleaves VU pairs after Y, YUV420P stores separate U then V planes.
    memcpy(pic_in.img.plane[0], nv21_buffer, y_len);
    int i;
    for (i = 0; i < u_len; i++) {
        *(u + i) = *(nv21_buffer + y_len + i * 2 + 1); // U = second byte of each VU pair
        *(v + i) = *(nv21_buffer + y_len + i * 2);     // V = first byte of each VU pair
    }
    // The frame is fully copied into pic_in; release the pin without copy-back
    // (we never wrote to the Java array).
    env->ReleaseByteArrayElements(buffer, nv21_buffer, JNI_ABORT);

    // Encode one frame; x264 returns the NALUs it produced.
    x264_nal_t *nal = NULL;
    int n_nal = -1; // number of NAL units produced
    int i_frame_size = x264_encoder_encode(video_encode_handle, &nal, &n_nal, &pic_in, &pic_out);
    if (i_frame_size < 0) {
        LOGW("%s", "编码失败");
    } else if (i_frame_size) {
        LOGW("%s", "编码成功");
        // Append the raw Annex-B stream to a debug dump file.
        if (f == NULL) {
            f = fopen("/sdcard/encode_yuan_nal.h264", "ab+");
        }
        if (f != NULL) {
            for (int i = 0; i < n_nal; i++) {
                fwrite((nal + i)->p_payload, 1, (nal + i)->i_payload, f);
            }
        }
    }
}



extern "C"
JNIEXPORT void JNICALL
Java_com_dongnaoedu_live_jni_PushNative_fireAudio(JNIEnv *env, jobject instance, jbyteArray buffer,
                                                  jint len) {
    // Refuse to encode before setAudioOptions has opened/configured FAAC.
    if (!audio_encode_handle) {
        LOGW("fireAudio called before the audio encoder was opened");
        return;
    }
    jbyte *b_buffer = env->GetByteArrayElements(buffer, NULL);
    if (!b_buffer) {
        return; // OutOfMemoryError is already pending in the JVM
    }
    // Working buffers: widened PCM input and the encoder's output bitstream.
    int32_t *pcmbuf = (int32_t *) malloc(nInputSamples * sizeof(int32_t));
    unsigned char *bitbuf = (unsigned char *) malloc(nMaxOutputBytes);
    if (!pcmbuf || !bitbuf) {
        LOGW("fireAudio: out of memory");
        free(pcmbuf);
        free(bitbuf);
        env->ReleaseByteArrayElements(buffer, b_buffer, JNI_ABORT);
        return;
    }
    unsigned int nByteCount = 0;
    unsigned int nBufferSize = (unsigned int) len / 2; // total 16-bit samples in the buffer
    unsigned short *buf = (unsigned short *) b_buffer;
    // Feed the PCM to FAAC in chunks of nInputSamples (the size it requested).
    while (nByteCount < nBufferSize) {
        int audioLength = nInputSamples;
        if ((nByteCount + nInputSamples) >= nBufferSize) {
            audioLength = nBufferSize - nByteCount; // final, possibly short, chunk
        }
        int i;
        for (i = 0; i < audioLength; i++) {
            // Widen each 16-bit sample into the 32-bit layout FAAC expects
            // (sample value shifted up by 8 bits).
            int s = ((int16_t *) buf + nByteCount)[i];
            pcmbuf[i] = s << 8;
        }
        nByteCount += nInputSamples;
        // Encode one chunk; returns the number of AAC bytes written to bitbuf
        // (may be 0 while the encoder's internal delay line fills).
        int byteslen = faacEncEncode(audio_encode_handle, pcmbuf, audioLength,
                                     bitbuf, nMaxOutputBytes);
        if (byteslen < 1) {
            continue;
        }
        // add_aac_body(bitbuf, byteslen); // queue the AAC frame for pushing
        // Append to a debug dump file.
        if (fAudio == NULL) {
            fAudio = fopen("/sdcard/encode_aac.aac", "ab+");
        }
        if (fAudio != NULL) {
            fwrite(bitbuf, 1, byteslen, fAudio);
            fflush(fAudio);
        }
        LOGW("...音频采集成功...");
    }
    // The Java array was only read; release without copy-back.
    env->ReleaseByteArrayElements(buffer, b_buffer, JNI_ABORT);
    free(bitbuf);  // free(NULL) is a no-op, no guard needed
    free(pcmbuf);
}






//音频编码
// Audio encoding via the AacCodec backend.
extern "C"
JNIEXPORT void JNICALL
Java_com_dongnaoedu_live_jni_PushNative_fireAudio2(JNIEnv *env, jobject instance, jbyteArray pcm,
                                                   jint len) {
    LOGW("Java_aacEncoder_jni_FFAacEncoderJni_native_set_pcmData");
    // Guard against being called before setAudioOptions2 created the codec
    // (the original dereferenced a possibly-NULL global and crashed).
    if (!aacCodec) {
        LOGW("fireAudio2 called before setAudioOptions2");
        return;
    }
    jbyte *pcmData = env->GetByteArrayElements(pcm, 0);
    if (!pcmData) {
        LOGW("set pcm data fail");
        return;
    }
    aacCodec->encode_pcm_data(pcmData, len);
    env->ReleaseByteArrayElements(pcm, pcmData, 0);
}

//设置音频参数
extern "C"
JNIEXPORT void JNICALL
Java_com_dongnaoedu_live_jni_PushNative_setAudioOptions2(JNIEnv *env, jobject instance,
                                                         jint sampleRateInHz, jint channel) {

    LOGW("Java_aacEncoder_jni_FFAacEncoderJni_native_start");
   /* jclass clazz = env->GetObjectClass(instance);
    jfieldID fieldId = env->GetFieldID(clazz, "mNativeContext", "I");
    AacCodec *aacCodec = (AacCodec *)env->GetIntField(instance,fieldId);
    if(aacCodec){
        delete aacCodec;
        aacCodec = NULL;
    }*/
    aacCodec = new AacCodec();
    aacCodec->start();

    //env->SetIntField(thiz, fieldId, (int)aacCodec);

}

extern "C"
JNIEXPORT void JNICALL
Java_com_dongnaoedu_live_jni_PushNative_stopPush(JNIEnv *env, jobject instance) {

    LOGW("Java_aacEncoder_jni_FFAacEncoderJni_native_stop");
    // Guard against stopPush being called before setAudioOptions2
    // (the original dereferenced a possibly-NULL global and crashed).
    if (aacCodec) {
        aacCodec->stop();
    }
}

extern "C"

JNIEXPORT void JNICALL
Java_com_dongnaoedu_live_jni_PushNative_release(JNIEnv *env, jobject instance) {

    // TODO: free native resources acquired elsewhere in this file —
    // x264 encoder handle + pic_in planes, FAAC handle, the lazily-opened
    // dump FILE*s (f, fAudio), and the heap-allocated aacCodec instance.

}