#include <jni.h>
#include <string>
#include <malloc.h>
#include <x264.h>
#include "android/log.h"
#include "librtmp/rtmp.h"
#include <queue>
#include <faac.h>

extern "C" {
#include "x264.h"
}

#define  LOG_TAG    "aruba"
#define  LOGE(...)  __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
#define  LOGD(...)  __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)

//Forward declarations for the RTMP packetizing helpers defined below.
//NOTE(review): this declaration names the third parameter `len`; the
//definition calls it `sps_len` — same type, cosmetic mismatch only.
void send_264_header(unsigned char sps[100], unsigned char pps[100], int len, int pps_len);

void put(RTMPPacket *pPacket);

RTMPPacket *get();

void send_264_body(uint8_t *payload, int i_payload);

void send_aac_body(unsigned char *buffer, int len);

//byte counts of the Y, U and V planes for the configured frame size
int y_len, u_len, v_len;
//raw input picture handed to x264_encoder_encode
x264_picture_t *pic;
//output picture filled in by x264_encoder_encode
x264_picture_t *pic_out;
//video encoder instance
x264_t *encoder;

//queue of RTMPPackets pending upload (guarded by mutex/cond)
std::queue<RTMPPacket *> queue;
//push-thread handle; also used by release() as a "thread was started" flag
pthread_t *pid;
pthread_mutex_t mutex;
pthread_cond_t cond;
bool isPublishing = false;

//time streaming started (RTMP_GetTime); base for packet timestamps
uint32_t start_time;
//RTMP publish URL (heap copy, freed by the push thread)
char *path;

//audio (AAC) encoder handle
faacEncHandle handle;
//number of PCM samples faacEncEncode consumes per call (set by faacEncOpen)
unsigned long inputSamples;
//maximum size in bytes of one encoded AAC frame (set by faacEncOpen)
unsigned long maxOutputBytes;

//cached JavaVM and publisher object for calling back into Java
JavaVM *jvm;
jobject jPublisherObj;

/**
 * Standard JNI entry point: cache the JavaVM pointer so native worker
 * threads can attach themselves later, and report the JNI version.
 * @return JNI_VERSION_1_4 on success, -1 if the env cannot be obtained
 */
JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void *reserved) {
    jvm = vm;
    if (jvm) {
        LOGD("jvm init success");
    }
    JNIEnv *env = NULL;
    return (vm->GetEnv((void **) &env, JNI_VERSION_1_4) == JNI_OK)
           ? JNI_VERSION_1_4
           : -1;
}

/**
 * Invoke a void(int) Java callback method on the cached publisher object.
 * Silently does nothing when any of env, methodId or the global publisher
 * reference is missing, so it is safe to call from error paths.
 * @param env      JNI environment of the calling thread
 * @param methodId method taking a single int argument
 * @param code     status/error code forwarded to Java
 */
void throwNativeInfo(JNIEnv *env, jmethodID methodId, int code) {
    if (env == NULL || methodId == NULL || jPublisherObj == NULL) {
        return;
    }
    env->CallVoidMethodA(jPublisherObj, methodId, (jvalue *) &code);
}

/**
 * rtmp发送头信息
 * @param sps 
 * @param pps 
 * @param len 
 * @param pps_len 
 */
void send_264_header(unsigned char *sps, unsigned char *pps, int sps_len, int pps_len) {
    int size = sps_len + pps_len + 16;//组包rtmp头信息需要额外16个字节
    RTMPPacket *packet = static_cast<RTMPPacket *>(malloc(sizeof(RTMPPacket)));
    //初始化内部缓冲区
    RTMPPacket_Alloc(packet, size);

    //组包
    unsigned char *body = reinterpret_cast<unsigned char *>(packet->m_body);
    int i = 0;
    body[i++] = 0x17;
    body[i++] = 0x00;
    body[i++] = 0x00;
    body[i++] = 0x00;
    body[i++] = 0x00;
    //版本号
    body[i++] = 0x01;
    //profile
    body[i++] = sps[1];
    //兼容性
    body[i++] = sps[2];
    //profile_level baseline
    body[i++] = sps[3];
    body[i++] = 0xff;
    body[i++] = 0xe1;
    //sps长度
    body[i++] = (sps_len >> 8) & 0xff;
    body[i++] = sps_len & 0xff;
    //sps内容
    memcpy(&body[i], sps, sps_len);
    i += sps_len;//指针偏移长度

    //pps
    body[i++] = 0x01;
    //pps长度
    body[i++] = (pps_len >> 8) & 0xff;
    body[i++] = pps_len & 0xff;
    memcpy(&body[i], pps, pps_len);

    //packet参数设置
    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;//视频类型
    packet->m_nBodySize = size;
    //客户端通过pts自己做同步
    packet->m_nTimeStamp = 0;
    packet->m_hasAbsTimestamp = 0;
    //指定通道
    packet->m_nChannel = 4;
    packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;

    //放入队列
    put(packet);
}

/**
 * Wrap one encoded H.264 NAL unit (keyframe or inter frame) in an FLV/RTMP
 * video tag and enqueue it. The Annex-B start code in front of the NAL is
 * stripped first.
 * @param payload   NAL data beginning with its start code
 * @param i_payload number of bytes in payload
 */
void send_264_body(uint8_t *payload, int i_payload) {
    //strip the Annex-B start code
    if (payload[2] == 0x00) {//4-byte start code: 00 00 00 01
        payload += 4;
        i_payload -= 4;
    } else if (payload[2] == 0x01) {//3-byte start code: 00 00 01
        payload += 3;
        i_payload -= 3;
    }

    //9 extra bytes: FLV video tag header (5) + 4-byte NALU length prefix
    int size = i_payload + 9;
    RTMPPacket *packet = static_cast<RTMPPacket *>(malloc(sizeof(RTMPPacket)));
    //BUG FIX: the original dereferenced unchecked allocation results
    if (!packet) {
        return;
    }
    //allocate the internal body buffer
    if (!RTMPPacket_Alloc(packet, size)) {
        free(packet);
        return;
    }

    char *body = packet->m_body;
    int type = payload[0] & 0x1f;//NAL unit type from the header byte
    int index = 0;
    if (type == NAL_SLICE_IDR) {//keyframe: frame type 1 | codec 7 (AVC)
        body[index++] = 0x17;
    } else {//inter frame: frame type 2 | codec 7 (AVC)
        body[index++] = 0x27;
    }

    body[index++] = 0x01;//AVCPacketType 1: NALU
    body[index++] = 0x00;//composition time: 0
    body[index++] = 0x00;
    body[index++] = 0x00;

    //NALU length, 4 bytes big-endian
    body[index++] = (i_payload >> 24) & 0xff;
    body[index++] = (i_payload >> 16) & 0xff;
    body[index++] = (i_payload >> 8) & 0xff;
    body[index++] = i_payload & 0xff;

    //NALU data
    memcpy(&body[index], payload, i_payload);

    //packet metadata
    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
    packet->m_nBodySize = size;
    //relative timestamp so the client can track playback progress
    packet->m_nTimeStamp = RTMP_GetTime() - start_time;
    packet->m_hasAbsTimestamp = 0;
    //channel for video data
    packet->m_nChannel = 0x04;
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;

    put(packet);
}

/**
 * Producer: enqueue a packet for the push thread and wake it.
 * Takes ownership of pPacket in all cases.
 *
 * BUG FIX: the original leaked the packet (body buffer and struct) when
 * isPublishing was false — it simply skipped the push. Now the packet is
 * freed when it cannot be queued.
 * @param pPacket heap-allocated packet whose body was RTMPPacket_Alloc'd
 */
void put(RTMPPacket *pPacket) {
    pthread_mutex_lock(&mutex);
    if (isPublishing) {
        queue.push(pPacket);
        //wake the consumer blocked in get()
        pthread_cond_signal(&cond);
    } else {
        //not publishing: drop the packet and release its memory
        RTMPPacket_Free(pPacket);
        free(pPacket);
    }
    pthread_mutex_unlock(&mutex);
}

//Consumer: pop the next packet for the push thread, blocking while the
//queue is empty. Deliberately uses `if` (not `while`) around
//pthread_cond_wait: when woken with an empty queue (stopPush signalling
//shutdown, or a spurious wakeup) it returns NULL so the caller can
//re-check isPublishing instead of blocking forever.
RTMPPacket *get() {
    pthread_mutex_lock(&mutex);
    if (queue.empty()) {
        pthread_cond_wait(&cond, &mutex);
    }

    RTMPPacket *packet = NULL;
    if (!queue.empty()) {
        packet = queue.front();
        queue.pop();
    }

    pthread_mutex_unlock(&mutex);

    return packet;
}

/**
 * Configure and open the x264 video encoder for the given frame geometry,
 * bitrate and frame rate. Safe to call repeatedly: previously allocated
 * encoder and pictures are released first. On failure the Java side is
 * notified via onPostNativeError(-98).
 * @param width   frame width in pixels
 * @param height  frame height in pixels
 * @param bitrate target bitrate in bits per second
 * @param fps     frames per second
 */
extern "C"
JNIEXPORT void JNICALL
Java_com_aruba_rtmppushapplication_push_natives_NativePush_setVideoParams(JNIEnv *env,
                                                                          jobject instance,
                                                                          jint width, jint height,
                                                                          jint bitrate, jint fps) {
    //BUG FIX: the original leaked the old encoder on reconfiguration
    if (encoder) {
        x264_encoder_close(encoder);
        encoder = NULL;
    }
    if (pic != NULL) {
        x264_picture_clean(pic);
        free(pic);
        free(pic_out);
        pic = NULL;
        pic_out = NULL;
    }
    y_len = width * height;
    u_len = y_len / 4;
    v_len = u_len;

    //encoder parameters; the "zerolatency" tune presets:
    //    rc.i_lookahead = 0; i_sync_lookahead = 0; i_bframe = 0;
    //    b_sliced_threads = 1; b_vfr_input = 0; rc.b_mb_tree = 0;
    x264_param_t param;
    x264_param_default_preset(&param, x264_preset_names[0], "zerolatency");
    //decoder level (51 is the default)
    param.i_level_idc = 51;
    //input colorspace
    param.i_csp = X264_CSP_I420;
    //frame geometry
    param.i_width = width;
    param.i_height = height;
    param.i_threads = 1;

    //frame rate (timebase is ignored by x264 when b_vfr_input == 0)
    param.i_timebase_num = fps;
    param.i_timebase_den = 1;
    param.i_fps_num = fps;
    param.i_fps_den = 1;
    //maximum keyframe interval, in frames
    param.i_keyint_max = fps * 2;

    //ABR: average-bitrate rate control (x264 bitrates are in kbit/s)
    param.rc.i_rc_method = X264_RC_ABR;
    //target bitrate
    param.rc.i_bitrate = bitrate / 1000;
    //allow bursts up to 20% above the target
    param.rc.i_vbv_max_bitrate = bitrate / 1000 * 1.2;
    //VBV buffer size
    param.rc.i_vbv_buffer_size = bitrate / 1000;

    //0: clients synchronize via pts themselves
    param.b_vfr_input = 0;
    //repeat SPS/PPS in the stream
    param.b_repeat_headers = 1;
    //baseline profile: only I/P frames, low latency, best compatibility
    x264_param_apply_profile(&param, "baseline");

    //open the encoder
    encoder = x264_encoder_open(&param);
    if (!encoder) {
        LOGE("打开视频编码器失败");
        jmethodID errorId = env->GetMethodID(env->GetObjectClass(instance), "onPostNativeError",
                                             "(I)V");
        throwNativeInfo(env, errorId, -98);
        return;
    }

    pic = (x264_picture_t *) (malloc(sizeof(x264_picture_t)));
    //allocate the I420 input picture buffers
    x264_picture_alloc(pic, X264_CSP_I420, width, height);
    pic_out = (x264_picture_t *) (malloc(sizeof(x264_picture_t)));
    //BUG FIX: initialize the output picture instead of leaving raw malloc memory
    x264_picture_init(pic_out);
    LOGE("视频编码器打开完成");
}

/**
 * Release all encoder/queue/synchronization resources and the global Java
 * reference. Called from the push thread on exit and from stopPush() when
 * no thread is running.
 *
 * BUG FIX: the original drained the queue with
 *     for (int i = 0; i < queue.size(); ++i)
 * — popping shrinks size() while i grows, so only about half the packets
 * were freed. A while-not-empty loop drains it completely.
 * @param env JNI environment of the calling thread
 */
void release(JNIEnv *env) {
    if (encoder) {
        x264_encoder_close(encoder);
        encoder = NULL;
    }

    if (handle) {
        faacEncClose(handle);
        handle = 0;
    }

    if (pic) {
        x264_picture_clean(pic);
        free(pic);
        free(pic_out);
        pic = NULL;
        pic_out = NULL;
    }

    //only destroy the primitives if the push thread was ever started
    if (pid) {
        pthread_cond_destroy(&cond);
        pthread_mutex_destroy(&mutex);
    }

    //drain and free every remaining queued packet
    while (!queue.empty()) {
        RTMPPacket *pkt = queue.front();
        queue.pop();
        RTMPPacket_Free(pkt);
        free(pkt);
    }
    pid = NULL;

    if (jPublisherObj) {
        env->DeleteGlobalRef(jPublisherObj);
        jPublisherObj = NULL;
    }
}

/**
 * Encode one NV21 camera frame with x264 and hand the resulting NAL units
 * to the RTMP packetizers. Requires setVideoParams() to have been called.
 * @param buffer_ frame data: y_len luma bytes followed by interleaved VU
 *                chroma (NV21) — assumed sized to match the configured
 *                width/height; TODO confirm against the Java caller
 */
extern "C"
JNIEXPORT void JNICALL
Java_com_aruba_rtmppushapplication_push_natives_NativePush_pushVideo(JNIEnv *env, jobject instance,
                                                                     jbyteArray buffer_) {
    if (!isPublishing || !encoder || !pic) {
        return;
    }
    jbyte *buffer = env->GetByteArrayElements(buffer_, NULL);

    uint8_t *u = pic->img.plane[1];
    uint8_t *v = pic->img.plane[2];
    //convert NV21 to I420 planar: after the luma plane NV21 interleaves
    //V at even offsets and U at odd offsets
    for (int i = 0; i < u_len; i++) {
        *(u + i) = *(buffer + y_len + i * 2 + 1);
        *(v + i) = *(buffer + y_len + i * 2);
    }
    memcpy(pic->img.plane[0], buffer, y_len);
//    pic->img.plane[0] = buffer;

    //encoded NAL units (storage owned by the encoder)
    x264_nal_t *nal = 0;
    //number of NAL units produced
    int pi_nal;
    int ret = x264_encoder_encode(encoder, &nal, &pi_nal, pic, pic_out);
    if (ret < 0) {
        env->ReleaseByteArrayElements(buffer_, buffer, 0);
        LOGE("编码失败");
        return;
    }

    //unpack: forward the useful payloads to the RTMP packetizers;
    //NOTE(review): SPS/PPS are assumed to fit in 100 bytes and to be
    //prefixed with a 4-byte start code — TODO confirm for this encoder config
    unsigned char sps[100];
    unsigned char pps[100];
    int sps_len = 0;
    int pps_len = 0;

    for (int i = 0; i < pi_nal; i++) {
        if (nal[i].i_type == NAL_SPS) {//sequence parameter set
            //skip the 4-byte start code
            sps_len = nal[i].i_payload - 4;
            //copy the payload
            memcpy(sps, nal[i].p_payload + 4, sps_len);
        } else if (nal[i].i_type == NAL_PPS) {//picture parameter set
            pps_len = nal[i].i_payload - 4;
            memcpy(pps, nal[i].p_payload + 4, pps_len);

            //once both SPS and PPS are captured, send the sequence header
            send_264_header(sps, pps, sps_len, pps_len);
        } else {//key frames and inter frames
            send_264_body(nal[i].p_payload, nal[i].i_payload);
        }
    }

    env->ReleaseByteArrayElements(buffer_, buffer, 0);
}

/**
 * Push-thread entry point: attaches to the JVM, connects to the RTMP server
 * at `path`, then loops draining the packet queue via get() and sending each
 * packet until isPublishing is cleared. Cleans up and detaches on exit.
 * @param arg unused
 * @return never returns normally; exits via pthread_exit(0)
 */
void *startPush(void *arg) {
    pthread_mutex_lock(&mutex);
    isPublishing = true;
    pthread_mutex_unlock(&mutex);

    //attach this native thread so it can call back into Java
    JNIEnv *env;
    jvm->AttachCurrentThread(&env, 0);
    jclass clazz = env->GetObjectClass(jPublisherObj);
    jmethodID errorId = env->GetMethodID(clazz, "onPostNativeError", "(I)V");
    jmethodID stateId = env->GetMethodID(clazz, "onPostNativeState", "(I)V");

    //set up the RTMP connection
    RTMP *connect = RTMP_Alloc();
    RTMP_Init(connect);
    connect->Link.timeout = 5;//timeout in seconds
    RTMP_SetupURL(connect, path);//set publish URL; NOTE(review): return value unchecked
    RTMP_EnableWrite(connect);
    if (!RTMP_Connect(connect, NULL)) {//establish the socket
        //connection failed
        LOGE("建立rtmp连接失败");
        //notify the Java layer
        throwNativeInfo(env, errorId, -99);
        pthread_mutex_lock(&mutex);

        isPublishing = false;
        RTMP_Close(connect);
        RTMP_Free(connect);
        free(path);
        path = NULL;

        pthread_mutex_unlock(&mutex);
        release(env);

        jvm->DetachCurrentThread();
        pthread_exit(0);
    }
    RTMP_ConnectStream(connect, 0);//connect the stream; NOTE(review): return value unchecked
    LOGE("推流连接建立");
    throwNativeInfo(env, stateId, 100);//state 100: connected

    while (isPublishing) {
        RTMPPacket *packet = get();
        if (packet == NULL) {//woken without data (e.g. shutdown) — re-check flag
            continue;
        }

        //send over the established stream
        packet->m_nInfoField2 = connect->m_stream_id;
        int ret = RTMP_SendPacket(connect, packet, 1);//1: use librtmp's own queue
        if (!ret) {
            LOGE("rtmp断开");
            throwNativeInfo(env, errorId, -100);
        }

        RTMPPacket_Free(packet);
        free(packet);
    }

    LOGE("结束推流");
    //release connection resources
    RTMP_Close(connect);
    RTMP_Free(connect);
    free(path);
    path = NULL;
    throwNativeInfo(env, stateId, 101);//state 101: stopped
    release(env);
    jvm->DetachCurrentThread();
    pthread_exit(0);
}

/**
 * Start the push thread for the given RTMP URL. No-op when already
 * publishing. Copies the URL into the global `path` (freed by the thread).
 *
 * BUG FIX: the original declared a local `pthread_t *pid` that shadowed
 * the global `pid`, so the global stayed NULL and release() never
 * destroyed the cond/mutex. The thread id now lives in static storage and
 * the global pointer is set before the thread starts.
 * @param url_ RTMP publish URL
 */
extern "C"
JNIEXPORT void JNICALL
Java_com_aruba_rtmppushapplication_push_natives_NativePush_startPush(JNIEnv *env, jobject instance,
                                                                     jstring url_) {
    if (isPublishing)//thread already running
        return;

    if (!jPublisherObj) {
        jPublisherObj = env->NewGlobalRef(instance);
    }

    LOGE("开始推流");
    const char *url = env->GetStringUTFChars(url_, 0);

    //take a heap copy of the url (owned by the push thread from here on)
    int url_len = strlen(url) + 1;
    path = (char *) (malloc(url_len));
    if (!path) {//BUG FIX: original dereferenced an unchecked malloc result
        env->ReleaseStringUTFChars(url_, url);
        return;
    }
    memset(path, 0, url_len);
    memcpy(path, url, url_len - 1);

    pthread_cond_init(&cond, NULL);
    pthread_mutex_init(&mutex, NULL);
    start_time = RTMP_GetTime();

    //static storage so the handle outlives this call; global pid doubles
    //as release()'s "thread was started" flag
    static pthread_t worker;
    pid = &worker;
    pthread_create(&worker, NULL, startPush, NULL);

    env->ReleaseStringUTFChars(url_, url);
}

/**
 * Stop publishing. When the push thread is running, atomically clear
 * isPublishing and wake a possibly-waiting consumer; the thread then tears
 * everything down in startPush(). Otherwise release resources directly.
 *
 * BUG FIX: the original signalled the condition variable BEFORE locking
 * the mutex and clearing the flag. The signal could fire while the worker
 * held the mutex but had not yet called pthread_cond_wait, losing the
 * wakeup and leaving the worker blocked in get() forever. Signalling
 * inside the critical section after clearing the flag fixes the race.
 */
extern "C"
JNIEXPORT void JNICALL
Java_com_aruba_rtmppushapplication_push_natives_NativePush_stopPush(JNIEnv *env, jobject instance) {

    if (isPublishing) {
        pthread_mutex_lock(&mutex);
        isPublishing = false;
        pthread_cond_signal(&cond);
        pthread_mutex_unlock(&mutex);
    } else {
        release(env);
    }
}

/**
 * Wrap one encoded AAC frame in an FLV/RTMP audio tag and enqueue it.
 * NOTE(review): body[0] = 0xAF encodes AAC / 44.1 kHz / 16-bit / stereo in
 * the FLV audio-tag header — this assumes those are the settings passed to
 * setAudioParams; TODO confirm. Also the AAC sequence header packet
 * (AACPacketType 0 with the AudioSpecificConfig) is never sent anywhere in
 * this file, and the malloc result below is not checked.
 * @param buffer encoded AAC frame
 * @param len    number of bytes in buffer
 */
void send_aac_body(unsigned char *buffer, int len) {
    int size = len + 2;

    RTMPPacket *packet = static_cast<RTMPPacket *>(malloc(sizeof(RTMPPacket)));
    //allocate the internal body buffer
    RTMPPacket_Alloc(packet, size);

    char *body = packet->m_body;
    body[0] = 0xAF;
    body[1] = 0x01;
    memcpy(&body[2], buffer, len);

    //packet metadata
    packet->m_packetType = RTMP_PACKET_TYPE_AUDIO;//audio type
    packet->m_nBodySize = size;
    //relative timestamp so the client can track playback progress
    packet->m_nTimeStamp = RTMP_GetTime() - start_time;
    packet->m_hasAbsTimestamp = 0;
    //channel for this stream's data
    packet->m_nChannel = 0x04;
    packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
    put(packet);
}

/**
 * Open and configure the FAAC AAC encoder. Safe to call repeatedly: a
 * previously opened encoder is closed first. Fills the globals
 * inputSamples and maxOutputBytes from faacEncOpen. On failure the Java
 * side is notified via onPostNativeError(-97/-96).
 * @param sample  sample rate in Hz
 * @param channel number of channels
 */
extern "C"
JNIEXPORT void JNICALL
Java_com_aruba_rtmppushapplication_push_natives_NativePush_setAudioParams(JNIEnv *env,
                                                                          jobject instance,
                                                                          jint sample,
                                                                          jint channel) {
    //BUG FIX: the original leaked the old handle on reconfiguration
    if (handle) {
        faacEncClose(handle);
        handle = 0;
    }
    handle = faacEncOpen(sample, channel, &inputSamples, &maxOutputBytes);
    if (!handle) {
        LOGE("音频编码器打开失败");
        jmethodID errorId = env->GetMethodID(env->GetObjectClass(instance), "onPostNativeError",
                                             "(I)V");
        throwNativeInfo(env, errorId, -97);
        return;
    }

    //configure the encoder
    faacEncConfigurationPtr config = faacEncGetCurrentConfiguration(handle);
    config->mpegVersion = MPEG4;
    config->allowMidside = 1;//mid/side coding
    config->aacObjectType = LOW;//low-complexity profile
    config->outputFormat = 0;//raw output (no ADTS headers)
    config->useTns = 1;//temporal noise shaping
    config->useLfe = 0;
    config->inputFormat = FAAC_INPUT_16BIT;
    config->quantqual = 100;
    config->bandWidth = 0;//bandwidth (0: let faac choose)
    config->shortctl = SHORTCTL_NORMAL;//block switching mode
    
    int ret = faacEncSetConfiguration(handle, config);
    if (!ret) {
        LOGE("音频编码器设置失败");
        jmethodID errorId = env->GetMethodID(env->GetObjectClass(instance), "onPostNativeError",
                                             "(I)V");
        throwNativeInfo(env, errorId, -96);
        return;
    }

    LOGE("音频编码器设置成功");
}

/**
 * Encode one block of 16-bit PCM audio with FAAC and enqueue the encoded
 * AAC frame for RTMP sending. Requires setAudioParams() first.
 * NOTE(review): the `size` parameter is unused — faacEncEncode is always
 * fed `inputSamples` samples, so callers must deliver exactly that many;
 * TODO confirm against the Java side.
 * @param buffer_ 16-bit PCM samples
 * @param size    byte count from Java (currently unused)
 */
extern "C"
JNIEXPORT void JNICALL
Java_com_aruba_rtmppushapplication_push_natives_NativePush_pushAudio(JNIEnv *env, jobject instance,
                                                                     jbyteArray buffer_,
                                                                     jint size) {

    if (!isPublishing || !handle)
        return;

    jbyte *buffer = env->GetByteArrayElements(buffer_, NULL);
    if (!buffer) //BUG FIX: GetByteArrayElements may return NULL on OOM
        return;

    unsigned char *outputBuffer = (unsigned char *) (malloc(
            sizeof(unsigned char) * maxOutputBytes));
    if (!outputBuffer) {//BUG FIX: original passed an unchecked malloc result to faac
        env->ReleaseByteArrayElements(buffer_, buffer, 0);
        return;
    }

    //encode; returns the number of bytes produced (may be 0 while faac
    //buffers input, or negative on error)
    int len = faacEncEncode(handle, (int32_t *) buffer, inputSamples, outputBuffer,
                            maxOutputBytes);
    if (len > 0) {
        send_aac_body(outputBuffer, len);
    }

    env->ReleaseByteArrayElements(buffer_, buffer, 0);
    free(outputBuffer);
}

/**
 * Report to Java how many 16-bit PCM samples faacEncEncode expects per
 * call (the value faacEncOpen wrote into the global inputSamples).
 * @return samples per encode call, 0 before setAudioParams() succeeds
 */
extern "C"
JNIEXPORT jint JNICALL
Java_com_aruba_rtmppushapplication_push_natives_NativePush_getInputSamples(JNIEnv *env,
                                                                           jobject instance) {
    return (jint) inputSamples;
}