#include <jni.h>
#include <malloc.h>
#include <string.h>
#include "Log.h"
#include "x264.h"
#include "queue.h"
#include "rtmp.h"
#include <pthread.h>


//x264 input/output pictures, reused for every frame
x264_picture_t pic_in;
x264_picture_t pic_out;

//timestamp of the first frame; packet timestamps are relative to this
//(was declared twice in the original file — one tentative definition suffices)
unsigned int start_time;
//YUV plane sizes in bytes (I420: u_len == v_len == y_len / 4)
int y_len, u_len, v_len;
//x264 encoder handle
x264_t *x264_encoder;

//synchronization between the JNI producer threads and the push thread
pthread_mutex_t mutex;
pthread_cond_t cond;

//rtmp publish url (heap copy owned by this file)
char *rtmp_path;



void add_264_sequence_header(unsigned char pps[100], unsigned char sps[100], int pps_len, int sps_len);

void add_264_body(uint8_t *payload, int i_payload);

void add_rtmp_packet(RTMPPacket *pPacket);

//JNIEXPORT jlong JNICALL
//Java_com_liuyongyi_livepush_rtmp_RtmpClient_open(JNIEnv *env, jclass type, jstring url_,
//                                                 jboolean isPublishMode) {
//    const char *url = (*env)->GetStringUTFChars(env, url_, 0);
//    LOGD("RTMP_OPENING:%s",url);
//    RTMP* rtmp = RTMP_Alloc();
//    if (rtmp == NULL) {
//        LOGD("RTMP_Alloc=NULL");
//        return NULL;
//    }
//
//    RTMP_Init(rtmp);
//    int ret = RTMP_SetupURL(rtmp, url);
//
//    if (!ret) {
//        RTMP_Free(rtmp);
//        rtmp=NULL;
//        LOGD("RTMP_SetupURL=ret");
//        return NULL;
//    }
//    if (isPublishMode) {
//        RTMP_EnableWrite(rtmp);
//    }
//
//    ret = RTMP_Connect(rtmp, NULL);
//    if (!ret) {
//        RTMP_Free(rtmp);
//        rtmp=NULL;
//        LOGD("RTMP_Connect=ret");
//        return NULL;
//    }
//    ret = RTMP_ConnectStream(rtmp, 0);
//
//    if (!ret) {
//        ret = RTMP_ConnectStream(rtmp, 0);
//        RTMP_Close(rtmp);
//        RTMP_Free(rtmp);
//        rtmp=NULL;
//        LOGD("RTMP_ConnectStream=ret");
//        return NULL;
//    }
//    (*env)->ReleaseStringUTFChars(env, url_, url);
//    LOGD("RTMP_OPENED");
//    return rtmp;
//}
//
//JNIEXPORT jint JNICALL
//Java_com_liuyongyi_livepush_rtmp_RtmpClient_read(JNIEnv *env, jclass type, jlong rtmp,
//                                                 jbyteArray data_, jint offset, jint size) {
//    char* data = malloc(size*sizeof(char));
//
//    int readCount = RTMP_Read((RTMP*)rtmp, data, size);
//
//    if (readCount > 0) {
//        (*env)->SetByteArrayRegion(env, data_, offset, readCount, data);  // copy
//    }
//    free(data);
//
//    return readCount;
//}
//
//JNIEXPORT jint JNICALL
//Java_com_liuyongyi_livepush_rtmp_RtmpClient_write(JNIEnv * env, jobject thiz,jlong rtmp, jbyteArray data, jint size, jint type, jint ts) {
//    LOGD("start write");
//    jbyte *buffer = (*env)->GetByteArrayElements(env, data, NULL);
//    RTMPPacket *packet = (RTMPPacket*)malloc(sizeof(RTMPPacket));
//    RTMPPacket_Alloc(packet, size);
//    RTMPPacket_Reset(packet);
//    if (type == RTMP_PACKET_TYPE_INFO) { // metadata
//        packet->m_nChannel = 0x03;
//    } else if (type == RTMP_PACKET_TYPE_VIDEO) { // video
//        packet->m_nChannel = 0x04;
//    } else if (type == RTMP_PACKET_TYPE_AUDIO) { //audio
//        packet->m_nChannel = 0x05;
//    } else {
//        packet->m_nChannel = -1;
//    }
//
//    packet->m_nInfoField2  =  ((RTMP*)rtmp)->m_stream_id;
//
//    LOGD("write data type: %d, ts %d", type, ts);
//
//    memcpy(packet->m_body,  buffer,  size);
//    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
//    packet->m_hasAbsTimestamp = FALSE;
//    packet->m_nTimeStamp = ts;
//    packet->m_packetType = type;
//    packet->m_nBodySize  = size;
//    int ret = RTMP_SendPacket((RTMP*)rtmp, packet, 0);
//    RTMPPacket_Free(packet);
//    free(packet);
//    (*env)->ReleaseByteArrayElements(env, data, buffer, 0);
//    if (!ret) {
//        LOGD("end write error %d", sockerr);
//        return sockerr;
//    }else
//    {
//        LOGD("end write success");
//        return 0;
//    }
//}
//
//JNIEXPORT jint JNICALL
//Java_com_liuyongyi_livepush_rtmp_RtmpClient_close(JNIEnv *env, jclass type, jlong rtmp) {
//
//    RTMP_Close((RTMP*)rtmp);
//    RTMP_Free((RTMP*)rtmp);
//    return 0;
//
//}
//
//JNIEXPORT jstring JNICALL
//Java_com_liuyongyi_livepush_rtmp_RtmpClient_getIpAddr(JNIEnv *env, jclass type, jlong rtmp) {
//    if(rtmp!=0){
//        RTMP* r= (RTMP*)rtmp;
//        return (*env)->NewStringUTF(env, r->ipaddr);
//    }else {
//        return (*env)->NewStringUTF(env, "");
//    }
//}

//Worker thread: connects to the RTMP server given by rtmp_path, then loops
//forever sending packets taken from the shared queue. Runs until the
//connection drops or a setup step fails.
void *push_thread(void * arg){
    //establish the connection
    RTMP *rtmp = RTMP_Alloc();
    if(!rtmp){
        LOGD("RTMP初始化失败");
        return 0; //nothing allocated yet, so no cleanup needed
    }
    RTMP_Init(rtmp);
    rtmp->Link.timeout =5;//connect/read timeout in seconds
    //set the publish url; bail out on a malformed url instead of connecting blindly
    if(!RTMP_SetupURL(rtmp,rtmp_path)){
        LOGD("%s","RTMP_SetupURL 失败");
        goto end;
    }

    RTMP_EnableWrite(rtmp);

    if(!RTMP_Connect(rtmp,NULL)){
        LOGD("%s","RTMP 连接失败");
        goto end;
    }
    //reference point for relative packet timestamps
    start_time = RTMP_GetTime();
    if(!RTMP_ConnectStream(rtmp,0)){ //connect the stream
        goto end;
    }
    for (;;) {
        //wait until a producer signals that a packet was queued.
        //NOTE(review): pthread_cond_wait may wake spuriously; queue_get_first()
        //returning NULL below makes that iteration a harmless no-op.
        pthread_mutex_lock(&mutex);
        pthread_cond_wait(&cond,&mutex);
        //take the oldest RTMPPacket from the queue
        RTMPPacket *packet = queue_get_first();
        if(packet){
            queue_delete_first(); //remove it from the queue
            packet->m_nInfoField2 = rtmp->m_stream_id; //RTMP stream_id for this connection
            int ok = RTMP_SendPacket(rtmp,packet,TRUE); //TRUE: queued inside librtmp, not sent immediately
            RTMPPacket_Free(packet); //releases the packet body buffer
            free(packet);            //the struct itself was malloc'd by the producer
            if(!ok){
                LOGD("RTMP 断开");
                pthread_mutex_unlock(&mutex);
                goto end;
            }
        }
        pthread_mutex_unlock(&mutex);
    }
end:
    LOGD("%s","释放资源");
    RTMP_Close(rtmp);
    RTMP_Free(rtmp);
    return 0;
}

//JNI entry: copies the publish url, initializes the mutex/cond and packet
//queue, then starts the detached push thread.
JNIEXPORT void JNICALL
Java_com_liuyongyi_livepush_camerapush_natives_PusherNative_startPusher(JNIEnv *env,
                                                                        jobject instance,
                                                                        jstring url_jstr) {
    const char *url_cstr = (*env)->GetStringUTFChars(env, url_jstr, NULL);
    if (!url_cstr) {
        return; //OOM: the JVM has already thrown a pending OutOfMemoryError
    }

    //take a heap copy of the url — url_cstr is only valid until released below.
    //NOTE(review): a previous rtmp_path is leaked if startPusher is called twice
    //while the old push thread may still be reading it — TODO confirm lifecycle.
    size_t url_size = strlen(url_cstr) + 1; //include the terminating NUL
    rtmp_path = malloc(url_size);
    if (!rtmp_path) {
        (*env)->ReleaseStringUTFChars(env, url_jstr, url_cstr);
        return;
    }
    memcpy(rtmp_path, url_cstr, url_size);

    //initialize the mutex and condition variable shared with push_thread
    pthread_mutex_init(&mutex,NULL);
    pthread_cond_init(&cond,NULL);

    //create the packet queue
    create_queue();

    pthread_t push_thread_id;
    pthread_create(&push_thread_id,NULL,push_thread,NULL);
    pthread_detach(push_thread_id); //never joined: let the OS reclaim it on exit

    (*env)->ReleaseStringUTFChars(env, url_jstr, url_cstr);
}


//JNI entry: releases the stored publish url.
//NOTE(review): this frees rtmp_path while push_thread may still be running and
//reading it; the thread should be signalled to stop first — TODO confirm.
JNIEXPORT void JNICALL
Java_com_liuyongyi_livepush_camerapush_natives_PusherNative_stopPusher(JNIEnv *env,
                                                                       jobject instance) {

    free(rtmp_path);
    rtmp_path = NULL; //avoid a dangling pointer / double free on repeated calls

}

//JNI entry: native resource cleanup. Not implemented — presumably should close
//the x264 encoder and destroy the queue/mutex/cond — TODO confirm intent.
JNIEXPORT void JNICALL
Java_com_liuyongyi_livepush_camerapush_natives_PusherNative_release(JNIEnv *env, jobject instance) {

    // TODO

}

//JNI entry: configures and opens the x264 encoder for the given frame size,
//bitrate (bits per second) and frame rate, and allocates the input picture.
JNIEXPORT void JNICALL
Java_com_liuyongyi_livepush_camerapush_natives_PusherNative_setVideoOptions(JNIEnv *env, jobject instance, jint width, jint height, jint bitrate, jint fps) {
    x264_param_t param;
    //fastest preset, tuned for minimal latency (live streaming)
    x264_param_default_preset(&param,"ultrafast","zerolatency");

    //I420 plane sizes for the converter in sendVideo
    y_len = width * height;
    u_len = y_len / 4;
    v_len = u_len;

    //input pixel format: planar YUV 4:2:0
    param.i_csp = X264_CSP_I420;
    param.i_width  = width;
    param.i_height = height;
    //rate control: CQP (constant QP), CRF (constant quality), ABR (average bitrate).
    //CRF keeps perceptual quality constant; the VBV cap below bounds peak bitrate.
    param.rc.i_rc_method = X264_RC_CRF;
    param.rc.i_bitrate = bitrate / 1000; //target bitrate in kbps
    param.rc.i_vbv_max_bitrate = bitrate / 1000 * 1.2; //instantaneous peak cap
    //x264 ignores i_vbv_max_bitrate unless a VBV buffer size is also set
    param.rc.i_vbv_buffer_size = bitrate / 1000;

    //rate control driven by fps rather than timebase/timestamps
    param.b_vfr_input = 0;
    param.i_fps_num = fps; //frame rate numerator
    param.i_fps_den = 1;   //frame rate denominator
    param.i_timebase_den = param.i_fps_num;
    param.i_timebase_num = param.i_fps_den;
    param.i_threads = 1;//number of encoding threads; 0 would mean auto/multi-threaded

    //put SPS and PPS in front of every keyframe
    //(SPS = Sequence Parameter Set, PPS = Picture Parameter Set)
    //improves stream recoverability for late joiners / packet loss
    param.b_repeat_headers = 1;
    //H.264 level
    param.i_level_idc = 51;
    //baseline profile: no B-frames (lowest decode latency)
    x264_param_apply_profile(&param,"baseline");

    //allocate the reusable input picture
    x264_picture_alloc(&pic_in, param.i_csp, param.i_width, param.i_height);

    //open the encoder; a NULL handle here would crash sendVideo later
    x264_encoder = x264_encoder_open(&param);
    if(x264_encoder){
        LOGD("打开编码器成功...");
    }else{
        LOGD("打开编码器失败");
    }

}

//JNI entry: audio encoder configuration. Not implemented — presumably should
//set up an AAC encoder from the sample rate and channel count — TODO confirm.
JNIEXPORT void JNICALL
Java_com_liuyongyi_livepush_camerapush_natives_PusherNative_setAudioOptions(JNIEnv *env,
                                                                            jobject instance,
                                                                            jint simpleRateInHz,
                                                                            jint channel) {


}

//Wraps one H.264 NAL unit (with its Annex-B start code) in an RTMP/FLV video
//tag body and queues it for the push thread. The queued packet and its body
//are freed by push_thread after sending.
void add_264_body(unsigned char *buf, int len) {
    //strip the Annex-B start code (delimiter)
    if(buf[2] == 0x00){  //4-byte start code: 00 00 00 01
        buf += 4;
        len -= 4;
    }else if(buf[2] == 0x01){ //3-byte start code: 00 00 01
        buf += 3;
        len -= 3;
    }
    int body_size = len + 9; //5-byte FLV video tag header + 4-byte NALU length
    RTMPPacket *packet = malloc(sizeof(RTMPPacket));
    if (!packet) {
        return; //OOM: drop this frame rather than crash
    }
    RTMPPacket_Alloc(packet,body_size);

    unsigned char * body = packet->m_body;
    //NAL header: low 5 bits are the type; type 5 marks an IDR (keyframe) slice
    //e.g. 00000101 & 00011111(0x1f) = 00000101
    int type = buf[0] & 0x1f;
    //default: inter frame
    body[0] = 0x27;//VideoHeaderTag:FrameType(2=Inter Frame)+CodecID(7=AVC)
    //IDR picture (keyframe)
    if (type == NAL_SLICE_IDR) {
        body[0] = 0x17;//VideoHeaderTag:FrameType(1=key frame)+CodecID(7=AVC)
    }
    //AVCPacketType = 1
    body[1] = 0x01; /*nal unit,NALUs（AVCPacketType == 1)*/
    body[2] = 0x00; //composition time 0x000000, 24-bit
    body[3] = 0x00;
    body[4] = 0x00;

    //NALU length, 32-bit big-endian
    body[5] = (len >> 24) & 0xff;
    body[6] = (len >> 16) & 0xff;
    body[7] = (len >> 8) & 0xff;
    body[8] = (len) & 0xff;

    /*copy data*/
    memcpy(&body[9], buf, len);

    packet->m_hasAbsTimestamp = 0;
    packet->m_nBodySize = body_size;
    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;//packet type: video
    packet->m_nChannel = 0x04;
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
    //timestamp of each tag relative to the first tag, in milliseconds
    packet->m_nTimeStamp = RTMP_GetTime() - start_time;
    add_rtmp_packet(packet);

}


//Appends a packet to the shared queue and wakes the push thread.
//Internal helper, not a JNI entry point — the spurious JNIEXPORT that
//mismatched the forward declaration has been removed.
void add_rtmp_packet(RTMPPacket *packet) {
    pthread_mutex_lock(&mutex);
    queue_append_last(packet);
    pthread_cond_signal(&cond);
    pthread_mutex_unlock(&mutex);
}
//Builds an AVCDecoderConfigurationRecord (sequence header) from raw SPS/PPS
//data (start codes already stripped) and queues it for the push thread.
void add_264_sequence_header(unsigned char *pps, unsigned char *sps, int pps_len, int sps_len) {
    RTMPPacket *packet = malloc(sizeof(RTMPPacket));
    if (!packet) {
        return; //OOM: drop the header rather than crash
    }
    int body_size = 16+ sps_len + pps_len;//16 fixed bytes per the H.264/FLV layout below
    RTMPPacket_Alloc(packet,body_size);
    unsigned char * body = packet->m_body;
    int i = 0;
    //binary: 00010111
    body[i++] = 0x17;//VideoHeaderTag:FrameType(1=key frame)+CodecID(7=AVC)
    body[i++] = 0x00;//AVCPacketType = 0: AVCDecoderConfigurationRecord follows
    //composition time 0x000000, 24-bit
    body[i++] = 0x00;
    body[i++] = 0x00;
    body[i++] = 0x00;

    /*AVCDecoderConfigurationRecord*/
    body[i++] = 0x01;//configurationVersion = 1
    body[i++] = sps[1];//AVCProfileIndication
    body[i++] = sps[2];//profile_compatibility
    body[i++] = sps[3];//AVCLevelIndication
    //lengthSizeMinusOne: NALU length field size = 1 + (lengthSizeMinusOne & 3);
    //0xFF yields 4-byte length prefixes, matching add_264_body
    body[i++] = 0xFF;

    /*sps*/
    body[i++] = 0xE1;//numOfSequenceParameterSets & 0x1F = 1 SPS
    body[i++] = (sps_len >> 8) & 0xff;//sequenceParameterSetLength (16-bit BE)
    body[i++] = sps_len & 0xff;//sequenceParameterSetNALUnits follow
    memcpy(&body[i], sps, sps_len);
    i += sps_len;

    /*pps*/
    body[i++] = 0x01;//numOfPictureParameterSets = 1 PPS
    body[i++] = (pps_len >> 8) & 0xff;//pictureParameterSetLength (16-bit BE)
    body[i++] = (pps_len) & 0xff;//pictureParameterSetNALUnits follow
    memcpy(&body[i], pps, pps_len);
    i += pps_len;

    //Message Type: RTMP_PACKET_TYPE_VIDEO = 0x09
    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
    //Payload Length
    packet->m_nBodySize = body_size;
    //timestamp relative to the first tag, in ms; the header itself is always 0
    packet->m_nTimeStamp = 0;
    packet->m_hasAbsTimestamp = 0;
    packet->m_nChannel = 0x04; //channel ID used for audio/video data
    packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
    //enqueue the RTMPPacket for the push thread
    add_rtmp_packet(packet);

}

void JNICALL
Java_com_liuyongyi_livepush_camerapush_natives_PusherNative_sendVideo(JNIEnv *env, jobject instance,
                                                                      jbyteArray data_) {
    jbyte *nv21_data = (*env)->GetByteArrayElements(env, data_, NULL);

    jbyte* u = pic_in.img.plane[1];
    jbyte* v = pic_in.img.plane[2];
    //nv21 4:2:0 Formats, 12 Bits per Pixel
    //nv21与yuv420p，y个数一致，uv位置对调
    //nv21转yuv420p  y = w*h,u/v=w*h/4
    //nv21 = yvu yuv420p=yuv y=y u=y+1+1 v=y+1
    memcpy(pic_in.img.plane[0], nv21_data, y_len);
    int i;
    for (i = 0; i < u_len; i++) {
        *(u + i) = *(nv21_data + y_len + i * 2 + 1);
        *(v + i) = *(nv21_data + y_len + i * 2);
    }
    //x264编码之后得到的是nalu数组
    x264_nal_t *nal_t = NULL;
    int nalnumber = -1;//nal的个数
    //编码
    if(x264_encoder_encode(x264_encoder,&nal_t,&nalnumber,&pic_in,&pic_out) < 0){
        LOGD("编码失败");
        return;
    }
    int sps_len,pps_len;
    unsigned char sps[100];
    unsigned char pps[100];

    //使用过rtmp协议将h264编码的视频发送到流媒体服务器
    int j =0;//遍历NAL单元
    for(;j < &nalnumber;j++){
        if(nal_t[i].i_type == NAL_SPS){
            //复制SPS数据
            sps_len = nal_t[i].i_payload - 4;
            memcpy(sps,nal_t[i].p_payload + 4,sps_len); //不复制四字节起始码
        }else if(nal_t[i].i_type == NAL_PPS){
            //复制PPS数据
            pps_len = nal_t[i].i_payload - 4;
            memcpy(pps,nal_t[i].p_payload + 4,pps_len); //不复制四字节起始码

            //发送序列信息
            //h264关键帧会包含SPS和PPS数据
            add_264_sequence_header(pps,sps,pps_len,sps_len);

        }else{
            //发送帧信息
            add_264_body(nal_t[i].p_payload,nal_t[i].i_payload);
        }
    }


    (*env)->ReleaseByteArrayElements(env, data_, nv21_data, 0);
}


//JNI entry: audio frame submission. Currently only pins and releases the
//buffer — encoding/queuing is not implemented yet.
JNIEXPORT void JNICALL
Java_com_liuyongyi_livepush_camerapush_natives_PusherNative_sendAudio(JNIEnv *env, jobject instance,
                                                                      jbyteArray data_, jint leng) {
    jbyte *data = (*env)->GetByteArrayElements(env, data_, NULL);

    // TODO

    (*env)->ReleaseByteArrayElements(env, data_, data, 0);
}