//
// Created by EDZ on 2020/2/14.
//

#include "VideoChannel.h"
#include <rtmp.h>

// Set up the mutex that serializes encoder (re)initialization and encoding.
VideoChannel::VideoChannel() {
    pthread_mutex_init(&mutex, nullptr);
}

// Tear down the mutex created in the constructor.
VideoChannel::~VideoChannel(){
    pthread_mutex_destroy(&mutex);
}

/**
 * 初始化x264编码器
 * @param width
 * @param height
 * @param bitrate
 * @param fps
 */
void VideoChannel::initVideoEncoder(int width, int height, int bitrate, int fps) {
    // 宽高发生改变，如果正在编码，重复初始化
    pthread_mutex_lock(&mutex);
    mWidth = width;
    mHeight = height;
    mBitrate = bitrate;
    mFps = fps;
    // yuv大小
    y_len = width * height;
    uv_len = y_len /4;

    x264_param_t parm;
    // 最快 零延迟
    x264_param_default_preset(&parm, "ultrafast", "zerolatency");
    // 一帧多少宏块 = 像素值 / 宏块（16*16）
    // 一秒多少宏块 = fps * 一帧多少宏块
    // 编码格式：参考编码规格：参考https://wikipedia.tw.wjbk.site/wiki/H.264/MPEG-4_AVC
    parm.i_level_idc = 32;
    parm.i_width = mWidth;
    parm.i_height = mHeight;

    parm.i_bframe = 0; // 是否没有一帧
    // abr平均码率  cpq恒定质量  crf恒定码率
    parm.rc.i_rc_method = X264_RC_ABR;
    //比特率，单位 kb/s
    parm.rc.i_bitrate = mBitrate / 1000;
    // 瞬时最大码率控制
    parm.rc.i_vbv_max_bitrate = mBitrate / 1000 * 1.2;
    // 缓冲区大小
    parm.rc.i_vbv_buffer_size = mBitrate / 1000;
    // 码率控制不通过时间戳和timebase控制，而是根据fps
    parm.b_vfr_input = 0;
    parm.i_fps_num = mFps; // fps分子
    parm.i_fps_den = 1; // fps分母
    parm.i_timebase_den = parm.i_fps_num;
    parm.i_timebase_num = parm.i_fps_den;
    // 关键帧距离
    parm.i_keyint_max = mFps * 2; // 两秒一个关键帧
    // 是否复制sps和pps到每个关键帧的前面
    parm.b_repeat_headers = 1;
    parm.i_threads = 1;
    x264_param_apply_profile(&parm, "baseline");
    // 输入图像初始化
    pic_in = new x264_picture_t;
    x264_picture_alloc(pic_in, parm.i_csp, parm.i_width, parm.i_height);

    videoEncoder = x264_encoder_open(&parm);
    if(videoEncoder){
        LOGE("X264编码器打开成功");
    }
    pthread_mutex_unlock(&mutex);
}

/**
 * Encode one NV21 camera frame and push the resulting NAL units out
 * through the RTMP packaging helpers.
 *
 * NV21 layout is the full Y plane followed by interleaved V,U pairs; x264
 * expects planar I420, so the chroma bytes are de-interleaved here.
 *
 * @param data NV21 frame buffer, y_len + 2 * uv_len bytes
 */
void VideoChannel::encodeData(int8_t *data) {
    pthread_mutex_lock(&mutex);
    // Guard against being called before (or during a failed) initVideoEncoder().
    if (!videoEncoder || !pic_in) {
        pthread_mutex_unlock(&mutex);
        return;
    }
    // Y plane: straight copy.
    memcpy(pic_in->img.plane[0], data, y_len);
    // De-interleave NV21 chroma: each pair is V then U —
    // U goes to plane[1], V to plane[2].
    for (int i = 0; i < uv_len; ++i) {
        *(pic_in->img.plane[1] + i) = *(data + y_len + 2 * i + 1); // U
        *(pic_in->img.plane[2] + i) = *(data + y_len + 2 * i);     // V
    }
    // BUGFIX: pts was never set, so every frame reached x264 with pts 0,
    // producing non-monotonic DTS warnings and broken timing.
    // NOTE(review): a function-local static is shared across VideoChannel
    // instances — promote to a member field if several encoders coexist.
    static int64_t s_pts = 0;
    pic_in->i_pts = s_pts++;

    x264_nal_t *nals = nullptr;
    x264_picture_t pic_out;
    int pi_nal = 0;
    int ret = x264_encoder_encode(videoEncoder, &nals, &pi_nal, pic_in, &pic_out);
    if (ret < 0) {
        LOGE("x264编码失败");
        pthread_mutex_unlock(&mutex);
        return;
    }

    int sps_len = 0, pps_len = 0; // initialized: PPS branch reads sps_len
    uint8_t sps[100];
    uint8_t pps[100];

    for (int i = 0; i < pi_nal; ++i) {
        if (nals[i].i_type == NAL_SPS) {
            // Strip the 4-byte Annex-B start code and stash the SPS.
            sps_len = nals[i].i_payload - 4;
            memcpy(sps, nals[i].p_payload + 4, sps_len);
        } else if (nals[i].i_type == NAL_PPS) {
            pps_len = nals[i].i_payload - 4;
            memcpy(pps, nals[i].p_payload + 4, pps_len);
            // x264 emits PPS right after SPS, so both are ready to send together.
            sendSpsPps(sps, pps, sps_len, pps_len);
        } else {
            // Slice data (IDR or non-IDR) — packaged as an AVC NALU packet.
            sendFrame(nals[i].i_type, nals[i].p_payload, nals[i].i_payload);
        }
    }
    pthread_mutex_unlock(&mutex);
}

/**
 * Package the SPS and PPS into an AVC sequence header RTMP packet
 * (FLV video tag carrying an AVCDecoderConfigurationRecord) and hand it
 * to the send callback.
 *
 * @param sps    SPS payload, Annex-B start code already stripped
 * @param pps    PPS payload, Annex-B start code already stripped
 * @param spsLen SPS length in bytes
 * @param ppsLen PPS length in bytes
 */
void VideoChannel::sendSpsPps(uint8_t *sps, uint8_t *pps, int spsLen, int ppsLen) {
    // Value-initialize: RTMPPacket_Alloc only sets up the body buffer, so the
    // plain `new RTMPPacket` left header fields librtmp reads (e.g.
    // m_nInfoField2) as garbage.
    RTMPPacket *packet = new RTMPPacket{};
    // 5-byte FLV video tag header + 8 fixed config bytes + SPS + 3 + PPS.
    int body_size = 5 + 8 + spsLen + 3 + ppsLen;
    RTMPPacket_Alloc(packet, body_size);
    int i = 0;
    packet->m_body[i++] = 0x17; // frame type 1 (keyframe) | codec id 7 (AVC)
    packet->m_body[i++] = 0x00; // AVC packet type 0: sequence header
    packet->m_body[i++] = 0x00; // composition time = 0 (3 bytes)
    packet->m_body[i++] = 0x00;
    packet->m_body[i++] = 0x00;

    packet->m_body[i++] = 0x01;   // configurationVersion
    packet->m_body[i++] = sps[1]; // AVCProfileIndication
    packet->m_body[i++] = sps[2]; // profile_compatibility
    packet->m_body[i++] = sps[3]; // AVCLevelIndication

    packet->m_body[i++] = 0xFF; // NALU length field is 4 bytes
    packet->m_body[i++] = 0xE1; // one SPS follows

    // 16-bit big-endian SPS length, then the SPS itself.
    packet->m_body[i++] = (spsLen >> 8) & 0xFF;
    packet->m_body[i++] = spsLen & 0xFF;
    memcpy(&packet->m_body[i], sps, spsLen);
    i += spsLen; // advance past the SPS bytes

    packet->m_body[i++] = 0x01; // one PPS follows

    // 16-bit big-endian PPS length, then the PPS itself.
    packet->m_body[i++] = (ppsLen >> 8) & 0xFF;
    packet->m_body[i++] = ppsLen & 0xFF;
    memcpy(&packet->m_body[i], pps, ppsLen);

    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
    packet->m_nBodySize = body_size;
    packet->m_nTimeStamp = 0; // sequence header always goes out at timestamp 0
    packet->m_hasAbsTimestamp = 0;
    packet->m_nChannel = 10; // dedicated video channel
    packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;

    // Hand off — the callback owns (and must free) the packet from here on.
    videoCallback(packet);
}

/**
 * Register the callback that receives finished RTMP packets.
 * @param callback invoked with each packaged packet; ownership of the
 *                 packet transfers to it
 */
void VideoChannel::setVideoCallback(VideoChannel::VideoCallback callback) {
    videoCallback = callback;
}

/**
 * Package one encoded NAL unit as an RTMP video packet (FLV AVC NALU tag)
 * and hand it to the send callback.
 *
 * @param type     NAL unit type (NAL_SLICE_IDR marks a keyframe)
 * @param payload  NAL unit including its Annex-B start code
 * @param iPayload NAL unit length in bytes
 */
void VideoChannel::sendFrame(int type, uint8_t *payload, int iPayload) {
    // Strip the Annex-B start code: 00 00 00 01 (4 bytes) or 00 00 01 (3 bytes).
    if (payload[2] == 0x00) {
        payload += 4;
        iPayload -= 4;
    } else if (payload[2] == 0x01) {
        payload += 3;
        iPayload -= 3;
    }

    // Value-initialize so header fields librtmp reads are zeroed, not garbage
    // (RTMPPacket_Alloc only sets up the body buffer).
    RTMPPacket *packet = new RTMPPacket{};
    // 5-byte FLV video tag header + 4-byte NALU length + NALU.
    int body_size = 5 + 4 + iPayload;
    RTMPPacket_Alloc(packet, body_size);

    // High nibble: frame type (1 = keyframe, 2 = inter frame); low nibble: 7 = AVC.
    // BUGFIX: the original wrote 0x17 in BOTH branches, mislabelling every
    // inter frame as a keyframe in the FLV tag.
    packet->m_body[0] = 0x27;     // inter frame
    if (type == NAL_SLICE_IDR) {
        packet->m_body[0] = 0x17; // keyframe
    }
    packet->m_body[1] = 0x01; // AVC packet type 1: NALU
    packet->m_body[2] = 0x00; // composition time = 0 (3 bytes)
    packet->m_body[3] = 0x00;
    packet->m_body[4] = 0x00;

    // 32-bit big-endian NALU length.
    packet->m_body[5] = (iPayload >> 24) & 0xFF;
    packet->m_body[6] = (iPayload >> 16) & 0xFF;
    packet->m_body[7] = (iPayload >> 8) & 0xFF;
    packet->m_body[8] = iPayload & 0xFF;

    memcpy(&packet->m_body[9], payload, iPayload);

    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
    packet->m_nBodySize = body_size;
    // NOTE(review): -1 looks like a "fill in later" sentinel — presumably the
    // callback stamps the real send time; verify against the callback impl.
    packet->m_nTimeStamp = -1;
    packet->m_hasAbsTimestamp = 1;
    packet->m_nChannel = 10; // same channel as the SPS/PPS packet
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;

    // Hand off — the callback owns (and must free) the packet from here on.
    videoCallback(packet);
}



