//
// Created by Whg on 2019-09-18.
//

#include <cstring>
#include "VideoChannel.h"
#include "include/x264.h"
#include "librtmp/rtmp.h"
#include "macro.h"


// Register the callback that receives finished RTMP packets
// (the callback takes ownership of each packet it is handed).
void VideoChannel::setVideoCallback(VideoChannel::VideoCallback callback) {
    videoCallback = callback;
}

/**
 * 发送关键帧和非关键帧
 */
/**
 * Package one encoded H.264 NALU (key frame or inter frame) into an RTMP
 * video packet and hand it to the registered callback.
 *
 * @param type      NALU type from x264 (NAL_SLICE_IDR marks a key frame).
 * @param payload   pointer to the NALU, still prefixed with its Annex-B
 *                  start code (00 00 01 or 00 00 00 01).
 * @param i_payload NALU length including the start code.
 *
 * Ownership of the allocated RTMPPacket passes to videoCallback, which is
 * expected to send and release it.
 */
void VideoChannel::sendFrame(int type, uint8_t *payload, int i_payload) {

    // Strip the Annex-B start code: payload[2] == 0x00 means the 4-byte
    // form (00 00 00 01), otherwise the 3-byte form (00 00 01).
    if (payload[2] == 0x00) {
        i_payload -= 4;
        payload += 4;
    } else {
        i_payload -= 3;
        payload += 3;
    }

    // FLV video tag body: 5 header bytes + 4 length bytes + NALU data.
    int bodySize = 9 + i_payload;
    RTMPPacket *packet = new RTMPPacket;
    RTMPPacket_Alloc(packet, bodySize);

    // Byte 0: frame type (high nibble) + codec id (low nibble, 7 = AVC).
    // 0x27 = inter frame, 0x17 = key frame.
    packet->m_body[0] = 0x27;
    if (type == NAL_SLICE_IDR) {
        // Key frame
        packet->m_body[0] = 0x17;
    }

    // AVCPacketType: 1 = AVC NALU.
    packet->m_body[1] = 0x01;
    // CompositionTime: 0 (no B frames, so pts == dts).
    packet->m_body[2] = 0x00;
    packet->m_body[3] = 0x00;
    packet->m_body[4] = 0x00;
    // NALU length, 4 bytes big-endian.
    packet->m_body[5] = (i_payload >> 24) & 0xff;
    packet->m_body[6] = (i_payload >> 16) & 0xff;
    packet->m_body[7] = (i_payload >> 8) & 0xff;
    packet->m_body[8] = (i_payload) & 0xff;

    // NALU data.
    memcpy(&packet->m_body[9], payload, i_payload);

    // Fix: `new RTMPPacket` does not zero its members, so the timestamp was
    // previously left indeterminate. Initialize it; the send callback may
    // overwrite it with the real stream time before transmitting.
    packet->m_nTimeStamp = 0;
    packet->m_hasAbsTimestamp = 0;
    packet->m_nBodySize = bodySize;
    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
    packet->m_nChannel = 0x10;
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;

    videoCallback(packet);
}

void VideoChannel::setVideoEncInfo(int width, int height, int fps, int bitrate) {
    mWidth = width;
    mHeight = height;
    mFps = fps;
    mBitrate = bitrate;
    ySize = width * height;
    //uv21的数据
    uvSize = ySize / 4;

    //初始化x264的编码器
    //编码(H264)
    x264_param_t param;
    x264_param_default_preset(&param, "ultrafast", "zerolatency");
    //编码复杂度
    param.i_level_idc = 32;
    //输入格式  nv21  服务器支持i420
    param.i_csp = X264_CSP_I420;

    param.i_width = width;
    param.i_height = height;
    //无B帧
    param.i_bframe = 0;
    //参数i_rc_method便是码率控制，CQP（恒定质量） CRF（恒定码率）ABR（平均码率）
    param.rc.i_rc_method = X264_RC_ABR;
    //码率
    param.rc.i_bitrate = bitrate / 1000;
    //瞬时最大码率
    param.rc.i_vbv_max_bitrate = bitrate / 1000 * 1.2;
    //设置了i_vbv_max_bitrate必须设置此参数，码率控制区大小，耽误kbps
    param.rc.i_vbv_buffer_size = bitrate / 1000;

    //帧率的分子
    param.i_fps_num = fps;
    //帧率的分母
    param.i_fps_den = 1;

    //为了音视频同步
    //时间基的分子
    param.i_timebase_den = param.i_fps_num;
    param.i_timebase_num = param.i_fps_den;

    param.b_vfr_input = 0;
    //帧距离（关键帧） 2s一个关键帧
    param.i_keyint_max = fps * 2;
    //是否复制sps和pps放在每个关键帧的前面，该参数设置是让每个关键帧（I帧）附带（sps/pps）
    //sps和pps包含了初始化h.264解码器所需要的信息参数
    //sps 序列参数集
    //pps  图像参数集
    param.b_repeat_headers = 1;
    //多线程
    param.i_threads = 1;

    x264_param_apply_profile(&param, "baseline");

    //实例化一个解码器
    videoCodec = x264_encoder_open(&param);
    pic_in = new x264_picture_t;
    x264_picture_alloc(pic_in, X264_CSP_I420, width, height);
}

/**
 * 视频解码  将nv21 ->  yuvi420
 *
         * yyyyyyyy    yyyyyyyy
         * yyyyyyyy    yyyyyyyy
         * yyyyyyyy    yyyyyyyy
         * yyyyyyyy -> yyyyyyyy
         * vuvuvuvu    uuuuuuuu
         * uvuvuvuv    vvvvvvvv
 */
/**
 * Convert one NV21 camera frame to I420, encode it with x264 and forward
 * the resulting NAL units: SPS/PPS go out together via sendSpsPps, every
 * other NALU goes out via sendFrame.
 *
 * NV21 -> I420:
 *   yyyyyyyy    yyyyyyyy
 *   yyyyyyyy -> yyyyyyyy
 *   vuvuvuvu    uuuuuuuu   (interleaved VU is split into
 *   vuvuvuvu    vvvvvvvv    separate U and V planes)
 */
void VideoChannel::encodeData(int8_t *data) {

    // pic_in->img.plane[0]/[1]/[2] hold Y / U / V respectively.
    memcpy(pic_in->img.plane[0], data, ySize);

    // De-interleave the VU pairs that follow the Y plane:
    // odd offsets carry U (1,3,5,...), even offsets carry V (0,2,4,...).
    for (int i = 0; i < uvSize; ++i) {
        *(pic_in->img.plane[1] + i) = *(data + ySize + i * 2 + 1); // U
        *(pic_in->img.plane[2] + i) = *(data + ySize + i * 2);     // V
    }

    x264_nal_t *pp_nal; // array of encoded NAL units
    int pi_nal;         // how many NAL units were produced
    x264_picture_t pic_out;
    // Fix: bail out on encode failure instead of reading pp_nal/pi_nal
    // that x264 did not fill in.
    if (x264_encoder_encode(videoCodec, &pp_nal, &pi_nal, pic_in, &pic_out) < 0) {
        return;
    }

    // Fix: zero-initialize so a PPS arriving without a preceding SPS can
    // never read uninitialized lengths/buffers (previously UB).
    int sps_len = 0;
    int pps_len = 0;
    uint8_t sps[100];
    uint8_t pps[100];
    for (int i = 0; i < pi_nal; ++i) {
        if (pp_nal[i].i_type == NAL_SPS) {
            // Strip the 4-byte start code; SPS/PPS are sent out-of-band.
            sps_len = pp_nal[i].i_payload - 4;
            memcpy(sps, pp_nal[i].p_payload + 4, sps_len);
        } else if (pp_nal[i].i_type == NAL_PPS) {
            pps_len = pp_nal[i].i_payload - 4;
            memcpy(pps, pp_nal[i].p_payload + 4, pps_len);
            // Only send once a matching SPS has actually been captured.
            if (sps_len > 0) {
                sendSpsPps(sps, pps, sps_len, pps_len);
            }
        } else {
            // Key frames and inter frames.
            sendFrame(pp_nal[i].i_type, pp_nal[i].p_payload, pp_nal[i].i_payload);
        }
    }
}

// rmtp 的封装格式
/**
 * Build the AVC sequence header (AVCDecoderConfigurationRecord) from the
 * SPS and PPS produced by x264 and push it as an RTMP video packet. A
 * decoder needs this packet before any frame data can be decoded.
 *
 * Body layout: 13 bytes (fixed header + SPS count/length) + sps
 *            +  3 bytes (PPS count/length)                + pps
 */
void VideoChannel::sendSpsPps(uint8_t *sps, uint8_t *pps, int sps_len, int pps_len) {
    int bodySize = 13 + sps_len + 3 + pps_len;
    RTMPPacket *packet = new RTMPPacket;
    RTMPPacket_Alloc(packet, bodySize);

    int i = 0;
    // Frame type (1 = key frame) + codec id (7 = AVC).
    packet->m_body[i++] = 0x17;
    // AVCPacketType 0 = sequence header, then 3 bytes composition time = 0.
    packet->m_body[i++] = 0x00;
    packet->m_body[i++] = 0x00;
    packet->m_body[i++] = 0x00;
    packet->m_body[i++] = 0x00;

    // configurationVersion.
    packet->m_body[i++] = 0x01;

    // Profile, profile-compatibility and level, copied from the SPS.
    packet->m_body[i++] = sps[1];
    packet->m_body[i++] = sps[2];
    packet->m_body[i++] = sps[3];
    // lengthSizeMinusOne: NALU length fields use 4 bytes.
    packet->m_body[i++] = 0xFF;

    // numOfSequenceParameterSets: one SPS follows.
    packet->m_body[i++] = 0xE1;

    // SPS length, 16-bit big-endian (high byte, then low byte).
    packet->m_body[i++] = (sps_len >> 8) & 0xff;
    packet->m_body[i++] = sps_len & 0xff;

    memcpy(&packet->m_body[i], sps, sps_len);

    i += sps_len;

    // numOfPictureParameterSets: one PPS follows.
    packet->m_body[i++] = 0x01;
    // PPS length, 16-bit big-endian.
    packet->m_body[i++] = (pps_len >> 8) & 0xff;
    packet->m_body[i++] = pps_len & 0xff;

    memcpy(&packet->m_body[i], pps, pps_len);

    // Packet bookkeeping.
    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
    packet->m_nBodySize = bodySize;
    // Fix: use the same chunk stream id as sendFrame (was the decimal
    // literal 10) so all video packets share channel 0x10, avoiding the
    // accidental split of one video stream across two chunk streams.
    packet->m_nChannel = 0x10;

    // The sequence header carries no timestamp.
    packet->m_nTimeStamp = 0;
    // Not an absolute timestamp.
    packet->m_hasAbsTimestamp = 0;

    packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;

    // Hand the packet to the sender; it takes ownership.
    videoCallback(packet);
}

