﻿#include "codecs/h264_encoder_impl.h"

#include <rtc_base/logging.h>
#include <wels/codec_ver.h>
#include <common_video/libyuv/include/webrtc_libyuv.h>
#include <modules/video_coding/utility/simulcast_rate_allocator.h>

namespace xrtc {

namespace {

// When true, raises the OpenH264 trace level to WELS_LOG_DETAIL in
// InitEncode() for verbose encoder logging.
const bool kOpenH264EncoderDetailedLogging = false;
// QP scaling thresholds reported via GetEncoderInfo(): below the low bound
// quality is high enough to scale resolution up; above the high bound the
// resolution should be scaled down.
static const int kLowH264QpThreshold = 24;
static const int kHighH264QpThreshold = 37;

// 将OpenH264定义的帧类型转换成webrtc中定义的帧类型
// Maps an OpenH264 frame type onto the corresponding webrtc frame type.
// IDR frames become key frames; every other encodable type is a delta
// frame; invalid input maps to an empty frame.
webrtc::VideoFrameType ConvertToVideoFrameType(EVideoFrameType type) {
    if (type == EVideoFrameType::videoFrameTypeIDR) {
        return webrtc::VideoFrameType::kVideoFrameKey;
    }

    if (type == EVideoFrameType::videoFrameTypeI ||
        type == EVideoFrameType::videoFrameTypeP ||
        type == EVideoFrameType::videoFrameTypeIPMixed ||
        type == EVideoFrameType::videoFrameTypeSkip)
    {
        return webrtc::VideoFrameType::kVideoFrameDelta;
    }

    // videoFrameTypeInvalid (or anything unexpected).
    return webrtc::VideoFrameType::kEmptyFrame;
}

// Copies the encoded bitstream out of OpenH264's layered output |info|
// into |encoded_image|'s buffer, allocating exactly the required capacity.
void RtpFragmentize(webrtc::EncodedImage* encoded_image,
    SFrameBSInfo* info) 
{
    // First pass: total size of all NAL units across all layers, so the
    // destination buffer can be allocated in a single shot.
    size_t total_size = 0;
    for (int i = 0; i < info->iLayerNum; ++i) {
        const SLayerBSInfo& layer = info->sLayerInfo[i];
        for (int j = 0; j < layer.iNalCount; ++j) {
            total_size += layer.pNalLengthInByte[j];
        }
    }

    auto buffer = webrtc::EncodedImageBuffer::Create(total_size);
    encoded_image->SetEncodedData(buffer);
    encoded_image->set_size(0);

    // Second pass: each layer's NAL units are contiguous in pBsBuf, so one
    // memcpy per layer appends them back to back at the running offset.
    size_t offset = 0;
    for (int i = 0; i < info->iLayerNum; ++i) {
        const SLayerBSInfo& layer = info->sLayerInfo[i];
        size_t layer_size = 0;
        for (int j = 0; j < layer.iNalCount; ++j) {
            layer_size += layer.pNalLengthInByte[j];
        }

        memcpy(buffer->data() + offset, layer.pBsBuf, layer_size);
        offset += layer_size;
    }
    encoded_image->set_size(offset);
}

} // namespace

// Defaults to SingleNalUnit packetization; switches to NonInterleaved when
// the SDP fmtp line carries packetization-mode=1.
H264EncoderImpl::H264EncoderImpl(const cricket::VideoCodec& codec) :
    packetization_mode_(webrtc::H264PacketizationMode::SingleNalUnit)
{
    std::string mode;
    const bool has_mode =
        codec.GetParam(cricket::kH264FmtpPacketizationMode, &mode);
    if (has_mode && mode == "1") {
        packetization_mode_ = webrtc::H264PacketizationMode::NonInterleaved;
    }
}

H264EncoderImpl::~H264EncoderImpl() {
    // Destroy the OpenH264 encoder instance if the owner never called
    // Release(); without this, the ISVCEncoder created in InitEncode()
    // leaks on destruction. Release() is idempotent, so a prior explicit
    // call is harmless.
    Release();
}

// Initializes (or re-initializes) the OpenH264 encoder for codec settings
// |inst|. Returns WEBRTC_VIDEO_CODEC_OK on success, otherwise an error code;
// on failure any partially created encoder is torn down via Release().
int H264EncoderImpl::InitEncode(const webrtc::VideoCodec* inst, 
    const VideoEncoder::Settings& settings) 
{
    // 1. Basic parameter validation.
    if (!inst || inst->codecType != webrtc::kVideoCodecH264) {
        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
    }

    if (inst->maxFramerate == 0) {
        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
    }

    if (inst->width < 1 || inst->height < 1) {
        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
    }

    // 2. Release any encoder left over from a previous initialization.
    int32_t ret = Release();
    if (ret != WEBRTC_VIDEO_CODEC_OK) {
        return ret;
    }

    codec_ = *inst;
    max_payload_size_ = settings.max_payload_size;

    // Seed the LayerConfig from the requested codec settings (bitrates are
    // given in kbps by webrtc::VideoCodec, stored here in bps).
    config_.width = codec_.width;
    config_.height = codec_.height;
    config_.max_frame_rate = static_cast<float>(codec_.maxFramerate);
    config_.target_bps = codec_.startBitrate * 1000;
    config_.max_bps = codec_.maxBitrate * 1000;
    config_.frame_dropping_on = codec_.H264()->frameDroppingOn;
    config_.key_frame_interval = codec_.H264()->keyFrameInterval;

    // 3. Create the OpenH264 encoder instance.
    if (WelsCreateSVCEncoder(&openh264_encoder_) != 0) {
        Release();
        RTC_LOG(LS_WARNING) << "failed to create OpenH264 encoder";
        return WEBRTC_VIDEO_CODEC_ERROR;
    }

    // 4. Optionally enable detailed OpenH264 trace output.
    if (kOpenH264EncoderDetailedLogging) {
        int trace_level = WELS_LOG_DETAIL;
        openh264_encoder_->SetOption(ENCODER_OPTION_TRACE_LEVEL, &trace_level);
    }

    // 5. Build the extended encoder parameters and initialize the encoder.
    SEncParamExt encoder_params = CreateEncoderParams();
    if (openh264_encoder_->InitializeExt(&encoder_params) != 0) {
        RTC_LOG(LS_WARNING) << "init OpenH264 encoder failed";
        Release();
        return WEBRTC_VIDEO_CODEC_ERROR;
    }

    // 6. Set the input picture format (I420).
    int video_format = EVideoFormatType::videoFormatI420;
    openh264_encoder_->SetOption(ENCODER_OPTION_DATAFORMAT, &video_format);

    // 7. Pre-allocate the EncodedImage output buffer. An uncompressed I420
    // frame is a safe upper bound for the encoded size.
    int new_capacity = webrtc::CalcBufferSize(webrtc::VideoType::kI420,
        codec_.width, codec_.height);
    encoded_image_.SetEncodedData(
        webrtc::EncodedImageBuffer::Create(new_capacity));
    encoded_image_._encodedWidth = codec_.width;
    encoded_image_._encodedHeight = codec_.height;
    encoded_image_.set_size(0);

    // Push the initial bitrate/framerate into the encoder via SetRates(),
    // using the default simulcast allocator for the starting allocation.
    webrtc::SimulcastRateAllocator init_allocator(codec_);
    webrtc::VideoBitrateAllocation allocation =
        init_allocator.Allocate(webrtc::VideoBitrateAllocationParameters(
            webrtc::DataRate::KilobitsPerSec(codec_.startBitrate),
            codec_.maxFramerate));
    SetRates(RateControlParameters(allocation, codec_.maxFramerate));

    return WEBRTC_VIDEO_CODEC_OK;
}

// Stores the sink that Encode() delivers encoded frames to. Must be set
// before Encode(), which otherwise fails with
// WEBRTC_VIDEO_CODEC_UNINITIALIZED.
int32_t H264EncoderImpl::RegisterEncodeCompleteCallback(
    webrtc::EncodedImageCallback* callback) 
{
    encoded_image_callback_ = callback;
    return WEBRTC_VIDEO_CODEC_OK;
}

// Destroys the OpenH264 encoder instance. Idempotent: safe to call when no
// encoder has been created yet.
int32_t H264EncoderImpl::Release() {
    if (openh264_encoder_ == nullptr) {
        return WEBRTC_VIDEO_CODEC_OK;
    }

    WelsDestroySVCEncoder(openh264_encoder_);
    openh264_encoder_ = nullptr;
    return WEBRTC_VIDEO_CODEC_OK;
}

// Encodes one raw frame and delivers the bitstream through the registered
// EncodedImageCallback. |frame_types| is optional (may be null); a leading
// kVideoFrameKey entry forces an IDR frame.
int32_t H264EncoderImpl::Encode(
    const webrtc::VideoFrame& frame, 
    const std::vector<webrtc::VideoFrameType>* frame_types) 
{
    if (!openh264_encoder_) {
        RTC_LOG(LS_WARNING) << "OpenH264 encoder not init";
        return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
    }

    if (!encoded_image_callback_) {
        RTC_LOG(LS_WARNING) << "encoded image callback not register";
        return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
    }

    // Obtain the frame in I420 format for the encoder.
    rtc::scoped_refptr<webrtc::I420BufferInterface> frame_buffer =
        frame.video_frame_buffer()->ToI420();
    if (!frame_buffer) {
        RTC_LOG(LS_WARNING) << "get I420 data error";
        return WEBRTC_VIDEO_CODEC_ERROR;
    }

    // A key frame is needed when the stream was just (re)started.
    bool send_key_frame = false;
    if (config_.key_frame_request && config_.sending) {
        send_key_frame = true;
    }

    // The caller may also explicitly request a key frame. NOTE: frame_types
    // is allowed to be null per the VideoEncoder contract, so check it
    // before dereferencing (previously this could crash on a null pointer).
    if (!send_key_frame && frame_types && !frame_types->empty() &&
        (*frame_types)[0] == webrtc::VideoFrameType::kVideoFrameKey) 
    {
        send_key_frame = true;
    }

    // Fill the source picture handed to the encoder; it borrows the I420
    // planes, so |frame_buffer| must outlive EncodeFrame() below.
    picture_.iPicWidth = config_.width;
    picture_.iPicHeight = config_.height;
    picture_.iColorFormat = EVideoFormatType::videoFormatI420;
    picture_.uiTimeStamp = frame.ntp_time_ms();
    picture_.iStride[0] = frame_buffer->StrideY();
    picture_.iStride[1] = frame_buffer->StrideU();
    picture_.iStride[2] = frame_buffer->StrideV();
    picture_.pData[0] = const_cast<uint8_t*>(frame_buffer->DataY());
    picture_.pData[1] = const_cast<uint8_t*>(frame_buffer->DataU());
    picture_.pData[2] = const_cast<uint8_t*>(frame_buffer->DataV());

    // Stream is paused (bitrate allocation was zero): drop the frame.
    if (!config_.sending) {
        return WEBRTC_VIDEO_CODEC_OK;
    }

    if (send_key_frame) {
        openh264_encoder_->ForceIntraFrame(true);
        config_.key_frame_request = false;
    }

    // Run the encoder.
    SFrameBSInfo info;
    memset(&info, 0, sizeof(info));

    int enc_ret = openh264_encoder_->EncodeFrame(&picture_, &info);
    if (enc_ret != 0) {
        RTC_LOG(LS_WARNING) << "OpenH264 encode failed: " << enc_ret;
        return WEBRTC_VIDEO_CODEC_ERROR;
    }

    // Populate the callback result.
    encoded_image_._encodedWidth = config_.width;
    encoded_image_._encodedHeight = config_.height;
    encoded_image_.SetTimestamp(frame.timestamp());
    encoded_image_._frameType = ConvertToVideoFrameType(info.eFrameType);
    encoded_image_.SetSpatialIndex(0);

    // Copy the encoded bitstream into the callback result.
    RtpFragmentize(&encoded_image_, &info);

    // The encoder may skip frames to save bandwidth, in which case
    // encoded_image_.size() == 0 and there is nothing to deliver.
    if (encoded_image_.size() > 0) {
        h264_bitstream_parser_.ParseBitstream(encoded_image_);
        encoded_image_.qp_ = h264_bitstream_parser_.GetLastSliceQp().value_or(-1);

        webrtc::CodecSpecificInfo codec_specific;
        codec_specific.codecType = webrtc::VideoCodecType::kVideoCodecH264;
        codec_specific.codecSpecific.H264.packetization_mode =
            packetization_mode_;
        codec_specific.codecSpecific.H264.idr_frame =
            encoded_image_._frameType == webrtc::VideoFrameType::kVideoFrameKey;
        codec_specific.codecSpecific.H264.base_layer_sync = false;
        codec_specific.codecSpecific.H264.temporal_idx = webrtc::kNoTemporalIdx;

        encoded_image_callback_->OnEncodedImage(encoded_image_,
            &codec_specific);
    }

    return WEBRTC_VIDEO_CODEC_OK;
}

// Applies a new bitrate allocation and frame rate from the rate controller.
// A zero total bitrate pauses the stream; resuming later forces a key frame
// via LayerConfig::SetStreamState().
void H264EncoderImpl::SetRates(const RateControlParameters& parameters) {
    if (!openh264_encoder_) {
        RTC_LOG(LS_WARNING) << "SetRates() encoder uninitialized";
        return;
    }

    if (parameters.framerate_fps < 1.0f) {
        RTC_LOG(LS_WARNING) << "invalid frame rate: " <<
            parameters.framerate_fps;
        return;
    }

    // A total target bitrate of zero means encoding should be paused.
    if (parameters.bitrate.get_sum_bps() == 0) {
        config_.SetStreamState(false);
        return;
    }

    // maxFramerate is integral, so cast the fps directly instead of the
    // previous static_cast<float> that implicitly narrowed back to an
    // integer on assignment.
    codec_.maxFramerate = static_cast<uint32_t>(parameters.framerate_fps);
    config_.target_bps = parameters.bitrate.GetSpatialLayerSum(0);
    config_.max_frame_rate = static_cast<float>(codec_.maxFramerate);
    // Push the updated config into the running encoder.
    if (config_.target_bps) {
        config_.SetStreamState(true);

        SBitrateInfo target_bitrate;
        memset(&target_bitrate, 0, sizeof(target_bitrate));
        target_bitrate.iBitrate = config_.target_bps;
        target_bitrate.iLayer = SPATIAL_LAYER_0;
        openh264_encoder_->SetOption(ENCODER_OPTION_BITRATE, &target_bitrate);
        openh264_encoder_->SetOption(ENCODER_OPTION_FRAME_RATE,
            &config_.max_frame_rate);
    }
    else {
        // Spatial layer 0 got no bitrate: pause until a non-zero allocation.
        config_.SetStreamState(false);
    }
}

// Describes this encoder's capabilities to the WebRTC framework, including
// the QP thresholds that drive quality-based resolution scaling.
webrtc::VideoEncoder::EncoderInfo H264EncoderImpl::GetEncoderInfo() const {
    webrtc::VideoEncoder::EncoderInfo info;
    info.implementation_name = "OpenH264";
    info.is_hardware_accelerated = false;
    info.supports_native_handle = false;
    info.supports_simulcast = true;
    info.scaling_settings = webrtc::VideoEncoder::ScalingSettings(
        kLowH264QpThreshold, kHighH264QpThreshold);
    info.preferred_pixel_formats = { webrtc::VideoFrameBuffer::Type::kI420 };

    return info;
}

// Builds the extended OpenH264 parameter struct from codec_/config_ for
// InitializeExt(). Configures a single spatial and temporal layer whose
// slicing mode depends on the negotiated packetization mode.
SEncParamExt H264EncoderImpl::CreateEncoderParams() {
    SEncParamExt encoder_params;
    // Start from OpenH264's defaults and override selectively.
    openh264_encoder_->GetDefaultParams(&encoder_params);

    // Usage type: camera vs. screen content tuning.
    if (codec_.mode == webrtc::VideoCodecMode::kRealtimeVideo) {
        encoder_params.iUsageType = CAMERA_VIDEO_REAL_TIME;
    }
    else if (codec_.mode == webrtc::VideoCodecMode::kScreensharing) {
        encoder_params.iUsageType = SCREEN_CONTENT_REAL_TIME;
    }

    // Picture dimensions.
    encoder_params.iPicWidth = config_.width;
    encoder_params.iPicHeight = config_.height;

    // Target bitrate (bps).
    encoder_params.iTargetBitrate = config_.target_bps;
    // Keep unspecified. WebRTC's max codec bitrate is not the same setting
    // as OpenH264's iMaxBitrate. More details in https://crbug.com/webrtc/11543
    encoder_params.iMaxBitrate = UNSPECIFIED_BIT_RATE;

    // Rate control mode: bitrate-driven.
    encoder_params.iRCMode = RC_BITRATE_MODE;

    // Maximum frame rate.
    encoder_params.fMaxFrameRate = config_.max_frame_rate;

    // Extended parameters below.
    // Frame skipping: when enabled, the encoder drops frames to keep the
    // bitrate stable; when disabled it never skips, even with a full buffer.
    encoder_params.bEnableFrameSkip = config_.frame_dropping_on;

    // Keyframe (IDR) interval, i.e. the GOP length.
    encoder_params.uiIntraPeriod = config_.key_frame_interval;

    // SPS/PPS id strategy: reuse an existing SPS from the list if possible.
    encoder_params.eSpsPpsIdStrategy = SPS_LISTING;

    // Maximum NAL unit size; 0 leaves it unconstrained here (slice size is
    // constrained per-mode below).
    encoder_params.uiMaxNalSize = 0;

    // Encoder thread count:
    // 0: automatic — the encoder picks the thread count itself
    // 1: single-threaded
    // > 1: that many threads
    encoder_params.iMultipleThreadIdc = 1;

    RTC_LOG(LS_INFO) << "OpenH264 version " << OPENH264_MAJOR << "."
        << OPENH264_MINOR;

    // Single temporal and spatial layer (no SVC scalability).
    encoder_params.iTemporalLayerNum = 1;
    encoder_params.iSpatialLayerNum = 1;

    // Base spatial layer mirrors the top-level settings.
    encoder_params.sSpatialLayers[0].iVideoWidth = encoder_params.iPicWidth;
    encoder_params.sSpatialLayers[0].iVideoHeight = encoder_params.iPicHeight;
    encoder_params.sSpatialLayers[0].fFrameRate = encoder_params.fMaxFrameRate;
    encoder_params.sSpatialLayers[0].iSpatialBitrate =
        encoder_params.iTargetBitrate;
    encoder_params.sSpatialLayers[0].iMaxSpatialBitrate =
        encoder_params.iMaxBitrate;

    switch (packetization_mode_) {
    case webrtc::H264PacketizationMode::SingleNalUnit:
        // uiSliceNum only takes effect with SM_FIXEDSLCNUM_SLICE mode.
        encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceNum = 1;
        encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceMode =
            SM_SIZELIMITED_SLICE; // slice count derived from the frame size
        encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceSizeConstraint
            = max_payload_size_;
        break;
    case webrtc::H264PacketizationMode::NonInterleaved:
        // Single slice per frame.
        encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceNum = 1;
        encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceMode
            = SM_FIXEDSLCNUM_SLICE; // fixed number of slices
        break;
    }

    return encoder_params;
}

// Updates the sending flag. Transitioning from paused to sending requests a
// key frame so the decoder can re-synchronize with the stream.
void H264EncoderImpl::LayerConfig::SetStreamState(bool send_stream) {
    key_frame_request = key_frame_request || (send_stream && !sending);
    sending = send_stream;
}

} // namespace xrtc