//
// Created on 2024/3/28.
//
// Node APIs are not fully supported. To resolve "interface cannot be found"
// compilation errors, include "napi/native_api.h".

#include "ohos_hardware_video_encoder.h"

#include <bits/alltypes.h>
#include <hilog/log.h>
#include <multimedia/player_framework/native_avmemory.h>

#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "libyuv/convert_from.h"

namespace webrtc{
namespace ohos{

// Offset between the NTP epoch (1900-01-01) and the Unix epoch (1970-01-01), in seconds.
#define NTP_TIMESTAMP_DELTA 2208988800ull
// QP scaling thresholds for H.264 (same values WebRTC's software H.264 encoder uses).
static const int kLowH264QpThreshold = 24;
static const int kHighH264QpThreshold = 37;
// Bug fix: the original code #include'd "common_video/libyuv/include/webrtc_libyuv.h"
// here, INSIDE namespace webrtc::ohos, which would wrap that header's declarations
// into the wrong namespace on first inclusion. The include now lives with the other
// includes at the top of the file.
#if defined(WEBRTC_USE_H264) 
    bool g_rtc_use_h264 = true;
#endif

OhosHardwareVideoEncoder::OhosHardwareVideoEncoder(const cricket::VideoCodec& codec)
{
    // This encoder implements H.264 only; reject any other codec at construction time.
    const bool isH264 = absl::EqualsIgnoreCase(codec.name, cricket::kH264CodecName);
    RTC_CHECK(isH264);
}

void OhosHardwareVideoEncoder::OutputThread()
{
    OH_LOG_Print(LOG_APP, LOG_INFO, LOG_DOMAIN, "outputThread", "OhosHardwareVideoEncoder::OutputThread enter");
    
    std::vector<uint8_t> encodeFrame;
    OH_AVCodecBufferAttr avcodecBuffAttr{0, 0, 0, 0};
    CodecBufferInfo codecBufferInfoReceive;
    EncodedImage encodeImage;
    // 如果是sps和pps，保存到一个静态数据中
    std::vector<uint8_t> paramaterInfo;
    
    while (encoder_->Stat() == OhosVideoEncoder::EncodeStat::RUNNING) {
        int32_t res = codecData_.OutputData(codecBufferInfoReceive);
        if (res != AV_ERR_OK) {
            OH_LOG_Print(LOG_APP, LOG_INFO, LOG_DOMAIN, "outputThread", "OhosHardwareVideoEncoder::OutputThread get data fail! %{public}d", res);
            continue;
        }
        encodeFrame.clear();

        codecBufferInfoReceive.GetAttr(&avcodecBuffAttr);
        // sps和pps
        if (avcodecBuffAttr.flags == AVCODEC_BUFFER_FLAGS_CODEC_DATA) {
            paramaterInfo.clear();
            paramaterInfo.insert(paramaterInfo.end(), codecBufferInfoReceive.GetBuff(), codecBufferInfoReceive.GetBuff() + avcodecBuffAttr.size);
            continue;
        } else if (avcodecBuffAttr.flags == AVCODEC_BUFFER_FLAGS_SYNC_FRAME) { // 关键帧 如果是i帧，在i帧前面加上sps和pps    
            encodeFrame.insert(encodeFrame.end(), paramaterInfo.begin(), paramaterInfo.end());
            encodeFrame.insert(encodeFrame.end(), codecBufferInfoReceive.GetBuff(), codecBufferInfoReceive.GetBuff() + avcodecBuffAttr.size);
            // 用鸿蒙构建encoded_iamge                
            rtc::scoped_refptr<EncodedImageBuffer> encoded_data = EncodedImageBuffer::Create(encodeFrame.data(), encodeFrame.size());

            encodeImage._encodedWidth = width_;
            encodeImage._encodedHeight = height_;
            encodeImage.SetRtpTimestamp(timestamp_);
            encodeImage.SetColorSpace(colorSpace_);
            encodeImage.SetEncodedData(encoded_data);
            encodeImage.set_size(avcodecBuffAttr.size);
        } else {
            rtc::scoped_refptr<EncodedImageBuffer> encoded_data = EncodedImageBuffer::Create(codecBufferInfoReceive.GetBuff(), avcodecBuffAttr.size);
            encodeImage._encodedWidth = width_;
            encodeImage._encodedHeight = height_;
            encodeImage.SetRtpTimestamp(timestamp_);
            encodeImage.SetColorSpace(colorSpace_);
            encodeImage.SetEncodedData(encoded_data);
            encodeImage.set_size(avcodecBuffAttr.size);
        }

        encoder_->FreeOutPutData(codecBufferInfoReceive.GetBufferIndex());

        // 创建一个 CodecSpecificInfo 结构体
        CodecSpecificInfo codecSpecificInfo;
        codecSpecificInfo.codecType = kVideoCodecH264;
        codecSpecificInfo.codecSpecific.H264 = {H264PacketizationMode::NonInterleaved};  // 将 H264 特定的信息复制到 CodecSpecificInfo 结构体中
        encoded_image_callback_->OnEncodedImage(encodeImage, &codecSpecificInfo);
    }
}

OhosHardwareVideoEncoder::~OhosHardwareVideoEncoder()
{
    // Bug fix: join only if InitEncode() actually started the thread — calling
    // join() through a null unique_ptr, or on a non-joinable thread, crashes.
    if (outputThread_ && outputThread_->joinable()) {
        outputThread_->join();
    }
    OH_LOG_Print(LOG_APP, LOG_INFO, LOG_DOMAIN, "~OhosHardwareVideoEncoder", "leave OhosHardwareVideoEncoder");
}

// Creates, configures and starts the OHOS hardware encoder, then launches the
// output-draining thread. Returns WEBRTC_VIDEO_CODEC_OK on success.
int32_t OhosHardwareVideoEncoder::InitEncode(const VideoCodec *codec_settings, int32_t number_of_cores, size_t max_payload_size)
{
    VideoCodecType codecType = codec_settings->codecType;  
    
    uint16_t width = codec_settings->width;
    uint16_t height = codec_settings->height;
    
    OH_LOG_Print(LOG_APP, LOG_INFO, LOG_DOMAIN, "InitEncode", "width is: %{public}d", width);
    OH_LOG_Print(LOG_APP, LOG_INFO, LOG_DOMAIN, "InitEncode", "height is: %{public}d", height);
    
    encoder_ = std::make_unique<OhosVideoEncoder>();
    encoder_->Create();

    codecData_.formatInfo = &formatInfo_;
    codecData_.formatInfo->videoWidth = width;
    codecData_.formatInfo->videoHeight = height;
    encoder_->Config(&codecData_);
    encoder_->Start();
    // First presentation timestamp as an NTP timestamp (seconds since 1900).
    nextPTS_ = static_cast<unsigned long long>(time(NULL)) + NTP_TIMESTAMP_DELTA;
    OH_LOG_Print(LOG_APP, LOG_INFO, LOG_DOMAIN, "InitEncode", "enter OhosHardwareVideoEncoder");

    unsigned int startBitrate = codec_settings->startBitrate; // kilobits/sec.
    unsigned int maxBitrate = codec_settings->maxBitrate;   // kilobits/sec.
    unsigned int minBitrate = codec_settings->minBitrate;   // kilobits/sec.
    
    // Clamp the start bitrate into [minBitrate, maxBitrate]; the max cap is
    // applied last so it wins if the range is inverted.
    if (startBitrate < minBitrate) {
        startBitrate = minBitrate;
    }

    if (startBitrate > maxBitrate) {
        startBitrate = maxBitrate;
    }
    
    uint32_t maxFramerate = codec_settings->maxFramerate;
    // Bug fix: fps_ previously stored startBitrate (kbps). fps_ tracks frames
    // per second everywhere else (see SetRates), so seed it from maxFramerate.
    fps_ = maxFramerate;
    
    OH_LOG_Print(LOG_APP, LOG_INFO, LOG_DOMAIN, "encode", "number_of_cores is %{public}d", number_of_cores);
    // Bug fix: size_t needs %zu (the old %ld is wrong on 32-bit targets).
    OH_LOG_Print(LOG_APP, LOG_INFO, LOG_DOMAIN, "encode", "max_payload_size is %{public}zu", max_payload_size);
    OH_LOG_Print(LOG_APP, LOG_INFO, LOG_DOMAIN, "encode", "codecType is %{public}d", codecType);
    OH_LOG_Print(LOG_APP, LOG_INFO, LOG_DOMAIN, "encode", "width is %{public}hu", width);
    OH_LOG_Print(LOG_APP, LOG_INFO, LOG_DOMAIN, "encode", "height is %{public}hu", height);
    OH_LOG_Print(LOG_APP, LOG_INFO, LOG_DOMAIN, "encode", "startBitrate is %{public}u", startBitrate);
    OH_LOG_Print(LOG_APP, LOG_INFO, LOG_DOMAIN, "encode", "maxBitrate is %{public}u", maxBitrate);
    OH_LOG_Print(LOG_APP, LOG_INFO, LOG_DOMAIN, "encode", "minBitrate is %{public}u", minBitrate);
    // Bug fix: "%{public}uld" was a malformed specifier; uint32_t is %u.
    OH_LOG_Print(LOG_APP, LOG_INFO, LOG_DOMAIN, "encode", "maxFramerate is %{public}u", maxFramerate);

    // Start draining encoded output on a dedicated thread; joined in the destructor.
    outputThread_ = std::make_unique<std::thread>(&OhosHardwareVideoEncoder::OutputThread, this);

    return WEBRTC_VIDEO_CODEC_OK;
}

// Thin adapter over the three-argument InitEncode overload.
int OhosHardwareVideoEncoder::InitEncode(const VideoCodec* codec_settings, const VideoEncoder::Settings& settings)
{
    // A null codec configuration cannot initialize anything.
    if (codec_settings != nullptr) {
        return InitEncode(codec_settings, settings.number_of_cores, settings.max_payload_size);
    }
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}

// Remembers the sink that OutputThread() delivers finished frames to.
int32_t OhosHardwareVideoEncoder::RegisterEncodeCompleteCallback(EncodedImageCallback *callback)
{
    encoded_image_callback_ = callback;
    return WEBRTC_VIDEO_CODEC_OK;
}

// Rate-control update from WebRTC. Only the target frame rate is recorded;
// bitrate changes are currently not forwarded to the hardware codec.
void OhosHardwareVideoEncoder::SetRates(const RateControlParameters &parameters)
{
    fps_ = parameters.framerate_fps;
}

// Stops and releases the hardware codec. Always reports success, matching the
// VideoEncoder contract for Release().
int32_t OhosHardwareVideoEncoder::Release()
{
    // Bug fix: Release() can be called before InitEncode() ever ran (e.g. on a
    // failed negotiation); guard against the null encoder.
    if (encoder_) {
        encoder_->Stop();
        encoder_->Release();
    }
    return WEBRTC_VIDEO_CODEC_OK;
}

// Describes this encoder's capabilities to the WebRTC pipeline.
VideoEncoder::EncoderInfo OhosHardwareVideoEncoder::GetEncoderInfo() const 
{
    EncoderInfo info;
    info.supports_native_handle = false;
    info.implementation_name = "OhosH264";
    // Same QP-based resolution-scaling thresholds as WebRTC's software H.264 encoder.
    info.scaling_settings = VideoEncoder::ScalingSettings(kLowH264QpThreshold, kHighH264QpThreshold);
    // Bug fix: this class wraps the platform's hardware codec, so advertise
    // hardware acceleration (the flag feeds upstream quality/degradation logic).
    info.is_hardware_accelerated = true;
    info.supports_simulcast = true;
    info.preferred_pixel_formats = {VideoFrameBuffer::Type::kI420};
    return info;
}

int32_t OhosHardwareVideoEncoder::Encode(const VideoFrame &frame, const std::vector<VideoFrameType> *frame_types) 
{
    OH_LOG_Print(LOG_APP, LOG_ERROR, LOG_DOMAIN, "h264Encoder", "Encode start");
    const VideoFrameBuffer &videoFrameBuffer = *frame.video_frame_buffer()->ToI420();
    const int width = videoFrameBuffer.width();
    const int height = videoFrameBuffer.height();

    if (!encoded_image_callback_) {
        OH_LOG_Print(LOG_APP, LOG_ERROR, LOG_DOMAIN, "h264Encoder", "encoded_image_callback_ is null");
      return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
    }

    rtc::scoped_refptr<I420BufferInterface> frame_buffer = frame.video_frame_buffer()->ToI420();
    if (!frame_buffer) {
      OH_LOG_Print(LOG_APP, LOG_ERROR, LOG_DOMAIN, "h264Encoder", "Failed to convert image to I420. Can't encode frame.");
      return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE;
    }

    RTC_CHECK(frame_buffer->type() == VideoFrameBuffer::Type::kI420 ||
              frame_buffer->type() == VideoFrameBuffer::Type::kI420A);

    if (encoder_->Stat() == OhosVideoEncoder::EncodeStat::RUNNING) {
        codecData_.Start();
    } else {
        OH_LOG_Print(LOG_APP, LOG_ERROR, LOG_DOMAIN, "h264Encoder", "codecData_ shutdown");
        codecData_.ShutDown();
        return WEBRTC_VIDEO_CODEC_ERROR;//编码器有问题
    }

    uint32_t bufSize = frame.height() * frame.width() * 3 / 2;
    OH_AVCodecBufferAttr attr;
    attr.size = bufSize;
    attr.offset = 0;
    attr.pts = 0;          
    attr.flags = AVCODEC_BUFFER_FLAGS_CODEC_DATA;

    CodecBufferInfo codecBufferInfo(bufSize, attr);

    memcpy(codecBufferInfo.GetBuff(), frame.video_frame_buffer().get()->GetI420()->DataY(), frame.height() * frame.width());
    memcpy(codecBufferInfo.GetBuff() + frame.height() * frame.width(), frame.video_frame_buffer().get()->GetI420()->DataU(), frame.height() * frame.width() / 4);
    memcpy(codecBufferInfo.GetBuff() + frame.height() * frame.width() + (frame.height() * frame.width()) / 4, frame.video_frame_buffer().get()->GetI420()->DataV(), frame.height() * frame.width() / 4);
    
    int32_t res = codecData_.InputData(codecBufferInfo, std::chrono::milliseconds(10));
    if (res == 0) {
        width_ = width;
        height_ = height;
        timestamp_ = frame.timestamp();
        colorSpace_ = frame.color_space();
        encoder_->PushInputData(codecBufferInfo);
    } else {
        OH_LOG_Print(LOG_APP, LOG_ERROR, LOG_DOMAIN, "h264Encoder", "Failed to InputData");
        return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE;
    }
            
    OH_LOG_Print(LOG_APP, LOG_ERROR, LOG_DOMAIN, "h264Encoder", "Encode end");
    
    return WEBRTC_VIDEO_CODEC_OK;
}

std::unique_ptr<OhosHardwareVideoEncoder> OhosHardwareVideoEncoder::Create()
{
    return Create(cricket::CreateVideoCodec(cricket::kH264CodecName));
}

// Factory for the hardware H.264 encoder. Only usable when the build enables
// H.264 (WEBRTC_USE_H264); otherwise creation is a programming error.
std::unique_ptr<OhosHardwareVideoEncoder> OhosHardwareVideoEncoder::Create(const cricket::VideoCodec &codec)
{
    RTC_DCHECK(H264Encoder::IsSupported());
#if defined(WEBRTC_USE_H264)
    RTC_CHECK(g_rtc_use_h264);
    OH_LOG_Print(LOG_APP, LOG_ERROR, LOG_DOMAIN, "h264Encoder", "Create OhosHardwareVideoEncoder");
    return std::make_unique<OhosHardwareVideoEncoder>(codec);
#else
    // H.264 disabled at build time: log, assert in debug, and return null.
    OH_LOG_Print(LOG_APP, LOG_ERROR, LOG_DOMAIN, "h264Encoder", "not Create OhosHardwareVideoEncoder");
    RTC_DCHECK_NOTREACHED();
    return nullptr;
#endif
}

}
}
