// Copyright 2022 Huawei Cloud Computing Technology Co., Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <cinttypes>
#include <climits>
#include <cstdio>
#include <cstdint>
#include <sys/system_properties.h>
#include "CasBuffer.h"
#include "CasVideoUtil.h"
#include "CasDecoder.h"
#include "CasLog.h"

#define ENABLE_DECODE_LOG 0

namespace {
    // Initial width/height hints handed to MediaCodec at configure time; the
    // codec reports the actual stream resolution via format-changed events.
    enum MediaCodecResolution {
        KEY_WIDTH = 720,
        KEY_HEIGHT = 1280
    };
    // MIME types accepted by AMediaCodec_createDecoderByType().
    const char *MIME_TYPE_H264 = "video/avc";
    const char *MIME_TYPE_H265 = "video/hevc";
    // HiSilicon hardware decoder component names (Huawei devices).
    const char *HW_H264_DECODER_NAME = "OMX.hisi.video.decoder.avc";
    const char *HW_H265_DECODER_NAME = "OMX.hisi.video.decoder.hevc";
    // the end-of-picture bits appending to the origin h264 frame
    const uint8_t H264_NAL_EOPIC[] = {
            0x00, 0x00, 0x01, 0x1D,
            0x00, 0x00, 0x01, 0x1E,
            0x48, 0x53, 0x50, 0x49,
            0x43, 0x45, 0x4E, 0x44
    };
    // Maximum length of an Android system property value, incl. terminator
    // (mirrors PROP_VALUE_MAX from <sys/system_properties.h>).
    const int PROPERTY_VALUE_MAX = 92;
    // Minimum Android SDK level the decoder supports (API level 21).
    const int SDK_VERSION_BASELINE = 21;
    // Number of dequeue-input-buffer timeouts tolerated before giving up.
    const uint32_t RETRY_LIMITS = 50;
    // This indicates that the buffer only contains part of a frame, and
    // the decoder should batch the data until a buffer
    // without this flag appears before decoding the frame.
    const uint32_t BUFFER_FLAG_PARTIAL_FRAME = 8;
    // Timeout (microseconds) for dequeueing codec input/output buffers.
    const int64_t MAX_TIMEOUT_USEC = 10000LL;
    // Microseconds-to-milliseconds divisor.
    const int64_t KILO = 1000LL;
    // SDK level (API 28) at which the HiSi vendor format keys change name.
    const int SDK_VERSION_BORDER = 28;
    // Vendor-specific HiSilicon low-latency decode scene keys, before/after API 28.
    const char *AMF_VENDOR_HISI_REQ_28 = "vendor.hisi-ext-video-dec-avc.video-scene-for-cloud-pc-req";
    const char *AMF_VENDOR_HISI_RDY_28 = "vendor.hisi-ext-video-dec-avc.video-scene-for-cloud-pc-rdy";
    const char *AMF_VENDOR_HISI_REQ_28_LATER = "vendor.hisi-ext-low-latency-video-dec.video-scene-for-low-latency-req";
    const char *AMF_VENDOR_HISI_RDY_28_LATER = "vendor.hisi-ext-low-latency-video-dec.video-scene-for-low-latency-rdy";
}

/*
 * @fn CasDecoder
 * @brief constructor; all members rely on their in-class defaults until Init().
 */
CasDecoder::CasDecoder() = default;

/*
 * @fn ~CasDecoder
 * @brief destructor; tears down the MediaCodec via Destroy() and drops the
 * reference to the CasVideoUtil singleton.
 */
CasDecoder::~CasDecoder()
{
    this->Destroy();
    this->m_casVideoUtil = nullptr;
}

/*
 * @fn Init
 * @brief Creates and configures the MediaCodec used for decoding.
 * @param[in] nativeWindow View to display of (type <tt>ANativeWindow *</tt>)
 * @param[in] frameType codec of the incoming stream (H264 / H265)
 * @param[in] rotationDegrees rotation hint forwarded to the codec format
 * @return errno: DECODER_SUCCESS
 *                DECODER_INIT_ERR
 *                DECODER_SDK_UNSUPPORTED
 */
uint32_t CasDecoder::Init(ANativeWindow *nativeWindow, FrameType frameType, int rotationDegrees)
{
    m_casVideoUtil = CasVideoUtil::GetInstance();
    if (m_casVideoUtil == nullptr || !m_casVideoUtil->Init()) {
        ERR("Failed to instantiate CasVideoUtil");
        return DECODER_INIT_ERR;
    }

    // Probe the platform before choosing a decoder implementation.
    m_isQcom = ISQcom();
    m_isHuawei = ISHuaweiDevice();
    m_sdkNo = GetSDKVersion();
    if (m_sdkNo < SDK_VERSION_BASELINE) {
        INFO("DECODER_SDK_UNSUPPORTED m_sdkNo = %d", m_sdkNo);
        return DECODER_SDK_UNSUPPORTED;
    }

    m_frameType = frameType;
    const bool isAvc = (frameType == FrameType::H264);
    m_mimeType = isAvc ? MIME_TYPE_H264 : MIME_TYPE_H265;

    // Prefer the HiSilicon hardware decoder on recent Huawei devices; fall
    // back to the generic decoder for the MIME type everywhere else.
    if (m_isHuawei && m_sdkNo >= SDK_VERSION_BORDER) {
        const char *hwName = isAvc ? HW_H264_DECODER_NAME : HW_H265_DECODER_NAME;
        m_mediaCodec = AMediaCodec_createCodecByName(hwName);
        if (m_mediaCodec == nullptr) {
            // HiSi component unavailable: use the generic path from here on.
            m_mediaCodec = AMediaCodec_createDecoderByType(m_mimeType);
            m_isHuawei = false;
        }
    } else {
        m_mediaCodec = AMediaCodec_createDecoderByType(m_mimeType);
    }
    if (m_mediaCodec == nullptr) {
        ERR("Failed to create MediaCodec.");
        return DECODER_INIT_ERR;
    }

    AMediaFormat *format = CreateMediaCodecFmt(rotationDegrees);
    if (format == nullptr) {
        ERR("Failed to create MediaCodec format.");
        return DECODER_INIT_ERR;
    }
    const int32_t status = AMediaCodec_configure(m_mediaCodec, format, nativeWindow, nullptr, 0);
    AMediaFormat_delete(format);
    if (status != AMEDIA_OK) {
        ERR("Failed to configure MediaCodec, errno: %d.", status);
        return DECODER_INIT_ERR;
    }
    INFO("Initialise MediaCodec success.");
    return DECODER_SUCCESS;
}

/*
 * @fn Start
 * @brief Puts the configured MediaCodec into the executing state.
 * @return errno: DECODER_SUCCESS
 *                DECODER_START_ERR
 */
uint32_t CasDecoder::Start()
{
    const int32_t status = AMediaCodec_start(m_mediaCodec);
    if (status == AMEDIA_OK) {
        // Drop incoming data until the next stream-begin frame is observed.
        m_isStreamBeginFrame = false;
        INFO("Start MediaCodec success.");
        return DECODER_SUCCESS;
    }
    ERR("Failed to start MediaCodec, errno: %d.", status);
    return DECODER_START_ERR;
}

/*
 * @fn Input
 * @brief to place processing data into InputBuffer, and MediaCodec would
 * fetch it to process asynchronously. Data is dropped until the first
 * stream-begin frame is seen; a frame larger than one input buffer is split
 * across several buffers, all but the last queued with
 * BUFFER_FLAG_PARTIAL_FRAME.
 * @param[in] buf Initial address of processing data of (type <tt>const uint8_t*</tt>)
 * @param[in] len Length of processing data of (type <tt>size_t</tt>)
 * @return errno: DECODER_SUCCESS
 *                DECODER_INPUT_ERR
 */
uint32_t CasDecoder::Input(const uint8_t *buf, const size_t len)
{
    if (!m_isStreamBeginFrame) {
        if (IsStreamBeginFrame(buf)) {
            m_isStreamBeginFrame = true;
            m_inputFrameCount = 0;
            m_outputFrameCount = 0;
        } else {
            // Not decodable yet: drop quietly until a begin frame arrives.
            return DECODER_SUCCESS;
        }
    }
    // Keep in sync with the sizing logic in AssembleMemory(): the Huawei H264
    // path appends the mock end-of-picture marker to every frame.
    size_t dataLen = IsNeedAppendMockFrameBytes() ? sizeof(H264_NAL_EOPIC) + len : len;
    uint8_t *data = AssembleMemory(buf, len);
    if (data == nullptr) {
        ERR("Failed to assemble memory.");
        Destroy();
        return DECODER_INPUT_ERR;
    }
    for (size_t pos = 0; pos != dataLen;) {
        size_t bufSize = 0;
        ssize_t bufId = 0;
        uint8_t *ptr = GetInputBuffer(bufSize, bufId);
        if (ptr == nullptr) {
            ERR("Failed to get input buffer.");
            Destroy();
            FreeBuffer(data);
            return DECODER_INPUT_ERR;
        }
        // Feed at most one input buffer's worth per iteration.
        size_t feedLen = (dataLen - pos) > bufSize ? bufSize : (dataLen - pos);
        errno_t en = memcpy_s(ptr, feedLen, data + pos, feedLen);
        if (en != EOK) {
            ERR("Place data into input buffer, error: %d.", en);
            Destroy();
            FreeBuffer(data);
            return DECODER_INPUT_ERR;
        }
        pos += feedLen;
        // Send the specified buffer to the codec for processing. The enqueue
        // timestamp doubles as the presentation time used for latency logging.
        uint64_t now = m_casVideoUtil->GetNow();
        int rc = AMediaCodec_queueInputBuffer(m_mediaCodec, bufId, 0, feedLen, now,
            pos != dataLen ? BUFFER_FLAG_PARTIAL_FRAME : 0);
        if (rc != AMEDIA_OK) {
            ERR("Failed to queue input buffer, errno: %d.", rc);
            Destroy();
            FreeBuffer(data);
            return DECODER_INPUT_ERR;
        } else {
            // NOTE(review): this counts queued buffers, so a frame split over
            // several buffers increments it more than once — confirm intended.
            m_inputFrameCount++;
            // %zu for size_t len: passing size_t to %d is undefined on LP64.
            DBG("DecodeLog: input frame count = %d, size = %zu", m_inputFrameCount, len);
        }
        if (ENABLE_DECODE_LOG) {
            // NOTE(review): logBuf is formatted but never emitted anywhere.
            char logBuf[1024] = {0};
            sprintf_s(logBuf, sizeof(logBuf), "Frame(%" PRIu64 ") @buffer(%zd) input success.", now, bufId);
        }
    }
    FreeBuffer(data);
    return DECODER_SUCCESS;
}

/*
 * @fn OutputAndDisplay
 * @brief to output processed data into OutputBuffer from MediaCodec,
 * and release OutputBuffer to render for display
 * @param[in] isDisplay true renders the frame to the surface, false drops it
 * @return errno: DECODER_SUCCESS
 *                DECODER_OUTPUT_RETRY
 *                DECODER_OUTPUT_ERR
 */
uint32_t CasDecoder::OutputAndDisplay(bool isDisplay)
{
    AMediaCodecBufferInfo info;
    // Get the index of the next available buffer of processed data.
    ssize_t bufId = AMediaCodec_dequeueOutputBuffer(m_mediaCodec, &info, MAX_TIMEOUT_USEC);
    if (bufId >= 0) {
        if (ENABLE_DECODE_LOG) {
            uint64_t frameUs = static_cast<uint64_t>(info.presentationTimeUs);
            uint64_t nowUs = m_casVideoUtil->GetNow();
            uint64_t latency = (nowUs - frameUs) / KILO;
            m_casVideoUtil->SetTimestamp(nowUs);
            char logBuf[1024] = {0};
            sprintf_s(logBuf, sizeof(logBuf) - 1,
                "Frame(%" PRIu64 ") @buffer(%zd) output success, decode latency %" PRIu64 " ms", frameUs, bufId,
                latency);
            // PRIu64 for the 64-bit latency: %d here is undefined behaviour.
            DBG("DecodeLog: frame count %d decode latency is %" PRIu64, m_outputFrameCount, latency);
        }
        m_outputFrameCount++;
        // %zd for the ssize_t buffer id: %d is wrong on LP64 targets.
        DBG("DecodeLog: dequeue output buffer success: bufId = %zd, m_outputFrameCount = %d", bufId, m_outputFrameCount);

        if (!isDisplay) {
            DBG("DecodeLog: frame count %d output buffer latency exceed %d frames, release one frame. ", m_outputFrameCount, m_inputFrameCount - m_outputFrameCount);
        }
        // If you are done with a buffer, use this call to return the buffer to the codec.
        // render == true pushes the frame to the output surface configured at
        // AMediaCodec_configure time; render == false discards it.
        AMediaCodec_releaseOutputBuffer(m_mediaCodec, bufId, isDisplay);

        return DECODER_SUCCESS;
    }
    switch (bufId) {
        case AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED:
            INFO("Output buffers changed.");
            break;
        case AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED: {
            AMediaFormat *fmt = AMediaCodec_getOutputFormat(m_mediaCodec);
            if (fmt != nullptr) {
                INFO("Output format change to: %s.", AMediaFormat_toString(fmt));
                AMediaFormat_delete(fmt);
            } else {
                INFO("Output format change to nullptr.");
            }
        }
            break;
        case AMEDIACODEC_INFO_TRY_AGAIN_LATER:
            DBG("Retry to get output buffer.");
            break;
        default:
            // Any other negative value is a fatal codec error.
            Destroy();
            return DECODER_OUTPUT_ERR;
    }
    return DECODER_OUTPUT_RETRY;
}

/*
 * @fn GetFps
 * @brief to get the realtime fps, as measured by CasVideoUtil.
 * Precondition: Init() succeeded, so m_casVideoUtil is non-null.
 * @return uint32_t, fps
 */
uint32_t CasDecoder::GetFps() const
{
    return m_casVideoUtil->GetFps();
}

/*
 * @fn Destroy
 * @brief to destroy MediaCodec, and release the resources it possesses.
 * Safe to call multiple times and before Init() has completed.
 */
void CasDecoder::Destroy() noexcept
{
    if (m_mediaCodec != nullptr) {
        // Flush while the codec is still executing, then stop and free it:
        // flushing after stop is invalid in the MediaCodec state machine.
        AMediaCodec_flush(m_mediaCodec);
        AMediaCodec_stop(m_mediaCodec);
        AMediaCodec_delete(m_mediaCodec);
        m_mediaCodec = nullptr;
    }
    // Guard against a null util: the destructor calls Destroy() even when
    // Init() failed before obtaining the CasVideoUtil singleton.
    if (m_casVideoUtil != nullptr) {
        m_casVideoUtil->DestroyInstance();
    }
    INFO("Destroy MediaCodec success.");
}

/*
 * @fn CreateMediaCodecFmt
 * @brief to create AMediaFormat for MediaCodec configuration.
 * @param[in] rotationDegrees rotation hint applied to the output
 * @return AMediaFormat *, AMediaFormat object for MediaCodec configuration;
 *         nullptr on allocation failure. Caller owns the returned format.
 */
AMediaFormat *CasDecoder::CreateMediaCodecFmt(int rotationDegrees) const
{
    AMediaFormat *fmt = AMediaFormat_new();
    if (fmt == nullptr) {
        // Do not report success when allocation failed.
        ERR("Failed to allocate MediaCodec format.");
        return nullptr;
    }
    int width = MediaCodecResolution::KEY_WIDTH;
    int height = MediaCodecResolution::KEY_HEIGHT;
    AMediaFormat_setInt32(fmt, "rotation-degrees", rotationDegrees);
    INFO("ORIENTATION set degree %d", rotationDegrees);
    AMediaFormat_setString(fmt, AMEDIAFORMAT_KEY_MIME, m_mimeType);
    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_WIDTH, width);
    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_HEIGHT, height);
    // Operate at maximum rate to lower latency as much as possible on
    // some Qualcomm platforms. We could also set KEY_PRIORITY to 0 (realtime)
    // but that will actually result in the decoder crashing if it can't satisfy
    // our (ludicrous) operating rate requirement.
    if (!m_isQcom) {
        AMediaFormat_setInt32(fmt, "operating-rate", SHRT_MAX);
        INFO("AMediaFormat set operating rate.");
    }

    // Request the HiSilicon low-latency decode scene on newer Huawei devices.
    if (m_isHuawei) {
        if (m_sdkNo > SDK_VERSION_BORDER) {
            AMediaFormat_setInt32(fmt, AMF_VENDOR_HISI_REQ_28_LATER, 1);
            AMediaFormat_setInt32(fmt, AMF_VENDOR_HISI_RDY_28_LATER, -1);
        }
    }
    INFO("Create MediaCodec format success.");
    return fmt;
}

/*
 * @fn AssembleMemory
 * @brief to append h264 frame bits with the eopic bits
 * @param[in] buf Initial address of processing data of (type <tt>const uint8_t*</tt>)
 * @param[in] len Length of processing data of (type <tt>size_t</tt>)
 * @return pointer to the assembly memory, nullptr when failure
 */
uint8_t *CasDecoder::AssembleMemory(const uint8_t *buf, const size_t len) const
{
    bool needAppendBytes = IsNeedAppendMockFrameBytes();
    uint32_t size = needAppendBytes ? sizeof(H264_NAL_EOPIC) + len : len;
    uint8_t *data = reinterpret_cast<uint8_t *>(AllocBuffer(size));
    if (data == nullptr) {
        ERR("Failed to allocate memory.");
        return nullptr;
    }
    errno_t en = memcpy_s(data, len, buf, len);
    if (en != EOK) {
        ERR("Failed to copy frame into memory, error: %d", en);
        FreeBuffer(data);
        return nullptr;
    }

    if (needAppendBytes) {
        en = memcpy_s(data + len, sizeof(H264_NAL_EOPIC), H264_NAL_EOPIC, sizeof(H264_NAL_EOPIC));
        if (en != EOK) {
            ERR("Failed to copy Eopic bits into memory, error: %d", en);
            FreeBuffer(data);
            return nullptr;
        }
    }

    return data;
}

/*
 * @fn GetInputBuffer
 * @brief Dequeues an input buffer from MediaCodec, retrying on timeout.
 * @param[out] bufSize size of input buffer allocated by MediaCodec of (type <tt>size_t</tt>)
 * @param[out] bufId id of the input buffer of (type <tt>size_t</tt>)
 * @return pointer to the input buffer, nullptr when failure
 */
uint8_t *CasDecoder::GetInputBuffer(size_t &bufSize, ssize_t &bufId) const
{
    int attempts = 0;
    while (true) {
        // Get the index of the next available input buffer; the caller copies
        // the data to decode into it before queueing it back to the codec.
        bufId = AMediaCodec_dequeueInputBuffer(m_mediaCodec, MAX_TIMEOUT_USEC);
        if (bufId >= 0) {
            break;
        }
        if (bufId < -1) {
            // Any value below -1 is a codec error, not a timeout.
            ERR("Failed to dequeue input buffer.");
            return nullptr;
        }
        // bufId == -1: timed out waiting for a free buffer.
        if (++attempts == RETRY_LIMITS) {
            ERR("Failed to re-dequeue input buffer.");
            return nullptr;
        }
        DBG("No Available CasBuffer, retried %d times to dequeue input buffer.", attempts);
    }
    // The index obtained above has not been queued yet, so it is valid to map.
    return AMediaCodec_getInputBuffer(m_mediaCodec, bufId, &bufSize);
}

/*
 * @fn CheckSDKVersion
 * @brief to check if the sdn version is Android 7.0 or above
 * @return true if sdk version supported
 */
bool CasDecoder::CheckSDKVersion() const
{
    char sdkVer[PROPERTY_VALUE_MAX] = {0};
    int sdkNo = 0;
    if (__system_property_get("ro.build.version.sdk", sdkVer) != 0) {
        sdkNo = atoi(sdkVer);
    }
    return sdkNo >= SDK_VERSION_BASELINE;
}

int CasDecoder::GetSDKVersion() const
{
    char sdkVer[PROPERTY_VALUE_MAX] = {0};
    int sdkNo = 0;
    if (__system_property_get("ro.build.version.sdk", sdkVer) != 0) {
        sdkNo = atoi(sdkVer);
        INFO("Get sdk version success.");
    }
    return sdkNo;
}

bool CasDecoder::ISHuaweiDevice() const
{
    char emuiVerNo[PROPERTY_VALUE_MAX] = {0};

    if (__system_property_get("ro.build.version.emui", emuiVerNo) != 0) {
        INFO("Check device success.");
    }
    return strlen(emuiVerNo) > 0;
}

bool CasDecoder::ISQcom() const
{
    char hardware[PROPERTY_VALUE_MAX] = {0};

    if (__system_property_get("ro.hardware", hardware) != 0) {
        INFO("Get hardware success.");
    }
    INFO("Hardware is %s", hardware);
    return strcmp(hardware, "qcom") == 0;
}

// Whether the H264_NAL_EOPIC marker must be appended to each frame; this is
// only done on the Huawei (HiSilicon) decoder path for H264 streams.
bool CasDecoder::IsNeedAppendMockFrameBytes() const
{
    return m_isHuawei && m_frameType == FrameType::H264;
}

/*
 * @fn GetCurrentTimeMs
 * @brief Returns the current wall-clock (CLOCK_REALTIME) time in milliseconds.
 * @return uint64_t, milliseconds since the epoch
 */
uint64_t CasDecoder::GetCurrentTimeMs() const
{
    constexpr uint64_t oneSecond = 1000;
    timespec now = {};
    clock_gettime(CLOCK_REALTIME, &now);
    // Widen tv_sec BEFORE multiplying: time_t may be 32-bit (32-bit Android),
    // and tv_sec * 1000 overflows a 32-bit integer for any modern timestamp.
    return static_cast<uint64_t>(now.tv_sec) * oneSecond +
           static_cast<uint64_t>(now.tv_nsec) / (oneSecond * oneSecond);
}

/*
 * @fn IsStreamBeginFrame
 * @brief Tells whether the frame begins a decodable stream, by inspecting the
 * NAL header byte after a 4-byte Annex-B start code.
 * NOTE(review): reads buf[0..4] — assumes the caller supplies at least
 * 5 bytes; confirm against the call site in Input().
 * @param[in] buf frame data expected to start with 00 00 00 01
 * @return true when the frame is a stream-begin frame
 */
bool CasDecoder::IsStreamBeginFrame(const uint8_t *buf) const
{
    // Require the 4-byte Annex-B start code 00 00 00 01.
    const bool hasStartCode =
        (buf[0] == 0x00) && (buf[1] == 0x00) && (buf[2] == 0x00) && (buf[3] == 0x01);
    if (!hasStartCode) {
        return false;
    }
    const uint8_t nalHeader = buf[4];
    switch (m_frameType) {
        case FrameType::H265:
            return nalHeader == 0x40 || nalHeader == 0x46;
        case FrameType::H264:
            // 0x67 is the SPS NAL header byte in H264.
            return nalHeader == 0x67;
        default:
            return false;
    }
}