/*
 * ffpipenode_android_mediacodec_vdec.c
 *
 * Copyright (c) 2014 Bilibili
 * Copyright (c) 2014 Zhang Rui <bbcallen@gmail.com>
 *
 * This file is part of ijkPlayer.
 *
 * ijkPlayer is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * ijkPlayer is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with ijkPlayer; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "ffpipenode_ohos_mediacodec_vdec.h"
#include "video_decoder.h"
#include "../ff_ffpipeline.h"
#include "ff_ffpipenode.h"
#include "video_decoder.h"
#include <bits/alltypes.h>
#include <mutex>
#include <memory>
#include <atomic>
#include <unistd.h>
#include <chrono>
#include <string.h>
// Opaque payload whose size is handed to ffpipenode_alloc().
// NOTE(review): ffpipenode_create_video_decoder_from_ohos_mediacodec() later
// overwrites node->opaque with a raw VideoDecoder*, so this struct is
// effectively unused (and the calloc'ed instance leaks) — confirm intent.
struct IJKFF_Pipenode_Opaque {
    FFPlayer *ffp;
};
// Private state of the SDL video-out used by the OHOS hardware-decode path.
typedef struct SDL_Vout_Opaque {
    struct EGLNativeWindowType   * native_window; // render surface passed to VideoDecoder::Config()
//    SDL_AMediaCodec * acodec;
    int              null_native_window_warned; // reduce log for null window
    int              next_buffer_id;

    //ISDL_Array       overlay_manager;
    //ISDL_Array       overlay_pool;

    //IJK_EGL         * egl;
} SDL_Vout_Opaque;

/**
 * Append one Annex-B NAL unit — [SPS/PPS?][00 00 01][payload] — to pOutPkt.
 *
 * Data is written AFTER any payload already in the packet, so calling this
 * once per NAL accumulates a complete Annex-B stream for the whole packet.
 * (The previous code always wrote at offset 0 while av_grow_packet() kept
 * enlarging the packet, so each NAL overwrote the last and the tail filled
 * with stale bytes.)
 *
 * @param pOutPkt    destination packet; grown by av_grow_packet() as needed
 * @param spspps     optional SPS/PPS blob already in Annex-B form (may be NULL)
 * @param spsppsSize byte size of spspps (0 when spspps is NULL)
 * @param pIn        NAL payload without length prefix or start code
 * @param inSize     payload size in bytes
 * @return 0 on success, negative AVERROR from av_grow_packet() on failure
 */
static int alloc_and_copy(AVPacket* pOutPkt, uint8_t* spspps, uint32_t spsppsSize, uint8_t* pIn, uint32_t inSize)
{
    int err;
    const int startCodeLen = 3; // Annex-B start code 00 00 01

    int writeOffset = pOutPkt->size; // append after existing payload
    err = av_grow_packet(pOutPkt, spsppsSize + inSize + startCodeLen);
    if (err < 0)
        return err;

    uint8_t* dst = pOutPkt->data + writeOffset;
    if (spspps)
    {
        memcpy(dst, spspps, spsppsSize); // SPS/PPS blob is already Annex-B formatted
    }

    // start code separating this NAL from what precedes it
    dst[spsppsSize]     = 0;
    dst[spsppsSize + 1] = 0;
    dst[spsppsSize + 2] = 1;
    // NAL payload follows the start code
    memcpy(dst + spsppsSize + startCodeLen, pIn, inSize);
    return 0;
}

static int h264_extradata_to_annexb(
    const unsigned char* pCodecExtraData, const int CodecextraDataSize, AVPacket* pOutExtradata, int padding)
{
    char*          pExtraData = NULL;
    int            len        = 0;
    int            spsUnitNum, ppsUnitNum;
    int            unitSize, totolSize = 0;
    unsigned char  startCode[] = {0, 0, 0, 1};
    unsigned char* pOut        = NULL;
    int            err;

    pExtraData = (char *)pCodecExtraData + 4; // get rid of version,profile,etc
    len        = (*pExtraData++ & 0x03) + 1;

    /* get sps */
    spsUnitNum = (*pExtraData++ & 0x1f);
    while (spsUnitNum--)
    {
        unitSize = (pExtraData[0] << 8 | pExtraData[1]); // sps size,two bytes
        pExtraData += 2;
        totolSize += unitSize + sizeof(startCode);
        //av_log(NULL, AV_LOG_INFO, "unitSize:%d\n", unitSize);

        if (totolSize > INT_MAX - padding)
        {
            av_log(NULL, AV_LOG_ERROR, "Too big extradatasize\n");
            /* av_free(pOut); */
            return AVERROR(EINVAL);
        }

        if (pExtraData + unitSize > (char*)pCodecExtraData + CodecextraDataSize)
        {
            av_log(NULL, AV_LOG_ERROR, "Packet header is not contained in global extradata\n");
            /* av_free(pOut); */
            return AVERROR(EINVAL);
        }

        if ((err = av_reallocp(&pOut, totolSize + padding)) < 0)
        {
            av_log(NULL, AV_LOG_ERROR, "av_reallocp error\n");
            return err;
        }
        // av_log(NULL, AV_LOG_DEBUG, "totolSize(%d) unitSize(%d) lenStartCode(%d)\n", totolSize, unitSize, sizeof(startCode));
        // copy startcode
        memcpy(pOut + totolSize - unitSize - sizeof(startCode), startCode, sizeof(startCode));
        // copy sps data
        memcpy(pOut + totolSize - unitSize, pExtraData, unitSize);

        pExtraData += unitSize;
    }
    /* get pps */
    ppsUnitNum = (*pExtraData++ & 0x1f);
    while (ppsUnitNum--)
    {
        unitSize = (pExtraData[0] << 8 | pExtraData[1]); // sps size,two bytes
        pExtraData += 2;
        totolSize += unitSize + sizeof(startCode);
        //av_log(NULL, AV_LOG_INFO, "unitSize2:%d\n", unitSize);

        if (totolSize > INT_MAX - padding)
        {
            av_log(NULL, AV_LOG_ERROR, "Too big extradatasize\n");
            /* av_free(pOut); */
            return AVERROR(EINVAL);
        }

        if (pExtraData + unitSize > (char*)pCodecExtraData + CodecextraDataSize)
        {
            av_log(NULL, AV_LOG_ERROR, "Packet header is not contained in global extradata\n");
            /* av_free(pOut); */
            return AVERROR(EINVAL);
        }
        /* expand the memory*/
        if ((err = av_reallocp(&pOut, totolSize + padding)) < 0)
        {
            av_log(NULL, AV_LOG_ERROR, "av_reallocp error\n");
            return err;
        }

        // copy startcode
        memcpy(pOut + totolSize - unitSize - sizeof(startCode), startCode, sizeof(startCode));
        // copy pps data
        memcpy(pOut + totolSize - unitSize, pExtraData, unitSize);
        pExtraData += unitSize;
    }
    pOutExtradata->data = pOut;
    pOutExtradata->size = totolSize;
    return len;
}

/**
 * Convert one length-prefixed (AVCC) H.264 packet to Annex-B format and copy
 * the converted bitstream into the codec input buffer `avbuffer`.
 *
 * For every IDR slice (NAL type 5) the SPS/PPS from the stream's extradata is
 * re-inserted in front of the slice so the hardware decoder can start
 * decoding mid-stream.
 *
 * @param PAvPkt           compressed packet in AVCC (4-byte length prefix) form
 * @param pAVFormatContext demuxer context; source of the codec extradata
 * @param avbuffer         destination codec input buffer (OH_AVBuffer)
 * @return 0 on success, negative AVERROR on malformed input or OOM
 *         (FIX: the fail path previously returned 0 too)
 */
int TransitionBuffer(AVPacket *PAvPkt, AVFormatContext* pAVFormatContext, CodecBufferInfo *avbuffer)
{
    LOGE( "swf  TransitionBuffer start\n");
    unsigned char* pData    = PAvPkt->data;
    unsigned char* pEnd     = NULL;
    int            dataSize = PAvPkt->size;

    int           curSize  = 0;
    int           nalusize;
    int           i;
    unsigned char nalHeader, nalType;
    AVPacket      spsppsPkt;
    AVPacket*     pOutPkt;
    int           ret = 0;

    pOutPkt = av_packet_alloc();
    if (pOutPkt == NULL)
        return AVERROR(ENOMEM);
    spsppsPkt.data = NULL;
    spsppsPkt.size = 0;
    pEnd           = pData + dataSize;

    while (curSize < dataSize)
    {
        if (pEnd - pData < 4) { // need at least the 4-byte length prefix
            ret = AVERROR_INVALIDDATA;
            goto fail;
        }

        // Read the 4-byte big-endian NAL length prefix.
        nalusize = 0;
        for (i = 0; i < 4; i++)
        {
            nalusize <<= 8;
            nalusize |= pData[i];
        }
        pData += 4; // advance to the NAL payload

        // Reject sizes that are negative or run past the packet end.
        if (nalusize > (pEnd - pData) + 1 || nalusize < 0) {
            ret = AVERROR_INVALIDDATA;
            goto fail;
        }

        nalHeader = *pData;
        nalType   = nalHeader & 0x1F; // NAL type lives in the low 5 bits
        if (nalType == 5) // IDR slice: prepend SPS/PPS taken from extradata
        {
            // FIX: h264_extradata_to_annexb allocates a fresh buffer on every
            // call; release the previous blob so packets containing several
            // IDR NALs do not leak it.
            if (spsppsPkt.data)
            {
                av_free(spsppsPkt.data);
                spsppsPkt.data = NULL;
                spsppsPkt.size = 0;
            }
            h264_extradata_to_annexb(pAVFormatContext->streams[PAvPkt->stream_index]->codecpar->extradata,
                pAVFormatContext->streams[PAvPkt->stream_index]->codecpar->extradata_size, &spsppsPkt,
                AV_INPUT_BUFFER_PADDING_SIZE);

            ret = alloc_and_copy(pOutPkt, spsppsPkt.data, spsppsPkt.size, pData, nalusize);
            if (ret < 0)
                goto fail;
        } else // other NAL types: just prefix a start code
        {
            ret = alloc_and_copy(pOutPkt, NULL, 0, pData, nalusize);
            if (ret < 0)
                goto fail;
        }

        curSize += nalusize + 4; // length prefix + payload consumed
        pData += nalusize;
    }

    // Copy the converted bitstream into the decoder input buffer once, after
    // the whole packet is converted (the old per-NAL copy rewrote the buffer
    // from offset 0 on every iteration).
    {
        OH_AVBuffer *ohBuf = reinterpret_cast<OH_AVBuffer *>(avbuffer->buffer);
        auto addr = OH_AVBuffer_GetAddr(ohBuf);
        if (addr == nullptr || OH_AVBuffer_GetCapacity(ohBuf) < pOutPkt->size) {
            ret = AVERROR(EINVAL); // would overflow the codec buffer
        } else {
            memcpy(addr, pOutPkt->data, pOutPkt->size);
            // NOTE(review): avbuffer->attr.size is not updated here — the
            // caller re-reads the attr via OH_AVBuffer_GetBufferAttr. Confirm
            // the framework reports the correct input size to the decoder.
        }
    }

fail:
    av_packet_free(&pOutPkt);
    if (spsppsPkt.data)
    {
        av_free(spsppsPkt.data); // FIX: was free(); memory comes from av_reallocp()
        spsppsPkt.data = NULL;
    }
    LOGE( "swf  TransitionBuffer end\n");
    return ret;
}

/**
 * Pull one compressed packet from the player's queue, convert it to Annex-B
 * and hand it to the OHOS hardware decoder through a free input buffer.
 *
 * Loops until the dequeued packet's serial matches the decoder's serial, so
 * stale packets from before a seek are consumed and dropped.
 *
 * @param frame unused on this hardware path — decoded frames are delivered
 *              through the decoder's output-buffer callback, not returned here
 * @return 0 on success, -1 when stopped, timed out, or on conversion/push failure
 */
static int decoder_decode_ohos_frame(FFPlayer *ffp, Decoder *d, AVFrame *frame, VideoDecoder *opaque)
{
    LOGE( "swf  decoder_decode_ohos_frame start\n");
    AVPacket pkt;
    AVFormatContext* fmt_ctx = ffp->is->ic;
    do {
        if (opaque->isStarted_ != true) {
            LOGE( "swf opaque->isStarted_ != true \n");
            return -1;
        }

        // Wait (bounded to 5s) for a free decoder input buffer; also wakes
        // when the decoder is being stopped.
        std::unique_lock<std::mutex> lock(opaque->decContext_->inputMutex_);
        opaque->decContext_->inputCond_.wait_for(
            lock, std::chrono::seconds(5),
            [opaque]() { return !opaque->isStarted_ || !opaque->decContext_->inputBufferInfoQueue_.empty(); });
        if (opaque->decContext_->inputBufferInfoQueue_.empty()) {
            LOGE( "swf opaque->decContext_->inputBufferInfoQueue_.empty() \n");
            return -1; // stopped, or timed out with no buffer available
        }

        CodecBufferInfo avbuffer = opaque->decContext_->inputBufferInfoQueue_.front();
        opaque->decContext_->inputBufferInfoQueue_.pop();
        opaque->decContext_->inputFrameCount_++;
        lock.unlock();

        // Blocking read of the next compressed packet.
        if (packet_queue_get_or_buffering(ffp, d->queue, &pkt, &d->pkt_serial, &d->finished) < 0)
            return -1;

        if (pkt.buf == nullptr) {
            LOGE( "swf  pkt == nullptr\n");
        }

        // Convert AVCC -> Annex-B directly into the codec input buffer.
        int err = TransitionBuffer(&pkt, fmt_ctx, &avbuffer);
        av_packet_unref(&pkt); // FIX: the packet payload was leaked on every iteration
        if (err < 0) {
            return -1;
        }

        // Fetch the buffer attributes the decoder needs (size/pts/flags).
        if (OH_AVBuffer_GetBufferAttr(reinterpret_cast<OH_AVBuffer *>(avbuffer.buffer), &avbuffer.attr))
        {
            LOGE( "swf  OH_AVBuffer_GetBufferAttr(reinterpret_cast<OH_AVBuffer *>\n");
            return -1;
        }

        if (opaque->PushInputData(avbuffer)) // hand the filled buffer to the decoder
            return -1;

    } while (d->queue->serial != d->pkt_serial);

    return 0; // FIX: control previously fell off the end of a non-void function (UB)
}

//FFPlayer *ffp
/**
 * Video-decode thread body for the OHOS hardware path: feeds packets to the
 * decoder in a loop until decoding fails or the decoder is stopped.
 *
 * @return -1 when the loop exits (AVERROR(ENOMEM) if the frame allocation fails)
 */
int ffp_video_ohos_thread(FFPlayer *ffp, VideoDecoder *opaque)
{
    LOGE( "swf  ffp_video_ohos_thread start\n");
    VideoState *is = ffp->is;
    AVFrame *frame = av_frame_alloc();
    if (!frame)
        return AVERROR(ENOMEM);

    // Loop until the feeder reports stop/failure. `frame` is unused by the
    // hardware path but kept to mirror the software decode-thread signature.
    for (;;) {
        int ret = decoder_decode_ohos_frame(ffp, &is->viddec, frame, opaque);
        if (ret < 0) {
            LOGE( "swf  ffp_video_ohos_thread decoder_decode_ohos_frame end for end \n");
            break;
        }
    }
    av_frame_free(&frame); // FIX: frame was leaked on thread exit
    return -1;
}


// Pipenode entry point: runs the OHOS hardware-decode loop on the caller's
// thread. node->opaque holds the VideoDecoder installed at node creation.
static int func_run_sync(IJKFF_Pipenode *node) {
    LOGE( "swf  func_run_sync start\n");
    auto *decoder = static_cast<VideoDecoder *>(node->opaque);
    return ffp_video_ohos_thread(decoder->ffp, decoder);
}

void VideoDecoder::DecOutputThread()
{
    //sampleInfo_.frameInterval = MICRO_SECOND / (sampleInfo_.frameRate * speed_);//根据帧率和速度调整帧间隔
    while (true) {
        //停止
        // 暂停
        // std::unique_lock<std::mutex> pauseLock(mutex_);
        // doneCond_.wait(pauseLock, [this]() { return !this->isPaused_;});
        // pauseLock.unlock();

        //thread_local auto lastPushTime = std::chrono::system_clock::now();//记录上一次推送的时间

        std::unique_lock<std::mutex> lock(decContext_->outputMutex_);
        // bool condRet = decContext_->outputCond_.wait_for(
        //     lock, std::chrono::seconds(5), [this]() { return !isStarted_ || !decContext_->outputBufferInfoQueue_.empty(); });
        decContext_->outputCond_.wait(lock, [this]() {return !decContext_->outputBufferInfoQueue_.empty();});


        CodecBufferInfo bufferInfo = decContext_->outputBufferInfoQueue_.front();//从缓冲区队列拿数据
        decContext_->outputBufferInfoQueue_.pop();
        decContext_->outputFrameCount_++;
        lock.unlock();

        int32_t ret = videoDecoder_->FreeOutputData(bufferInfo.bufferIndex, true);
    }
    VideoDecoder::Release();
}

/**
 * Create a pipenode that decodes H.264 with the OHOS hardware codec.
 *
 * Builds a VideoDecoder, configures it against the vout's native window and
 * starts it. Returns NULL on any failure (all partially-created resources
 * are released — previously node, opaque and the OH_AVFormat all leaked).
 */
IJKFF_Pipenode *ffpipenode_create_video_decoder_from_ohos_mediacodec(FFPlayer *ffp, SDL_Vout *vout)
{
    LOGE( "swf  ffpipenode_create_video_decoder_from_ohos_mediacodec start \n");
    IJKFF_Pipenode *node = ffpipenode_alloc(sizeof(IJKFF_Pipenode_Opaque));
    if (!node)
        return NULL;
    VideoDecoder *opaque = new VideoDecoder();
    // NOTE(review): this replaces the IJKFF_Pipenode_Opaque allocated inside
    // ffpipenode_alloc() with a raw VideoDecoder* — confirm how node->opaque
    // is released at pipenode destruction.
    node->opaque = opaque;
    opaque->ffp  = ffp;

    opaque->decoder_ = OH_VideoDecoder_CreateByMime("video/avc"); // TODO: honor ffp->video_codec_name
    if (opaque->decoder_ == nullptr) {
        delete opaque;
        node->opaque = NULL;
        ffpipenode_free(node); // FIX: node leaked on every failure path
        return nullptr;
    }

    OH_AVFormat *format = OH_AVFormat_Create();
    OH_AVFormat_SetIntValue(format, OH_MD_KEY_WIDTH, 640);  // TODO: use the real stream dimensions
    OH_AVFormat_SetIntValue(format, OH_MD_KEY_HEIGHT, 480);
    OH_AVFormat_SetDoubleValue(format, OH_MD_KEY_FRAME_RATE, ffp->max_fps);
    OH_AVFormat_SetIntValue(format, OH_MD_KEY_PIXEL_FORMAT, AV_PIXEL_FORMAT_NV12);

    int32_t cfgRet = opaque->Config(format, vout->opaque->native_window); // configure decoder + callbacks
    OH_AVFormat_Destroy(format); // FIX: format leaked on every call, success included
    if (cfgRet != AVCODEC_SAMPLE_ERR_OK) {
        delete opaque;
        node->opaque = NULL;
        ffpipenode_free(node);
        return NULL;
    }

    if (opaque->Start()) { // start the decoder
        delete opaque;
        node->opaque = NULL;
        ffpipenode_free(node);
        return NULL;
    }

    //node->func_destroy  = func_destroy;
    node->func_run_sync = func_run_sync; // decode loop entry point

    opaque->doneCond_.notify_all();
    LOGE( "swf  ffpipenode_create_video_decoder_from_ohos_mediacodec end \n");
    return node;
}
