/********************************************************************************
 * Copyright(c) 2020-2025 VINCENT_WY All rights reserved.
 * - Filename
 * - Author  Vincent
 * - Version V1.0.0
 * - Date    2023/09/21
 * - Brief
 * - FunctionList:
 ******************************************************************************
 * History:
 *
 *
 *
 ******************************************************************************
 */
#include "NoBufferedReplayImpl.h"

#include <fstream>

#include "FrameWrap.h"
#include "logutil.h"
#include "libyuv.h"
using namespace std;

/**
 * @brief Builds the replayer for one camera: scans @p path for recorded frame
 *        files and caches their metadata (_fileInfoList) for later playback.
 * @param id   Camera identifier; embedded in the log tag and stamped on frames.
 * @param path Root directory containing the recorded frame files.
 */
NoBufferedReplayImpl::NoBufferedReplayImpl(const int& id, const std::string& path)
    : Tag("[NoBufferedReplayImpl" + std::to_string(id) + "]: "), _cameraId(id), _root(path) {
    LOGD("%s read nv12 file ...", Tag.c_str());
    // NOTE(review): second FileReader argument is presumably a "buffered" flag
    // (false = unbuffered, matching the class name) — confirm against FileReader.
    _fileReaderPtr = std::make_unique<FileReader>(path, false);
    _fileInfoList = _fileReaderPtr->getFileInfoList();
    LOGD("%s Create ...", Tag.c_str());
}

/// @brief Log-only destructor. NOTE(review): it does not call stop(); the
/// owner must stop() before destruction or the worker threads outlive *this.
NoBufferedReplayImpl::~NoBufferedReplayImpl() {
    LOGD("%s Destroyed ...", Tag.c_str());
}

void NoBufferedReplayImpl::start(
    const std::function<void(std::shared_ptr<CameraFrame>&&)>& callback,
    const std::function<void(std::shared_ptr<BevFrame>&&)>& bc, const long long int& time) {
    if (_fileInfoList.empty()) {
        LOGE("%s NoFile in list !!!", Tag.c_str());
        return;
    }
    _startTime = time;
    _readLoop = std::make_unique<std::thread>(&NoBufferedReplayImpl::ReadLoop, this);
    _invokeLoop =
        std::make_unique<std::thread>(&NoBufferedReplayImpl::InvokeLoop, this, callback, bc);
    _status = Status::START;
}
/// @brief True once ReadLoop has delivered the last recorded frame.
bool NoBufferedReplayImpl::isComplete() {
    return _isComplete;
}

/// @brief Timestamp (us) of the first recorded frame, or -1 when the file
///        list is empty (nothing to replay).
long long int NoBufferedReplayImpl::getFirstFrameTm() {
    return _fileInfoList.empty() ? -1 : _fileInfoList.front().timestamp;
}

/// @brief Requests a replay from the beginning; ReadLoop picks the REPEAT
///        state up on its next iteration and restarts from frame 0.
void NoBufferedReplayImpl::doRepeat() {
    _status = Status::REPEAT;
}

/**
 * @brief Stops playback: parks the state machine, raises the exit flag, wakes
 *        the invoke thread waiting on the condvar, then joins both workers.
 */
void NoBufferedReplayImpl::stop() {
    LOGD("%s Stop 1...", Tag.c_str());
    _status = Status::IDLE;
    _isExit = true;
    _frameCv.notify_all();
    // Join a worker thread if it was ever started and is still joinable.
    auto joinWorker = [](std::unique_ptr<std::thread>& worker) {
        if (worker && worker->joinable()) {
            worker->join();
        }
    };
    joinWorker(_readLoop);
    joinWorker(_invokeLoop);
    LOGD("%s Stop 2...", Tag.c_str());
}

/**
 * @brief Reader thread body. Walks _fileInfoList, loads each frame file (raw
 *        NV12, or MJPEG which is decoded and repacked to NV12), paces delivery
 *        to the recorded inter-frame intervals, and queues frames for
 *        InvokeLoop via _frameDqu. When reading falls behind schedule, frames
 *        are dropped to catch up.
 *
 * Fixes vs. the previous version:
 *  - open failure logged the filename with "%d", called fclose(NULL) (UB) and
 *    `continue`d on the same index forever (infinite loop);
 *  - the catch-up loop indexed _fileInfoList[index] BEFORE checking
 *    index < size (out-of-bounds read after the last frame);
 *  - fread's result was unchecked and the file length was never validated
 *    against the buffer capacity;
 *  - MJpegDecoder::LoadFrame's result was ignored;
 *  - the I420 plane offsets mixed srcSize and nv12size;
 *  - long long values were logged with "%ld"/"%llu";
 *  - the dead "#else" raw-read branch was removed.
 */
void NoBufferedReplayImpl::ReadLoop() {
    int index = 0;
    long long int mLastTimeStamp{};
    long long int needSendTime{};
    auto decoder = std::make_unique<libyuv::MJpegDecoder>();
    while (!_isExit) {
        switch (_status) {
            case Status::START: {  // (re)start playback from the first frame
                index = 0;
                _status = Status::CONTINUE;
                mLastTimeStamp = _startTime;
                needSendTime = TimeUtil::now_us();
                _isComplete = false;
            } break;

            case Status::REPEAT: {  // loop the recording from the beginning
                _status = Status::START;
            } break;

            case Status::CONTINUE: {
                auto frameInterval = _fileInfoList[index].timestamp - mLastTimeStamp;
                unsigned long long t1{}, t2{}, readInterval{};
                long long int delayTime{};
                std::shared_ptr<CameraFrame> frame{};
                std::shared_ptr<uint8_t[]> nv12_buffer{};
                long fileLen{};
                long nv12size{};
                long bufCap{};
                FILE* fp_r{};
                if (frameInterval == 0) goto NEXT_FRAME;
                t1 = TimeUtil::now_us();
                frame = std::make_shared<CameraFrame>();
                frame->cameraId = _cameraId;
                frame->width = _fileInfoList[index].width;
                frame->height = _fileInfoList[index].height;
                frame->size = frame->width * frame->height * 3 / 2;
                frame->timestamp.tv_sec = _fileInfoList[index].timestamp / 1000000;
                frame->timestamp.tv_usec = _fileInfoList[index].timestamp % 1000000;
                // Capacity is 3*w*h (not 3/2*w*h) so a file larger than one
                // NV12 frame still fits before it is decoded in place.
                nv12size = (long)frame->width * frame->height * 3 / 2;
                bufCap = (long)frame->width * frame->height * 3;
                nv12_buffer = shared_ptr<uint8_t[]>(new uint8_t[bufCap],
                                                    [](uint8_t* p) { delete[] p; });
                fp_r = fopen(_fileInfoList[index].fileName.c_str(), "rb");
                if (NULL == fp_r) {
                    // Fix: "%s" for the filename (was "%d"), no fclose(NULL),
                    // and skip this frame instead of retrying it forever.
                    LOGE("%s failed to open [%s]", Tag.c_str(),
                         _fileInfoList[index].fileName.c_str());
                    goto NEXT_FRAME;
                }
                fseek(fp_r, 0, SEEK_END);
                fileLen = ftell(fp_r);
                fseek(fp_r, 0, SEEK_SET);
                // Fix: validate the length against the buffer and check fread.
                if (fileLen <= 0 || fileLen > bufCap ||
                    fread(nv12_buffer.get(), 1, (size_t)fileLen, fp_r) != (size_t)fileLen) {
                    LOGE("%s bad read [%s] len %ld", Tag.c_str(),
                         _fileInfoList[index].fileName.c_str(), fileLen);
                    fclose(fp_r);
                    goto NEXT_FRAME;
                }
                fclose(fp_r);
                if (fileLen < nv12size) {
                    // Shorter than one raw NV12 frame => treat as MJPEG:
                    // decode to I420 planes in tmp, then repack into nv12_buffer.
                    if (!decoder->LoadFrame(nv12_buffer.get(), fileLen)) {
                        LOGE("%s mjpeg LoadFrame failed [%s]", Tag.c_str(),
                             _fileInfoList[index].fileName.c_str());
                        goto NEXT_FRAME;
                    }
                    int srcSize = decoder->GetWidth() * decoder->GetHeight() * 3 / 2;
                    std::vector<uint8_t> tmp(srcSize);
                    uint8_t* planes[] = {tmp.data(), tmp.data() + srcSize * 2 / 3,
                                         tmp.data() + srcSize * 5 / 6};
                    decoder->DecodeToBuffers(planes, decoder->GetWidth(), decoder->GetHeight());
                    decoder->UnloadFrame();
                    // Fix: source plane pointers now reuse planes[] (offsets by
                    // srcSize) instead of mixing srcSize and nv12size offsets.
                    // NOTE(review): assumes decoder dimensions == frame
                    // dimensions — confirm for this recording format.
                    libyuv::I420ToNV12(planes[0], frame->width,
                                       planes[1], frame->width / 2,
                                       planes[2], frame->width / 2,
                                       nv12_buffer.get(), frame->width,
                                       nv12_buffer.get() + nv12size * 2 / 3, frame->width,
                                       frame->width, frame->height);
                }
                frame->buffer = std::move(nv12_buffer);
                t2 = TimeUtil::now_us();
                readInterval = t2 - t1;  // time spent loading/decoding (us)

                // Schedule delivery at the recorded cadence; delayTime > 0
                // means we are ahead of schedule and must wait.
                needSendTime += frameInterval;
                delayTime = needSendTime - TimeUtil::now_us();
                LOGD("Replay get camera [%d] frame timestamp %lld, frameInterval %lld, readInterval %llu, delay %lld\n",
                    _cameraId, _fileInfoList[index].timestamp,
                    frameInterval, readInterval, delayTime);

                if (delayTime > 0) {
                    std::this_thread::sleep_for(std::chrono::microseconds(delayTime));
                }

                {
                    std::lock_guard<std::mutex> lock(_frameMtx);
                    _frameDqu.push_back(frame);
                }

                _frameCv.notify_one();

                mLastTimeStamp = _fileInfoList[index].timestamp;
            NEXT_FRAME:
                index++;
                // Drop frames whose slot the read overrun already consumed.
                // Fix: bounds check FIRST (was indexing before the check).
                while (index < (int)_fileInfoList.size() &&
                       (long long)readInterval - delayTime >
                           _fileInfoList[index].timestamp - mLastTimeStamp) {
                    index++;
                }
                if (index >= (int)_fileInfoList.size()) {
                    _status = Status::IDLE;
                    _isComplete = true;
                    index = 0;
                    LOGD("%s camera [%d] complete.\n", Tag.c_str(), _cameraId);
                }
            } break;

            case Status::IDLE: {  // parked: wait for START/REPEAT
                std::this_thread::sleep_for(std::chrono::milliseconds(3));
            } break;

            default:
                break;
        }
    }
}

/**
 * @brief Delivery thread body: waits on _frameCv for frames queued by
 *        ReadLoop, pops them and forwards to the caller. BEV frames are
 *        repackaged into BevFrame and sent through @p bc; every other camera
 *        goes through @p callback. Exits when _isExit is raised (stop()).
 * @param callback Sink for regular camera frames.
 * @param bc       Sink for BEV frames.
 *
 * Fix: the frame timestamp was logged with "%ld" although the computed value
 * is 64-bit (and tv_sec * 1000000 could overflow a 32-bit long); the
 * expression is now widened explicitly and printed with "%lld".
 */
void NoBufferedReplayImpl::InvokeLoop(
    const std::function<void(std::shared_ptr<CameraFrame>&&)>& callback,
    const std::function<void(std::shared_ptr<BevFrame>&&)>& bc) {
    while (!_isExit) {
        std::unique_lock<std::mutex> lock(_frameMtx);
        _frameCv.wait(lock, [this] { return _frameDqu.size() > 0 || _isExit; });
        if (_isExit) break;
        auto frame = _frameDqu.front();
        _frameDqu.pop_front();
        lock.unlock();  // deliver outside the lock so ReadLoop is not blocked
        LOGD("Replay send camera [%d] frame timestamp %lld\n", _cameraId,
             (long long)frame->timestamp.tv_sec * 1000000 + frame->timestamp.tv_usec);
        if (_cameraId == FrameWrap::DEVICE_TYPE_BEV) {
            // Repackage as a BEV frame; the pixel buffer is moved, not copied.
            auto bf = make_shared<BevFrame>();
            bf->width = frame->width;
            bf->height = frame->height;
            bf->timestamp = frame->timestamp;
            bf->size = frame->size;
            bf->format = frame->format;
            bf->buffer = std::move(frame->buffer);
            bc(std::move(bf));
        } else {
            callback(std::move(frame));
        }
        if (TimeUtil::fps_calc_inc(_fps)) {
            LOGD("[FPS] Replay camera [%d] fps %0.2f, total fps %0.2f",
                _cameraId, _fps.ins_fps, _fps.avg_fps);
        }
    }
}