/**
 * @file main.cpp
 * @author SDK_TEAM
 * @brief
 * @version 0.1
 * @date 2022-10-24
 *
 * Copyright:
 * © 2018 北京灵汐科技有限公司 版权所有。
 * 注意：以下内容均为北京灵汐科技有限公司原创，未经本公司允许，不得转载，否则将视为侵权；对于不遵守此声明或者其他违法使用以下内容者，本公司依法保留追究权。
 * © 2018 Lynxi Technologies Co., Ltd. All rights reserved.
 * NOTICE: All information contained here is, and remains the property of Lynxi.
 * This file can not be copied or distributed without the permission of Lynxi
 * Technologies Co., Ltd.
 *
 */

#include <lyn_api.h>
#include <lyn_plugin.h>
#include <unistd.h>

#include <cstdint>
#include <cstdio>
#include <iostream>
#include <memory>
#include <string>
#include <thread>
#include <vector>

#include "argsParser.h"
#include "batchMem.hpp"
#include "blockQueue.hpp"
#include "bufferPool.hpp"
#include "eventPool.hpp"
#include "framePool.hpp"
#include "frameRater.h"
#include "ipeLenetParam.h"
#include "lenetLib.h"
#include "logging.h"
#include "opencvWindow.hpp"
#include "sys/time.h"
#include "util.hpp"
#include "videoDecoder.h"
#include "videoEncoder.h"

using namespace std;

// Context handed to the post-process stream callback so it can return the
// batch's pooled resources after the plugin has consumed them.
typedef struct {
  BatchMemPool *pIpePool;     // IPE batch-memory pool that pIpeBuffer is returned to
  BatchMem *pIpeBuffer;       // IPE batch buffer holding the preprocessed inputs
  void *apuBuffer;            // APU inference result buffer
  BufferPool *apuBufferPool;  // pool that apuBuffer is returned to
  FrameRater *pFrameRater;    // frame-rate counter bumped once per processed batch
} PLUGIN_DATA_T;

// Context for the display callback: the decoded frame to show and the pool
// its buffer must be returned to afterwards.
typedef struct {
  lynFrame_t *pVdecFrame;     // decoded frame to display
  FramePool *pVdecFramePool;  // pool the frame's data buffer is pushed back to
  std::string windowName;     // OpenCV window to draw into
} SHOW_DATA_T;

// Per-device context: bundles the device context handle with the resources a
// worker thread needs from it.
typedef struct {
  lynContext_t ctx;        // device context handle
  const char *pModelPath;  // model file path (non-owning; points into main()'s string)
} DEVICE_CONTEXT_T;

// A pair of timing events bracketing one async operation, plus the pool both
// events are pushed back to from the completion callback (which also frees
// this heap-allocated struct).
typedef struct {
  lynEvent_t beg_event;      // recorded on the stream before the operation
  lynEvent_t end_event;      // recorded on the stream after the operation
  EventPool *timeEventPool;  // pool both events are returned to
} EVENT_DATA_T;

// 线程函数
void ThreadFunc(std::string strWindowName, DEVICE_CONTEXT_T *pDevCtx,
                std::string strVideoPath, std::string strVideoOutPath) {
  CHECK_ERR(lynSetCurrentContext(pDevCtx->ctx));

  // 1. 加载模型
  ModelInfo lenetModel;
  lenetModel.LoadModelByPath(pDevCtx->pModelPath);

  // 2. 删除上次运行输出文件
  std::remove(strVideoOutPath.c_str());

  // 3. 初始化视频解码类，并获取输出信息
  VideoDecoder videoDecoder;
  videoDecoder.Init(strVideoPath, InputType::File, INT16_MAX);
  lynVdecOutInfo_t vdecOutInfo;
  videoDecoder.GetVdecOutInfo(&vdecOutInfo);
  uint32_t nVdecOutSize = vdecOutInfo.predictBufSize;

  // 4. 创建 stream 与 event
  lynStream_t ipeStream = nullptr;
  lynStream_t apuStream = nullptr;
  lynStream_t postStream = nullptr;
  CHECK_ERR(lynCreateStream(&ipeStream));
  CHECK_ERR(lynCreateStream(&apuStream));
  CHECK_ERR(lynCreateStream(&postStream));
  lynEvent_t ipeEvent = nullptr;
  lynEvent_t apuEvent = nullptr;
  CHECK_ERR(lynCreateEvent(&ipeEvent));
  CHECK_ERR(lynCreateEvent(&apuEvent));

  // 5. 创建各个资源池，避免重复申请与释放
  void *apuBuffer;
  BufferPool apuBufferPool(lenetModel.outputSize * lenetModel.batchSize, 5);
  BatchMem *pIpeOutBuf = nullptr;
  BatchMemPool oIpeOutMemPool(false, lenetModel.inputSize, lenetModel.batchSize,
                              5);
  BlockQueue<lynFrame_t *> blockQueue(5);
  FramePool vdecFramePool(nVdecOutSize, 5);
  FramePool vencRecvFramePool(nVdecOutSize, 5);
  EventPool m_eventPool_ipe(10);
  EventPool m_eventPool_apu(10);

  // 6. 开启解码线程
  videoDecoder.Start(pDevCtx->ctx, std::ref(blockQueue),
                     std::ref(vdecFramePool));

  FrameRater frameRate(strWindowName);
  frameRate.SetInterval(1);

  // 7. 加载 Plugin, 并设置后处理参数
  lynPlugin_t plugin;
  std::string m_pluginPath = ArgsParse::argPostPluginPath;
  CHECK_ERR(lynPluginRegister(&plugin, m_pluginPath.c_str()));
  LenetPostInfo para;
  para.imgFmt = LYN_PIX_FMT_NV12;
  para.imgDataW = vdecOutInfo.width;
  para.imgDataH = vdecOutInfo.height;
  para.apuOutSize = lenetModel.outputSize;

  // 8. 初始化 IPE 处理类
  IpeParamLenet ipeParam(lenetModel.width, lenetModel.height);
  ipeParam.SetImgInfo(vdecOutInfo.width, vdecOutInfo.height, LYN_PIX_FMT_NV12);

  // 9. 根据传入的 ShowType 决定是使用显示窗口类还是视频编码类
  VideoEncoder *videoEncoder = nullptr;
  if (ArgsParse::argShowType == ShowType::SaveFile) {
    videoEncoder = new VideoEncoder;
    videoEncoder->Init(vdecOutInfo, strVideoOutPath, &vdecFramePool,
                       &vencRecvFramePool);
  } else if (ArgsParse::argShowType == ShowType::DirectShow) {
    OpencvWindowManager::GetInstance()->SetWindowSize(
        strWindowName, vdecOutInfo.height, vdecOutInfo.width);
  }

  bool bEos = false;
  int iBatchIndex = 0;
  while (!bEos) {
    // 10. 取出一个解码 Frame
    lynFrame_t *pFrameTmp;
    bool takeResult = blockQueue.take(pFrameTmp, 60000);  // 设置超时时间 60S
    if (!takeResult) {
      std::cout << "take queue timeout, retry!" << std::endl;
      continue;
    }
    bEos = pFrameTmp->eos;
    if (iBatchIndex == 0) {
      pIpeOutBuf = oIpeOutMemPool.GetBatchMem();
    }

    EVENT_DATA_T *eventData_ipe = new EVENT_DATA_T();
    eventData_ipe->beg_event = m_eventPool_ipe.Pop();
    CHECK_ERR(lynRecordEvent(ipeStream, eventData_ipe->beg_event));

    // 11. IPE 处理
    ipeParam.CalcParam(ipeStream, pFrameTmp->data, pIpeOutBuf->GetElement());

    eventData_ipe->end_event = m_eventPool_ipe.Pop();
    CHECK_ERR(lynRecordEvent(ipeStream, eventData_ipe->end_event));
    eventData_ipe->timeEventPool = &m_eventPool_ipe;

    lynStreamAddCallback(
        ipeStream,
        [](void *data) -> int {
          EVENT_DATA_T *resulltBackEvent = (EVENT_DATA_T *)data;
          float ms = 0.0;
          lynEventElapsedTime(resulltBackEvent->beg_event,
                              resulltBackEvent->end_event, &ms);
          std::cout << "IPE took " << ms << " ms \n" << std::endl;

          resulltBackEvent->timeEventPool->Push(resulltBackEvent->beg_event);
          resulltBackEvent->timeEventPool->Push(resulltBackEvent->end_event);

          delete resulltBackEvent;
          return 0;
        },
        eventData_ipe);

    ++iBatchIndex;

    // 12. IPE输出满了或是最后一个包，进行apu处理
    if (iBatchIndex == lenetModel.batchSize || bEos) {
      CHECK_ERR(lynRecordEvent(ipeStream, ipeEvent));
      CHECK_ERR(lynStreamWaitEvent(apuStream, ipeEvent));
      apuBuffer = apuBufferPool.Pop();

      EVENT_DATA_T *eventData_apu = new EVENT_DATA_T();
      eventData_apu->beg_event = m_eventPool_apu.Pop();
      CHECK_ERR(lynRecordEvent(apuStream, eventData_apu->beg_event));

      // 13. 调用 APU 推理接口
      CHECK_ERR(lynExecuteModelAsync(apuStream, lenetModel.model,
                                     pIpeOutBuf->Buffer(), apuBuffer,
                                     lenetModel.batchSize));

      eventData_apu->end_event = m_eventPool_apu.Pop();
      CHECK_ERR(lynRecordEvent(apuStream, eventData_apu->end_event));
      eventData_apu->timeEventPool = &m_eventPool_apu;

      lynStreamAddCallback(
          apuStream,
          [](void *data) -> int {
            EVENT_DATA_T *resulltBackEvent = (EVENT_DATA_T *)data;
            float ms = 0.0;
            lynEventElapsedTime(resulltBackEvent->beg_event,
                                resulltBackEvent->end_event, &ms);
            std::cout << "APU took " << ms << " ms \n" << std::endl;

            resulltBackEvent->timeEventPool->Push(resulltBackEvent->beg_event);
            resulltBackEvent->timeEventPool->Push(resulltBackEvent->end_event);

            delete resulltBackEvent;
            return 0;
          },
          eventData_apu);

      // 14. 在 APU 推理完之后，调用 Plugin 进行后处理
      CHECK_ERR(lynRecordEvent(apuStream, apuEvent));
      CHECK_ERR(lynStreamWaitEvent(postStream, apuEvent));
      para.imgData = pFrameTmp->data;
      para.apuOut = apuBuffer;
      CHECK_ERR(lynPluginRunAsync(postStream, plugin, "lynLenetProcess", &para,
                                  sizeof(para)));

      PLUGIN_DATA_T *callbackData = new PLUGIN_DATA_T;
      callbackData->pIpeBuffer = pIpeOutBuf;
      callbackData->pIpePool = &oIpeOutMemPool;
      callbackData->apuBuffer = apuBuffer;
      callbackData->apuBufferPool = &apuBufferPool;
      callbackData->pFrameRater = &frameRate;

      auto callback = [](void *para) -> lynError_t {
        PLUGIN_DATA_T *callbackData = (PLUGIN_DATA_T *)para;
        callbackData->pFrameRater->AddFrame(1);
        callbackData->apuBufferPool->Push(
            callbackData->apuBuffer);  // 归还 apu 推理数据
        callbackData->pIpePool->PutBatchMem(
            callbackData->pIpeBuffer);  // 归还 IPE 处理数据
        delete callbackData;
        return 0;
      };
      lynStreamAddCallback(postStream, callback, callbackData);
      // 15. 根据传入的 ShowType 决定是进行视频编码还是直接显示
      if (ArgsParse::argShowType == ShowType::SaveFile) {
        // 视频编码
        pFrameTmp->eos = bEos;
        videoEncoder->WaitForStream(postStream);
        videoEncoder->EncodeImage(pFrameTmp);

      } else if (ArgsParse::argShowType == ShowType::DirectShow) {
        // 直接显示
        auto showCallback = [](void *para) -> int {
          SHOW_DATA_T *callbackData = (SHOW_DATA_T *)para;
          OpencvWindowManager::GetInstance()->ShowDevicesData(
              callbackData->pVdecFrame, callbackData->windowName);
          callbackData->pVdecFramePool->Push(callbackData->pVdecFrame->data);
          delete callbackData;
          return 0;
        };
        SHOW_DATA_T *showData = new SHOW_DATA_T;
        showData->pVdecFrame = pFrameTmp;
        showData->pVdecFramePool = &vdecFramePool;
        showData->windowName = strWindowName;
        lynStreamAddCallback(postStream, showCallback, showData);
      }
      iBatchIndex = 0;
    }
  }

  // 16. 等待流中事件处理完，并销毁资源
  CHECK_ERR(lynSynchronizeStream(ipeStream));
  CHECK_ERR(lynSynchronizeStream(apuStream));
  CHECK_ERR(lynSynchronizeStream(postStream));
  CHECK_ERR(lynDestroyStream(ipeStream));
  CHECK_ERR(lynDestroyStream(apuStream));
  CHECK_ERR(lynDestroyStream(postStream));
  CHECK_ERR(lynDestroyEvent(ipeEvent));
  CHECK_ERR(lynDestroyEvent(apuEvent));
  CHECK_ERR(lynPluginUnregister(plugin));  // Unregister plugin

  if (ArgsParse::argShowType == ShowType::DirectShow) {
    OpencvWindowManager::GetInstance()->Close(strWindowName);
  } else if (ArgsParse::argShowType == ShowType::SaveFile) {
    videoEncoder->UnInit();
    delete videoEncoder;
    videoEncoder = nullptr;
  }
  videoDecoder.Stop();
  videoDecoder.UnInit();
  lenetModel.UnLoadModel();
}

int main(int argc, char *argv[]) {
  // 1. Parse command-line arguments; bail out on failure.
  if (!ArgsParse::lenetArgsParser(argc, argv)) {
    return -1;
  }

  const std::string modelPath = ArgsParse::argModelPath;
  const std::string videoPath = ArgsParse::argInputFilePath;
  // Input path with the extension stripped; used to derive output names.
  const std::string videoOutStem = videoPath.substr(0, videoPath.rfind('.'));

  // 2. Create one device context per chip and one window name per channel.
  std::vector<DEVICE_CONTEXT_T> deviceContexts(ArgsParse::argChipsVec.size());
  std::vector<std::string> windowNames;
  for (uint32_t chip = 0; chip < ArgsParse::argChipsVec.size(); ++chip) {
    CHECK_ERR(
        lynCreateContext(&deviceContexts[chip].ctx, ArgsParse::argChipsVec[chip]));
    CHECK_ERR(lynRegisterErrorHandler(StreamErrorHandler, nullptr));
    deviceContexts[chip].pModelPath = modelPath.c_str();
    for (uint32_t channel = 0; channel < ArgsParse::argChannel; ++channel) {
      windowNames.emplace_back("device" +
                               std::to_string(ArgsParse::argChipsVec[chip]) +
                               "_channel" + std::to_string(channel));
    }
  }

  // Register the window names when rendering directly to the screen.
  if (ArgsParse::argShowType == ShowType::DirectShow) {
    OpencvWindowManager::GetInstance()->SetWindowNames(windowNames);
  }

  // 3. Spawn one worker thread per channel.
  std::vector<std::thread> workers;
  for (uint32_t idx = 0; idx < windowNames.size(); ++idx) {
    const std::string outPath =
        videoOutStem + "_Plugin_" + windowNames[idx] + ".264";
    workers.emplace_back(ThreadFunc, windowNames[idx],
                         &deviceContexts[idx / ArgsParse::argChannel], videoPath,
                         outPath);
  }
  if (ArgsParse::argShowType == ShowType::DirectShow) {
    OpencvWindowManager::GetInstance()->Process();
  }

  // 4. Wait for every worker to finish.
  for (auto &worker : workers) {
    worker.join();
  }

  // 5. Release the device contexts.
  for (auto &devCtx : deviceContexts) {
    CHECK_ERR(lynSetCurrentContext(devCtx.ctx));
    if (devCtx.ctx) {
      lynDestroyContext(devCtx.ctx);
    }
  }

  return 0;
}
