/**
 * @file Channel.hpp
 * @author SDK_TEAM
 * @brief
 * @version 0.1
 * @date 2022-12-01
 *
 * Copyright:
 * © 2018 北京灵汐科技有限公司 版权所有。
 * 注意：以下内容均为北京灵汐科技有限公司原创，未经本公司允许，不得转载，否则将视为侵权；对于不遵守此声明或者其他违法使用以下内容者，本公司依法保留追究权。
 * © 2018 Lynxi Technologies Co., Ltd. All rights reserved.
 * NOTICE: All information contained here is, and remains the property of Lynxi.
 * This file can not be copied or distributed without the permission of Lynxi
 * Technologies Co., Ltd.
 *
 */

#pragma once

#include <lyn_api.h>
#include <lyn_plugin.h>
#include <unistd.h>
#include <iostream>
#include "argsParser.h"
#include "batchMem.hpp"
#include "blockQueue.hpp"
#include "boxInfoEncode.h"
#include "bufferPool.hpp"
#include "framePool.hpp"
#include "frameRater.h"
#include "ipeYolov5Param.h"
#include "opencvWindow.hpp"
#include "osd_plugin.h"
#include "sys/time.h"
#include "util.hpp"
#include "videoDecoder.h"
#include "videoEncoder.h"
#include "yolox_post_process.h"

using namespace std;
/**
 * @brief Return the directory portion of a file path, accepting both
 *        POSIX ('/') and Windows ('\\') separators.
 * @param filePath Absolute or relative file path.
 * @return Everything before the last separator (no trailing separator);
 *         an empty string when the path contains no separator at all.
 * @note `inline` is required: this is a definition in a header included by
 *       multiple translation units — without it, linking violates the ODR.
 */
inline std::string GetBasePath(const std::string &filePath) {
  const size_t lastSeparatorPos = filePath.find_last_of("/\\");
  if (lastSeparatorPos == std::string::npos) {
    return "";
  }
  return filePath.substr(0, lastSeparatorPos);
}
// Callback payload for the post-process stream: once both plugin runs have
// finished, the callback returns the IPE batch buffer and the two APU output
// buffers to their pools and bumps the frame-rate counter. All pointers are
// non-owning; the callback deletes only this struct itself.
typedef struct {
  BatchMemPool *pIpePool;      // IPE memory pool the batch buffer goes back to
  BatchMem *pIpeBuffer;        // IPE batch-memory object to recycle
  void *apuBuffer;             // APU inference-result buffer (model 1)
  void *apuBuffer2;            // APU inference-result buffer (model 2)
  BufferPool *apuBufferPool;   // pool that apuBuffer is pushed back to
  BufferPool *apuBufferPool2;  // pool that apuBuffer2 is pushed back to
  FrameRater *pFrameRater;     // frame-rate statistics accumulator
} PLUGIN_DATA_T;

// Callback payload for ShowType::DirectShow: after the OSD drawing on the
// post-process stream completes, the callback hands the decoded frame to
// the display queue (consumed via Channel::getFrame).
typedef struct {
  lynFrame_t *pVdecFrame;     // decoded frame to hand to the display queue
  FramePool *pVdecFramePool;  // pool the frame belongs to (non-owning)
  std::string windowName;     // display window name (not read by the visible callback)
  BlockQueue<lynFrame_t *> *frameQueue;  // queue the frame is put into
} SHOW_DATA_T;

// 设备context结构体，用于设备对应的相关资源存储
typedef struct {
  lynContext_t ctx;        // context 句柄
  const char *pModelPath;  // 模型路径
} DEVICE_CONTEXT_T;

// Per-channel configuration consumed by Channel::Init / Channel::ThreadFunc.
struct ChannelInfo {
  std::string inputPath;       // video source given to the decoder
  std::string outputPath;      // output base path; ".264" or "_BoxInfo.json" appended per ShowType
  std::string channelName;     // channel label (used for frame-rate stats)
  std::string modelPath;       // APU model file path (loaded twice, once per inference pass)
  std::string osdpluginPath;   // OSD (draw box/text) plugin path
  std::string postpluginPath;  // post-process plugin path; the label JSON is resolved relative to it
  int deviceID;
  lynContext_t *context;       // device context the channel runs under (non-owning)
  ShowType showType;           // SaveFile / DirectShow / OnlyApu
  ModelType modelType;
  InputType inputType;
  int maxFps;                  // frame-rate cap passed to the decoder
};

// Callback payload for ShowType::OnlyApu: the two box-info results are
// async-copied into this struct on the post-process stream, then the
// callback recycles the decoded frame and serializes both results to JSON.
typedef struct {
  lynFrame_t *frame;          // decoded frame, pushed back to the pool in the callback
  FramePool *pVdecFramePool;  // pool the frame is returned to (non-owning)
  lynBoxesInfo boxInfo;       // host-side copy of the first model's box results
  lynBoxesInfo boxInfo2;      // host-side copy of the second model's box results
  BoxInfoEncode *encode;      // JSON encoder (non-owning)
} RESULT_BOXINFO_T2;

struct Channel {
  std::string m_path = "";
  VideoDecoder m_decoder;
  BlockQueue<lynFrame_t *> m_queue;
  FramePool *m_framePool;
  lynContext_t *m_context;
  lynVdecOutInfo_t m_videoInfo;
  std::thread *m_thread;
  std::string m_channelName;
  LabelList labelList;
  int labelCount;

  Channel() : m_queue(5) {}

  ~Channel() {
    if (m_framePool) {
      CHECK_ERR(lynSetCurrentContext(*m_context));
      delete m_framePool;
    }
  }

  bool Init(ChannelInfo &channelInfo) {
    std::string label_file = "/../yolox/label/label.v5x7.json";
    std::string label_path =
        GetBasePath(channelInfo.postpluginPath) + label_file;

    ifstream fJson(label_path);
    stringstream ss;
    ss << fJson.rdbuf();
    nlohmann::json js = nlohmann::json::parse(ss.str());

    labelCount = js.size();
    if (labelCount == 0) {
      cout << "error : label file is not invalid." << endl;
      return 0;
    }

    labelList.labelNum = labelCount;
    labelList.labels = new LabelInfo[labelCount];
    memset(labelList.labels, 0, sizeof(LabelInfo) * labelCount);
    string labelName;
    for (int i = 0; i < labelCount; ++i) {
      labelName = js[i]["desc"].get<string>();
      strncpy(labelList.labels[i].name, labelName.c_str(),
              sizeof(labelList.labels[i].name) - 1);
    }

    m_channelName = channelInfo.channelName;
    m_context = channelInfo.context;
    m_thread = new std::thread(&Channel::ThreadFunc, this, channelInfo);

    return true;
  }

  void Close() {
    if (m_thread != nullptr) {
      m_thread->join();
      delete m_thread;
      m_thread = nullptr;
    }
    delete[] labelList.labels;
  }

  void putFrame(lynFrame_t *frame) { m_framePool->Push(frame); }

  bool getFrame(lynFrame_t **frame, int timeout = 1000) {
    bool ret = m_queue.take(*frame, timeout);
    return ret;
  }

  void getVideoInfo(lynVdecOutInfo_t &videoInfo) { videoInfo = m_videoInfo; }

  void ThreadFunc(ChannelInfo channelInfo) {
    CHECK_ERR(lynSetCurrentContext(*channelInfo.context));

    // 1. 加载模型
    ModelInfo yolov5Info;
    yolov5Info.LoadModelByPath(channelInfo.modelPath.c_str());

    ModelInfo yolov5Info2;
    yolov5Info2.LoadModelByPath(channelInfo.modelPath.c_str());

    // 2. 根据传入的ShowType，生成输出文件路径，并删除上次运行输出文件
    std::string outPath = channelInfo.outputPath;
    if (channelInfo.showType == ShowType::SaveFile) {
      outPath += ".264";
      std::remove(outPath.c_str());
    } else if (channelInfo.showType == ShowType::OnlyApu) {
      outPath += "_BoxInfo.json";
      std::remove(outPath.c_str());
    }

    // 3. 初始化视频解码类，并获取输出信息
    VideoDecoder videoDecoder;
    videoDecoder.Init(channelInfo.inputPath, channelInfo.inputType,
                      channelInfo.maxFps);
    lynVdecOutInfo_t vdecOutInfo;
    videoDecoder.GetVdecOutInfo(&vdecOutInfo);
    m_videoInfo = vdecOutInfo;
    uint32_t nVdecOutSize = vdecOutInfo.predictBufSize;

    // 4. 创建 stream 与 event
    lynStream_t ipeStream = nullptr;
    lynStream_t apuStream = nullptr;
    lynStream_t postStream = nullptr;
    CHECK_ERR(lynCreateStream(&ipeStream));
    CHECK_ERR(lynCreateStream(&apuStream));
    CHECK_ERR(lynCreateStream(&postStream));
    lynEvent_t ipeEvent = nullptr;
    lynEvent_t apuEvent = nullptr;
    CHECK_ERR(lynCreateEvent(&ipeEvent));
    CHECK_ERR(lynCreateEvent(&apuEvent));

    // 5. 创建各个资源池，避免重复申请与释放
    void *apuBuffer;
    void *apuBuffer2;
    BufferPool apuBufferPool(yolov5Info.outputSize * yolov5Info.batchSize, 5);
    BufferPool apuBufferPool2(yolov5Info2.outputSize * yolov5Info2.batchSize,
                              5);
    BatchMem *pIpeOutBuf = nullptr;
    BatchMemPool oIpeOutMemPool(false, yolov5Info.inputSize,
                                yolov5Info.batchSize, 5);
    BlockQueue<lynFrame_t *> blockQueue(5);
    m_framePool = new FramePool(nVdecOutSize, 5);
    FramePool vencRecvFramePool(nVdecOutSize, 5);

    // 6. 开启解码线程
    videoDecoder.Start(*channelInfo.context, std::ref(blockQueue),
                       std::ref(*m_framePool));

    FrameRater frameRate(channelInfo.channelName);
    frameRate.SetInterval(1);

    // 7. 加载 Plugin, 并设置后处理参数
    lynPlugin_t postplugin;
    CHECK_ERR(
        lynPluginRegister(&postplugin, channelInfo.postpluginPath.c_str()));
    lynPlugin_t postplugin2;
    CHECK_ERR(
        lynPluginRegister(&postplugin2, channelInfo.postpluginPath.c_str()));
    lynPlugin_t osdplugin;
    CHECK_ERR(lynPluginRegister(&osdplugin, channelInfo.osdpluginPath.c_str()));

    YoloxPostProcessInfo_t post_info;
    memset(&post_info, 0, sizeof(YoloxPostProcessInfo_t));
    post_info.is_pad_resize = 1;
    post_info.score_threshold = 0.25;
    post_info.nms_threshold = 0.45;
    post_info.nms_top_k = 500;
    post_info.anchorSize = 3;
    post_info.width = yolov5Info.width;
    post_info.height = yolov5Info.height;
    post_info.ori_width = vdecOutInfo.width;
    post_info.ori_height = vdecOutInfo.height;
    post_info.class_num = labelCount;
    LabelInfo *devLabels = nullptr;
    CHECK_ERR(
        lynMalloc((void **)&devLabels, sizeof(LabelInfo) * labelList.labelNum));
    CHECK_ERR(lynMemcpy(devLabels, labelList.labels,
                        sizeof(LabelInfo) * labelList.labelNum,
                        ClientToServer));
    post_info.labelList.labelNum = labelList.labelNum;
    post_info.labelList.labels = devLabels;
    lynBoxesInfo *pDevBoxesInfo;
    CHECK_ERR(lynMalloc((void **)&pDevBoxesInfo, sizeof(lynBoxesInfo)));
    post_info.boxesInfo = (lynBoxesInfo *)pDevBoxesInfo;

    YoloxPostProcessInfo_t post_info2;
    memset(&post_info2, 0, sizeof(YoloxPostProcessInfo_t));
    post_info2.is_pad_resize = 1;
    post_info2.score_threshold = 0.25;
    post_info2.nms_threshold = 0.45;
    post_info2.nms_top_k = 500;
    post_info2.width = yolov5Info2.width;
    post_info2.height = yolov5Info2.height;
    post_info2.ori_width = vdecOutInfo.width;
    post_info2.ori_height = vdecOutInfo.height;
    post_info2.class_num = labelCount;
    LabelInfo *devLabels2 = nullptr;
    CHECK_ERR(lynMalloc((void **)&devLabels2,
                        sizeof(LabelInfo) * labelList.labelNum));
    CHECK_ERR(lynMemcpy(devLabels2, labelList.labels,
                        sizeof(LabelInfo) * labelList.labelNum,
                        ClientToServer));
    post_info2.labelList.labelNum = labelList.labelNum;
    post_info2.labelList.labels = devLabels2;
    lynBoxesInfo *pDevBoxesInfo2;
    CHECK_ERR(lynMalloc((void **)&pDevBoxesInfo2, sizeof(lynBoxesInfo)));
    post_info2.boxesInfo = (lynBoxesInfo *)pDevBoxesInfo2;

    // 8. 初始化 IPE 处理类
    IpeParamYolov5 ipeParam(yolov5Info.width, yolov5Info.height);
    ipeParam.SetImgInfo(vdecOutInfo.width, vdecOutInfo.height,
                        LYN_PIX_FMT_NV12);

    // 9. 根据传入的 ShowType 决定是使用显示窗口类, 视频编码类，BoxInfo 编码类
    VideoEncoder *videoEncoder = nullptr;
    BoxInfoEncode *boxInfoEncode = nullptr;
    if (channelInfo.showType == ShowType::SaveFile) {
      videoEncoder = new VideoEncoder;
      videoEncoder->Init(vdecOutInfo, outPath, m_framePool, &vencRecvFramePool);
    } else if (channelInfo.showType == ShowType::DirectShow) {
      // OpencvWindowManager::GetInstance()->SetWindowSize(strWindowName,
      // vdecOutInfo.height,
      //                                                   vdecOutInfo.width);
    } else if (channelInfo.showType == ShowType::OnlyApu) {
      boxInfoEncode = new BoxInfoEncode;
      boxInfoEncode->Init(outPath);
    }

    bool bEos = false;
    int iBatchIndex = 0;
    while (!bEos) {
      // 10. 取出一个解码 Frame
      lynFrame_t *pFrameTmp;
      int ret = blockQueue.take(pFrameTmp);
      if (!ret) {
        continue;
      }
      bEos = pFrameTmp->eos;
      if (iBatchIndex == 0) {
        pIpeOutBuf = oIpeOutMemPool.GetBatchMem();
      }

      // 11. IPE 处理
      ipeParam.CalcParam(ipeStream, pFrameTmp->data, pIpeOutBuf->GetElement());

      ++iBatchIndex;

      // 12. IPE输出满了或是最后一个包，进行apu处理
      if (iBatchIndex == yolov5Info.batchSize || bEos) {
        CHECK_ERR(lynRecordEvent(ipeStream, ipeEvent));
        CHECK_ERR(lynStreamWaitEvent(apuStream, ipeEvent));
        apuBuffer = apuBufferPool.Pop();
        apuBuffer2 = apuBufferPool2.Pop();
        // 13. 调用 APU 推理接口
        CHECK_ERR(lynExecuteModelAsync(apuStream, yolov5Info.model,
                                       pIpeOutBuf->Buffer(), apuBuffer,
                                       yolov5Info.batchSize));

        CHECK_ERR(lynExecuteModelAsync(apuStream, yolov5Info2.model,
                                       pIpeOutBuf->Buffer(), apuBuffer2,
                                       yolov5Info2.batchSize));
        // 14. 在 APU 推理完之后，调用 Plugin 进行后处理
        CHECK_ERR(lynRecordEvent(apuStream, apuEvent));
        CHECK_ERR(lynStreamWaitEvent(postStream, apuEvent));
        post_info.output_tensor = apuBuffer;
        post_info2.output_tensor = apuBuffer2;
        CHECK_ERR(lynPluginRunAsync(postStream, postplugin,
                                    "lynYoloxPostProcess", &post_info,
                                    sizeof(post_info)));

        CHECK_ERR(lynPluginRunAsync(postStream, postplugin2,
                                    "lynYoloxPostProcess", &post_info2,
                                    sizeof(post_info2)));
        PLUGIN_DATA_T *callbackData = new PLUGIN_DATA_T;
        callbackData->pIpeBuffer = pIpeOutBuf;
        callbackData->pIpePool = &oIpeOutMemPool;
        callbackData->apuBuffer = apuBuffer;
        callbackData->apuBuffer2 = apuBuffer2;
        callbackData->apuBufferPool = &apuBufferPool;
        callbackData->apuBufferPool2 = &apuBufferPool2;
        callbackData->pFrameRater = &frameRate;

        auto callback = [](void *para) -> lynError_t {
          PLUGIN_DATA_T *callbackData = (PLUGIN_DATA_T *)para;
          callbackData->pFrameRater->AddFrame(1);
          callbackData->apuBufferPool->Push(callbackData->apuBuffer);
          callbackData->apuBufferPool2->Push(callbackData->apuBuffer2);
          callbackData->pIpePool->PutBatchMem(callbackData->pIpeBuffer);
          delete callbackData;
          return 0;
        };
        lynStreamAddCallback(postStream, callback, callbackData);
        // 选择 OnlyApu 时，不进行画框处理
        if (channelInfo.showType != ShowType::OnlyApu) {
          lynDrawBoxAndTextPara para;
          para.imgData = pFrameTmp->data;
          para.imgFmt = LYN_PIX_FMT_NV12;
          para.imgW = vdecOutInfo.width;
          para.imgH = vdecOutInfo.height;
          para.boxesInfo = pDevBoxesInfo;
          para.boxColor = DRAW_COLOR_BLUE;
          para.boxThick = DRAW_THICK_4;
          para.fontSize = FONT_SIZE_24;
          para.fontColor = DRAW_COLOR_BLUE;
          CHECK_ERR(lynPluginRunAsync(
              postStream, osdplugin, "lynDrawBoxAndText", &para, sizeof(para)));

          lynDrawBoxAndTextPara para2;
          para2.imgData = pFrameTmp->data;
          para2.imgFmt = LYN_PIX_FMT_NV12;
          para2.imgW = vdecOutInfo.width;
          para2.imgH = vdecOutInfo.height;
          para2.boxesInfo = pDevBoxesInfo2;
          para2.boxColor = DRAW_COLOR_RED;
          para2.boxThick = DRAW_THICK_2;
          para2.fontSize = FONT_SIZE_24;
          para2.fontColor = DRAW_COLOR_RED;
          CHECK_ERR(lynPluginRunAsync(postStream, osdplugin,
                                      "lynDrawBoxAndText", &para2,
                                      sizeof(para2)));
        }

        // 15. 根据传入的 ShowType 决定是进行视频编码还是直接显示
        if (channelInfo.showType == ShowType::SaveFile) {
          // 视频编码
          pFrameTmp->eos = bEos;
          videoEncoder->WaitForStream(postStream);
          videoEncoder->EncodeImage(pFrameTmp);
        } else if (channelInfo.showType == ShowType::DirectShow) {
          // 直接显示
          auto showCallback = [](void *para) -> int {
            SHOW_DATA_T *callbackData = (SHOW_DATA_T *)para;
            callbackData->frameQueue->put(callbackData->pVdecFrame);
            delete callbackData;
            return 0;
          };
          SHOW_DATA_T *showData = new SHOW_DATA_T;
          showData->pVdecFrame = pFrameTmp;
          showData->pVdecFramePool = m_framePool;
          showData->frameQueue = &m_queue;
          lynStreamAddCallback(postStream, showCallback, showData);
        } else if (channelInfo.showType == ShowType::OnlyApu) {
          // 保存推理结果
          RESULT_BOXINFO_T2 *resultCallback = new RESULT_BOXINFO_T2;
          // 将推理结果拷贝至 Host 侧
          lynMemcpyAsync(postStream, &resultCallback->boxInfo,
                         post_info.boxesInfo, sizeof(lynBoxesInfo),
                         ServerToClient);
          lynMemcpyAsync(postStream, &resultCallback->boxInfo2,
                         post_info2.boxesInfo, sizeof(lynBoxesInfo),
                         ServerToClient);
          resultCallback->encode = boxInfoEncode;
          resultCallback->frame = pFrameTmp;
          resultCallback->pVdecFramePool = m_framePool;
          lynStreamAddCallback(
              postStream,
              [](void *data) -> int {
                RESULT_BOXINFO_T2 *resultCallback = (RESULT_BOXINFO_T2 *)data;
                bool eos = resultCallback->frame->eos;
                resultCallback->pVdecFramePool->Push(resultCallback->frame);
                resultCallback->encode->EncodeToJson(resultCallback->boxInfo,
                                                     eos);
                resultCallback->encode->EncodeToJson(resultCallback->boxInfo2,
                                                     eos);
                delete resultCallback;
                return 0;
              },
              resultCallback);
        }
        iBatchIndex = 0;
      }
    }

    // 16. 等待流中事件处理完，并销毁资源
    CHECK_ERR(lynSynchronizeStream(ipeStream));
    CHECK_ERR(lynSynchronizeStream(apuStream));
    CHECK_ERR(lynSynchronizeStream(postStream));
    CHECK_ERR(lynDestroyStream(ipeStream));
    CHECK_ERR(lynDestroyStream(apuStream));
    CHECK_ERR(lynDestroyStream(postStream));
    CHECK_ERR(lynDestroyEvent(ipeEvent));
    CHECK_ERR(lynDestroyEvent(apuEvent));
    CHECK_ERR(lynPluginUnregister(postplugin));   // Unregister plugin
    CHECK_ERR(lynPluginUnregister(postplugin2));  // Unregister plugin
    CHECK_ERR(lynPluginUnregister(osdplugin));    // Unregister plugin
    CHECK_ERR(lynFree(pDevBoxesInfo));
    CHECK_ERR(lynFree(pDevBoxesInfo2));

    if (channelInfo.showType == ShowType::DirectShow) {
      // OpencvWindowManager::GetInstance()->Close(strWindowName);
    } else if (channelInfo.showType == ShowType::SaveFile) {
      videoEncoder->UnInit();
      delete videoEncoder;
      videoEncoder = nullptr;
    } else if (channelInfo.showType == ShowType::OnlyApu) {
      boxInfoEncode->UnInit();
      delete boxInfoEncode;
      boxInfoEncode = nullptr;
    }
    videoDecoder.Stop();
    videoDecoder.UnInit();
    yolov5Info.UnLoadModel();
  }
};
