/**
 * @file Channel.hpp
 * @author SDK_TEAM
 * @brief
 * @version 0.1
 * @date 2022-12-01
 *
 * Copyright:
 * © 2018 北京灵汐科技有限公司 版权所有。
 * 注意：以下内容均为北京灵汐科技有限公司原创，未经本公司允许，不得转载，否则将视为侵权；对于不遵守此声明或者其他违法使用以下内容者，本公司依法保留追究权。
 * © 2018 Lynxi Technologies Co., Ltd. All rights reserved.
 * NOTICE: All information contained here is, and remains the property of Lynxi.
 * This file can not be copied or distributed without the permission of Lynxi
 * Technologies Co., Ltd.
 *
 */

#pragma once

#include <lyn_api.h>
#include <lyn_plugin.h>
#include <unistd.h>
#include <iostream>
#include "batchMem.hpp"
#include "blockQueue.hpp"
#include "boxInfoEncode.h"
#include "bufferPool.hpp"
#include "framePool.hpp"
#include "ipeParamYolov5s6.h"
#include "opencvWindow.hpp"
#include "osd_plugin.h"
#include "params.hpp"
#include "sys/time.h"
#include "util.hpp"
#include "videoDecoder.h"
#include "videoEncoder.h"
#include "yolox_post_process.h"
#include "nlohmann/json.hpp"
#include "statsInfo.h"

// Statistics item IDs reported to StatsInfo (see Channel::Init / ThreadFunc).
#define YOLOV5S6_STATS_FRAME_RATE               0
#define YOLOV5S6_STATS_DECODE_TIME              1
#define YOLOV5S6_STATS_PREPROCESS_TIME          2
#define YOLOV5S6_STATS_INFER_TIME               3
#define YOLOV5S6_STATS_POSTPROCESS_TIME         4
#define YOLOV5S6_STATS_ENCODE_TIME              5

// Per-device context structure, holding the resources bound to one device.
typedef struct {
  lynContext_t ctx;        // device context handle
  const char *pModelPath;  // path of the model to run on this device
} DEVICE_CONTEXT_T;

// Configuration for one processing channel, passed to Channel::Init.
struct ChannelInfo {
  std::string inputPath;          // input video file / stream path
  std::string outputPath;         // output base path (suffix added per ShowType)
  std::string channelName;        // display name used in logs and statistics
  std::string modelPath;          // path of the model to load
  std::string labelFile;          // JSON label file (array of {"desc": ...})
  std::string argPostPluginPath;  // path of the post-process plugin library
  int deviceID;                   // target device index
  lynContext_t *context;          // device context to run on (owned by caller)
  InputType inputType;            // kind of input (file/stream/...)
  int maxFps;                     // decode frame-rate cap
};

struct Channel {
  std::string m_path = "";
  VideoDecoder m_decoder;
  BlockQueue<lynFrame_t *> m_queue;  //专门给显示用的队列
  FramePool *m_framePool;            //三种模式下公用的对象池
  lynContext_t *m_context;
  lynVdecOutInfo_t m_videoInfo;
  std::thread *m_thread;
  std::string m_channelName;
  StatsInfo *m_statsInfo;

  Channel() : m_queue(5), m_statsInfo(nullptr) {}

  ~Channel() {
    if (m_framePool) {
      CHECK_ERR(lynSetCurrentContext(*m_context));
      delete m_framePool;
    }
  }

  bool Init(ChannelInfo &channelInfo) {
    m_channelName = channelInfo.channelName;
    m_context = channelInfo.context;
    map<uint32_t, std::string> Yolov3_Stats;
    Yolov3_Stats[YOLOV5S6_STATS_FRAME_RATE] = "frame rate(fps)";
    if (argPrintStats != 0) {
      Yolov3_Stats[YOLOV5S6_STATS_DECODE_TIME] = "decode cost time(ms)";
      Yolov3_Stats[YOLOV5S6_STATS_PREPROCESS_TIME] = "preprocess cost time(ms)";
      Yolov3_Stats[YOLOV5S6_STATS_INFER_TIME] = "infer cost time(ms)";
      Yolov3_Stats[YOLOV5S6_STATS_POSTPROCESS_TIME] = "postprocess cost time(ms)";
    }
    m_statsInfo = new StatsInfo(m_channelName, Yolov3_Stats);
    m_thread = new std::thread(&Channel::ThreadFunc, this, channelInfo);
    return true;
  }

  void Close() {
    if (m_thread != nullptr) {
      m_thread->join();
      delete m_thread;
      m_thread = nullptr;
    }
    if (m_statsInfo) {
      m_statsInfo->StopPrint();
      delete m_statsInfo;
    }
  }

  void putFrame(lynFrame_t *frame) { m_framePool->Push(frame); }

  bool getFrame(lynFrame_t **frame, int timeout = 1000) {
    bool ret = m_queue.take(*frame, timeout);
    if (!ret) {
      // std::cout << "failed to take frame to video!" << std::endl;
    }
    return ret;
  }

  void getVideoInfo(lynVdecOutInfo_t &videoInfo) { videoInfo = m_videoInfo; }

  void ThreadFunc(ChannelInfo channelInfo) {
    CHECK_ERR(lynSetCurrentContext(*channelInfo.context));

    ifstream fJson(channelInfo.labelFile);
    stringstream ss;
    ss << fJson.rdbuf();
    nlohmann::json js = nlohmann::json::parse(ss.str());

    int labelCount = js.size();
    if (labelCount == 0) {
      cout << channelInfo.channelName << " error : label file is invalid, exit."
           << endl;
      return;
    }

    LabelList labelList;
    labelList.labelNum = labelCount;
    labelList.labels = new LabelInfo[labelCount];
    memset(labelList.labels, 0, sizeof(LabelInfo) * labelCount);
    string labelName;
    for (int i = 0; i < labelCount; ++i) {
      labelName = js[i]["desc"].get<string>();
      strncpy(labelList.labels[i].name, labelName.c_str(),
              sizeof(labelList.labels[i].name) - 1);
    }

    // 1. 加载模型
    ModelInfo yolov5s6Info;
    yolov5s6Info.LoadModelByPath(channelInfo.modelPath.c_str());

    // 2. 根据传入的ShowType，生成输出文件路径，并删除上次运行输出文件
    std::string outPath = channelInfo.outputPath;
    if (argShowType == ShowType::SaveFile) {
      outPath += ".264";
      std::remove(outPath.c_str());
    } else if (argShowType == ShowType::OnlyApu) {
      outPath += "_BoxInfo.json";
      std::remove(outPath.c_str());
    }

    // 3. 初始化视频解码类，并获取输出信息
    VideoDecoder videoDecoder;
    videoDecoder.Init(channelInfo.inputPath, channelInfo.inputType,
                      channelInfo.maxFps);
    lynVdecOutInfo_t vdecOutInfo;
    videoDecoder.GetVdecOutInfo(&vdecOutInfo);
    m_videoInfo = vdecOutInfo;
    uint32_t nVdecOutSize = vdecOutInfo.predictBufSize;

    // 4. 创建 stream 与 event
    lynStream_t ipeStream = nullptr;
    lynStream_t apuStream = nullptr;
    lynStream_t postStream = nullptr;
    CHECK_ERR(lynCreateStream(&ipeStream));
    CHECK_ERR(lynCreateStream(&apuStream));
    CHECK_ERR(lynCreateStream(&postStream));
    lynEvent_t ipeEvent = nullptr;
    lynEvent_t apuEvent = nullptr;
    CHECK_ERR(lynCreateEvent(&ipeEvent));
    CHECK_ERR(lynCreateEvent(&apuEvent));

    // 5. 创建各个资源池，避免重复申请与释放
    void *apuBuffer;
    BufferPool apuBufferPool(yolov5s6Info.outputSize * yolov5s6Info.batchSize, 5);
    BatchMem *pIpeOutBuf = nullptr;
    BatchMemPool oIpeOutMemPool(false, yolov5s6Info.inputSize,
                                yolov5s6Info.batchSize, 5);
    BlockQueue<lynFrame_t *> blockQueue(5);
    m_framePool = new FramePool(nVdecOutSize, 5);
    FramePool vencRecvFramePool(nVdecOutSize, 5);

    // 6. 开启解码线程
    videoDecoder.Start(*channelInfo.context, std::ref(blockQueue),
                       std::ref(*m_framePool));

    // 7. 加载 Plugin, 并设置后处理参数
    lynPlugin_t postPlugin;
    CHECK_ERR(
        lynPluginRegister(&postPlugin, channelInfo.argPostPluginPath.c_str()));

    lynPlugin_t osdPlugin;
    if (argShowType != OnlyApu) {
      CHECK_ERR(lynPluginRegister(&osdPlugin, argOsdPluginPath.c_str()));
    }

    YoloxPostProcessInfo_t post_info;
    memset(&post_info, 0, sizeof(YoloxPostProcessInfo_t));
    post_info.is_pad_resize = 1;
    post_info.score_threshold = 0.25;
    post_info.nms_threshold = 0.45;
    post_info.nms_top_k = 500;
    post_info.anchorSize = 3;
    post_info.width = yolov5s6Info.width;
    post_info.height = yolov5s6Info.height;
    post_info.ori_width = vdecOutInfo.width;
    post_info.ori_height = vdecOutInfo.height;
    post_info.class_num = labelCount;
    LabelInfo *devLabels = nullptr;
    CHECK_ERR(
        lynMalloc((void **)&devLabels, sizeof(LabelInfo) * labelList.labelNum));
    CHECK_ERR(lynMemcpy(devLabels, labelList.labels,
                        sizeof(LabelInfo) * labelList.labelNum,
                        ClientToServer));
    post_info.labelList.labelNum = labelList.labelNum;
    post_info.labelList.labels = devLabels;
    lynBoxesInfo *pDevBoxesInfo;
    CHECK_ERR(lynMalloc((void **)&pDevBoxesInfo, sizeof(lynBoxesInfo)));
    CHECK_ERR(lynMemset(pDevBoxesInfo, 0, sizeof(lynBoxesInfo)));
    post_info.boxesInfo = (lynBoxesInfo *)pDevBoxesInfo;

    // 8. 初始化 IPE 处理类
    IpeParamYolov5s6 ipeParam(yolov5s6Info.width, yolov5s6Info.height);
    ipeParam.SetImgInfo(vdecOutInfo.width, vdecOutInfo.height,
                        LYN_PIX_FMT_NV12);

    // 9. 根据传入的 ShowType 决定是使用显示窗口类, 视频编码类，BoxInfo 编码类
    VideoEncoder *videoEncoder = nullptr;
    BoxInfoEncode *boxInfoEncode = nullptr;
    if (argShowType == ShowType::SaveFile) {
      videoEncoder = new VideoEncoder;
      videoEncoder->Init(vdecOutInfo, outPath, m_framePool, &vencRecvFramePool);
    } else if (argShowType == ShowType::DirectShow) {
      // OpencvWindowManager::GetInstance()->SetWindowSize(strWindowName,
      // vdecOutInfo.height,
      //                                                   vdecOutInfo.width);
    } else if (argShowType == ShowType::OnlyApu) {
      boxInfoEncode = new BoxInfoEncode;
      boxInfoEncode->Init(outPath);
    }

    bool bEos = false;
    int iBatchIndex = 0;
    bool resetEncoder = false;
    m_statsInfo->StartPrint();
    while (!bEos) {
      // 10. 取出一个解码 Frame
      lynFrame_t *pFrameTmp;
      int ret = blockQueue.take(pFrameTmp);
      if (!ret) {
        continue;
      }

      bEos = pFrameTmp->eos;

      if (!bEos) {
        m_statsInfo->UpdateStats(YOLOV5S6_STATS_DECODE_TIME, 1, (uint64_t)pFrameTmp->userPtr);
      }
      //分辨率发生变化，更新ipe参数
      if (bEos && videoDecoder.m_resolutionchange) {
        if (argShowType == ShowType::SaveFile) {
          videoEncoder->EncodeImage(pFrameTmp);
          videoEncoder->UnInit();
          resetEncoder = true;
        } else {
          m_framePool->Push(pFrameTmp);
        }
        bEos = false;
        videoDecoder.m_resolutionchange = false;
        continue;  //
      }

      videoDecoder.GetVdecOutInfo(&vdecOutInfo);
      ipeParam.SetImgInfo(vdecOutInfo.width, vdecOutInfo.height,
                          LYN_PIX_FMT_NV12);
      m_videoInfo = vdecOutInfo;

      if (resetEncoder) {
        vencRecvFramePool.ResetSize(vdecOutInfo.predictBufSize);
        videoEncoder->Init(vdecOutInfo, outPath, m_framePool,
                           &vencRecvFramePool);
        resetEncoder = false;
      }

      if (iBatchIndex == 0) {
        pIpeOutBuf = oIpeOutMemPool.GetBatchMem();
        CHECK_ERR(lynMemset(pIpeOutBuf->Buffer(), 0, pIpeOutBuf->BufferSize()));
      }

      // 11. IPE 处理
      lynEvent_t ipeBeginEvt = getDeviceTimePoint(ipeStream);
      ipeParam.CalcParam(ipeStream, pFrameTmp->data, pIpeOutBuf->GetElement());
      lynEvent_t ipeEndEvt = getDeviceTimePoint(ipeStream);

      auto ipeCallback = [this, ipeBeginEvt, ipeEndEvt]() {
        float elapsedTime = getEventElapsedTime(ipeBeginEvt, ipeEndEvt);
        m_statsInfo->UpdateStats(YOLOV5S6_STATS_PREPROCESS_TIME, 1, static_cast<uint64_t>(elapsedTime * 1000));
        CHECK_ERR(lynDestroyEvent(ipeBeginEvt));
        CHECK_ERR(lynDestroyEvent(ipeEndEvt));
      };
      SetCallback(ipeStream, ipeCallback);

      ++iBatchIndex;

      // 12. IPE输出满了或是最后一个包，进行apu处理
      if (iBatchIndex == yolov5s6Info.batchSize || bEos) {
        CHECK_ERR(lynRecordEvent(ipeStream, ipeEvent));
        CHECK_ERR(lynStreamWaitEvent(apuStream, ipeEvent));
        apuBuffer = apuBufferPool.Pop();
        // 13. 调用 APU 推理接口
        lynEvent_t apuBeginEvt = getDeviceTimePoint(apuStream);
        CHECK_ERR(lynExecuteModelAsync(apuStream, yolov5s6Info.model,
                                       pIpeOutBuf->Buffer(), apuBuffer,
                                       yolov5s6Info.batchSize));
        lynEvent_t apuEndEvt = getDeviceTimePoint(apuStream);

        auto inferCallback = [this, apuBeginEvt, apuEndEvt, iBatchIndex]() {
          float elapsedTime = getEventElapsedTime(apuBeginEvt, apuEndEvt);
          m_statsInfo->UpdateStats(YOLOV5S6_STATS_INFER_TIME, iBatchIndex, static_cast<uint64_t>(elapsedTime * 1000));
          CHECK_ERR(lynDestroyEvent(apuBeginEvt));
          CHECK_ERR(lynDestroyEvent(apuEndEvt));
        };
        SetCallback(apuStream, inferCallback);

        // 14. 在 APU 推理完之后，调用 Plugin 进行后处理
        CHECK_ERR(lynRecordEvent(apuStream, apuEvent));
        CHECK_ERR(lynStreamWaitEvent(postStream, apuEvent));

        post_info.output_tensor = apuBuffer;
        lynEvent_t postBeginEvt = getDeviceTimePoint(postStream);
        CHECK_ERR(lynPluginRunAsync(postStream, postPlugin, "lynYoloxPostProcess",
                                    &post_info, sizeof(post_info)));
        lynEvent_t postEndEvt = getDeviceTimePoint(postStream);

        auto postCallback = [pIpeOutBuf, &oIpeOutMemPool, apuBuffer, &apuBufferPool, this, postBeginEvt, postEndEvt]() {
          float elapsedTime = getEventElapsedTime(postBeginEvt, postEndEvt);
          m_statsInfo->UpdateStats(YOLOV5S6_STATS_POSTPROCESS_TIME, 1, static_cast<uint64_t>(elapsedTime * 1000));
          m_statsInfo->UpdateStats(YOLOV5S6_STATS_FRAME_RATE, 1, 0);
          apuBufferPool.Push(apuBuffer);
          oIpeOutMemPool.PutBatchMem(pIpeOutBuf);
          CHECK_ERR(lynDestroyEvent(postBeginEvt));
          CHECK_ERR(lynDestroyEvent(postEndEvt));
        };
        SetCallback(postStream, postCallback);

        // 选择 OnlyApu 时，不进行画框处理
        if (argShowType != ShowType::OnlyApu) {
          lynDrawBoxAndTextPara para;
          para.imgData = pFrameTmp->data;
          para.imgFmt = LYN_PIX_FMT_NV12;
          para.imgW = vdecOutInfo.width;
          para.imgH = vdecOutInfo.height;
          para.boxesInfo = pDevBoxesInfo;
          para.boxColor = DRAW_COLOR_BLUE;
          para.boxThick = DRAW_THICK_2;
          para.fontSize = FONT_SIZE_24;
          para.fontColor = DRAW_COLOR_BLUE;
          CHECK_ERR(lynPluginRunAsync(
              postStream, osdPlugin, "lynDrawBoxAndText", &para, sizeof(para)));
        }
        // 15. 根据传入的 ShowType 决定是进行视频编码还是直接显示
        if (argShowType == ShowType::SaveFile) {
          // 视频编码
          pFrameTmp->eos = bEos;
          videoEncoder->WaitForStream(postStream);
          videoEncoder->EncodeImage(pFrameTmp);
        } else if (argShowType == ShowType::DirectShow) {
          auto showCallback = [this, pFrameTmp]() {
            m_queue.put(pFrameTmp);
          };
          SetCallback(postStream, showCallback);
        } else if (argShowType == ShowType::OnlyApu) {
          // 保存推理结果
          lynBoxesInfo *boxInfo = new lynBoxesInfo;
          // 将推理结果拷贝至 Host 侧
          lynMemcpyAsync(postStream, boxInfo,
                         post_info.boxesInfo, sizeof(lynBoxesInfo),
                         ServerToClient);
          auto saveCallback = [this, pFrameTmp, boxInfo, boxInfoEncode]() {
            boxInfoEncode->EncodeToJson(*boxInfo, pFrameTmp->eos);
            delete boxInfo;
            m_framePool->Push(pFrameTmp);
          };
          SetCallback(postStream, saveCallback);
        }
        iBatchIndex = 0;
      }
    }

    m_statsInfo->StopPrint();
    // 16. 等待流中事件处理完，并销毁资源
    CHECK_ERR(lynSynchronizeStream(ipeStream));
    CHECK_ERR(lynSynchronizeStream(apuStream));
    CHECK_ERR(lynSynchronizeStream(postStream));
    CHECK_ERR(lynDestroyStream(ipeStream));
    CHECK_ERR(lynDestroyStream(apuStream));
    CHECK_ERR(lynDestroyStream(postStream));
    CHECK_ERR(lynDestroyEvent(ipeEvent));
    CHECK_ERR(lynDestroyEvent(apuEvent));
    CHECK_ERR(lynPluginUnregister(postPlugin));  // Unregister plugin
    if (argShowType != OnlyApu) {
      CHECK_ERR(lynPluginUnregister(osdPlugin));   // Unregister plugin
    }
    CHECK_ERR(lynFree(post_info.boxesInfo));
    CHECK_ERR(lynFree(devLabels));
    delete[] labelList.labels;

    if (argShowType == ShowType::DirectShow) {
      // OpencvWindowManager::GetInstance()->Close(strWindowName);
    } else if (argShowType == ShowType::SaveFile) {
      videoEncoder->UnInit();
      delete videoEncoder;
      videoEncoder = nullptr;
    } else if (argShowType == ShowType::OnlyApu) {
      boxInfoEncode->UnInit();
      delete boxInfoEncode;
      boxInfoEncode = nullptr;
    }
    videoDecoder.Stop();
    videoDecoder.UnInit();
    yolov5s6Info.UnLoadModel();
  }
};
