/**
 * @file Channel.hpp
 * @author SDK_TEAM
 * @brief Per-channel YOLOv8 segmentation pipeline: decode, IPE preprocess,
 *        APU inference, plugin postprocess, OSD draw, encode/show/save.
 * @version 0.1
 * @date 2022-12-01
 *
 * Copyright:
 * © 2018 北京灵汐科技有限公司 版权所有。
 * 注意：以下内容均为北京灵汐科技有限公司原创，未经本公司允许，不得转载，否则将视为侵权；对于不遵守此声明或者其他违法使用以下内容者，本公司依法保留追究权。
 * © 2018 Lynxi Technologies Co., Ltd. All rights reserved.
 * NOTICE: All information contained here is, and remains the property of Lynxi.
 * This file can not be copied or distributed without the permission of Lynxi
 * Technologies Co., Ltd.
 *
 */

#pragma once

#include <lyn_api.h>
#include <lyn_plugin.h>
#include <unistd.h>
#include <iostream>
#include "batchMem.hpp"
#include "blockQueue.hpp"
#include "boxInfoEncode.h"
#include "bufferPool.hpp"
#include "framePool.hpp"
#include "ipeParamYolov8.h"
#include "opencvWindow.hpp"
// #include "osd_plugin.h"
#include <dlfcn.h>
#include "nlohmann/json.hpp"
#include "osdPlugin.h"
#include "params.hpp"
#include "segment_post_process.h"
#include "statsInfo.h"
#include "sys/time.h"
#include "util.hpp"
#include "videoDecoder.h"
#include "videoEncoder.h"

// Statistics item indices, used as keys for StatsInfo::UpdateStats.
#define YOLOV8S_SEG_STATS_FRAME_RATE 0
#define YOLOV8S_SEG_STATS_DECODE_TIME 1
#define YOLOV8S_SEG_STATS_PREPROCESS_TIME 2
#define YOLOV8S_SEG_STATS_INFER_TIME 3
#define YOLOV8S_SEG_STATS_POSTPROCESS_TIME 4
#define YOLOV8S_SEG_STATS_ENCODE_TIME 5

using SEGMENT_POST_FUNC = int (*)(SegmentPostProcessInfo_t *);

// Per-device resource record: pairs a device context handle with the path of
// the model that should run on that device.
struct DEVICE_CONTEXT_T {
  lynContext_t ctx;        // context handle for the device
  const char *pModelPath;  // path of the model file bound to this device
};

// Configuration handed to Channel::Init(); one instance per input stream.
// Scalar/pointer fields carry in-class defaults so a partially filled
// instance never exposes indeterminate values.
struct ChannelInfo {
  std::string inputPath;          // input file or stream path
  std::string outputPath;         // output path stem (extension appended per ShowType)
  std::string channelName;        // channel name used for stats reporting
  std::string modelPath;          // APU model file path
  std::string labelFile;          // class label file path
  std::string argPostPluginPath;  // postprocess plugin (.so) path
  int deviceID = 0;               // target device index
  lynContext_t *context = nullptr;  // device context (owned by the caller)
  InputType inputType{};          // input kind; zero-initialized — confirm 0 is a valid default
  int maxFps = 0;                 // frame-rate cap; presumably 0 means uncapped — TODO confirm
};

struct Channel {
  std::string m_path = "";
  VideoDecoder m_decoder;
  BlockQueue<lynFrame_t *> m_queue;  //专门给显示用的队列
  FramePool *m_framePool;            //三种模式下公用的对象池
  lynContext_t *m_context;
  lynVdecOutInfo_t m_videoInfo;
  std::thread *m_thread;
  std::string m_channelName;
  StatsInfo *m_statsInfo;

  Channel() : m_queue(5), m_statsInfo(nullptr) {}

  ~Channel() {
    if (m_framePool) {
      CHECK_ERR(lynSetCurrentContext(*m_context));
      delete m_framePool;
    }
  }

  bool Init(ChannelInfo &channelInfo) {
    m_channelName = channelInfo.channelName;
    m_context = channelInfo.context;
    map<uint32_t, std::string> Yolov8s_Seg_Stats;
    Yolov8s_Seg_Stats[YOLOV8S_SEG_STATS_FRAME_RATE] = "frame rate(fps)";
    if (argPrintStats != 0) {
      Yolov8s_Seg_Stats[YOLOV8S_SEG_STATS_DECODE_TIME] = "decode cost time(ms)";
      Yolov8s_Seg_Stats[YOLOV8S_SEG_STATS_PREPROCESS_TIME] = "preprocess cost time(ms)";
      Yolov8s_Seg_Stats[YOLOV8S_SEG_STATS_INFER_TIME] = "infer cost time(ms)";
      Yolov8s_Seg_Stats[YOLOV8S_SEG_STATS_POSTPROCESS_TIME] = "postprocess cost time(ms)";
    }
    m_statsInfo = new StatsInfo(m_channelName, Yolov8s_Seg_Stats);
    m_thread = new std::thread(&Channel::ThreadFunc, this, channelInfo);
    return true;
  }

  void Close() {
    if (m_thread != nullptr) {
      m_thread->join();
      delete m_thread;
      m_thread = nullptr;
    }
    if (m_statsInfo) {
      m_statsInfo->StopPrint();
      delete m_statsInfo;
    }
  }

  void putFrame(lynFrame_t *frame) { m_framePool->Push(frame); }

  bool getFrame(lynFrame_t **frame, int timeout = 1000) {
    bool ret = m_queue.take(*frame, timeout);
    if (!ret) {
      // std::cout << "failed to take frame to video!" << std::endl;
    }
    return ret;
  }

  void getVideoInfo(lynVdecOutInfo_t &videoInfo) { videoInfo = m_videoInfo; }

  void ThreadFunc(ChannelInfo channelInfo) {
    CHECK_ERR(lynSetCurrentContext(*channelInfo.context));

    // 1. 加载模型
    ModelInfo yolov8ModelInfo;
    yolov8ModelInfo.LoadModelByPath(channelInfo.modelPath.c_str());

    // 2. 根据传入的ShowType，生成输出文件路径，并删除上次运行输出文件
    std::string outPath = channelInfo.outputPath;
    if (argShowType == ShowType::SaveFile) {
      outPath += ".264";
      std::remove(outPath.c_str());
    } else if (argShowType == ShowType::OnlyApu) {
      outPath += "_BoxInfo.json";
      std::remove(outPath.c_str());
    }

    // 3. 初始化视频解码类，并获取输出信息
    VideoDecoder videoDecoder;
    videoDecoder.Init(channelInfo.inputPath, channelInfo.inputType,
                      channelInfo.maxFps);
    lynVdecOutInfo_t vdecOutInfo;
    videoDecoder.GetVdecOutInfo(&vdecOutInfo);
    m_videoInfo = vdecOutInfo;
    uint32_t nVdecOutSize = vdecOutInfo.predictBufSize;

    // 4. 创建 stream 与 event
    lynStream_t ipeStream = nullptr;
    lynStream_t apuStream = nullptr;
    lynStream_t postStream = nullptr;
    CHECK_ERR(lynCreateStream(&ipeStream));
    CHECK_ERR(lynCreateStream(&apuStream));
    CHECK_ERR(lynCreateStream(&postStream));
    lynEvent_t ipeEvent = nullptr;
    lynEvent_t apuEvent = nullptr;
    CHECK_ERR(lynCreateEvent(&ipeEvent));
    CHECK_ERR(lynCreateEvent(&apuEvent));

    // 5. 创建各个资源池，避免重复申请与释放
    void *apuBuffer;
    BufferPool apuBufferPool(
        yolov8ModelInfo.outputSize * yolov8ModelInfo.batchSize, 5);
    BatchMem *pIpeOutBuf = nullptr;
    BatchMemPool oIpeOutMemPool(false, yolov8ModelInfo.inputSize,
                                yolov8ModelInfo.batchSize, 5);
    BlockQueue<lynFrame_t *> blockQueue(5);
    m_framePool = new FramePool(nVdecOutSize, 5);
    FramePool vencRecvFramePool(nVdecOutSize, 5);

    // 6. 开启解码线程
    videoDecoder.Start(*channelInfo.context, std::ref(blockQueue),
                       std::ref(*m_framePool));

    // 7. 初始化 IPE 处理类
    IpeParamYolov8 ipeParam(yolov8ModelInfo.width, yolov8ModelInfo.height);
    ipeParam.SetImgInfo(vdecOutInfo.width, vdecOutInfo.height,
                        LYN_PIX_FMT_NV12);

    // 8. 加载 Plugin, 并设置后处理参数
    void *handle =
        dlopen(channelInfo.argPostPluginPath.c_str(), RTLD_NOW | RTLD_GLOBAL);
    if (handle == nullptr) {
      cout << "error : post plugin load fail. error:" << dlerror() << endl;
      return;
    }
    SEGMENT_POST_FUNC segmentPostFunc =
        (SEGMENT_POST_FUNC)dlsym(handle, "lynSegmentPostProcess");
    if (segmentPostFunc == nullptr) {
      cout << "error : post plugin has no lynSegmentPostProcess function."
           << endl;
      dlclose(handle);
      return;
    }

    lynPlugin_t osdPlugin;
    if (argShowType != OnlyApu) {
      CHECK_ERR(lynPluginRegister(&osdPlugin, argOsdPluginPath.c_str()));
    }

    // apu输出数据
    uint8_t *pHostApuOutData = new uint8_t[yolov8ModelInfo.outputSize];

    // 后处理参数
    SegmentPostProcessInfo_t post_info;
    memset(&post_info, 0, sizeof(SegmentPostProcessInfo_t));
    post_info.model_width = yolov8ModelInfo.width;
    post_info.model_height = yolov8ModelInfo.height;
    post_info.img_width = vdecOutInfo.width;
    post_info.img_height = vdecOutInfo.height;
    post_info.padx = ipeParam.m_iPadX;
    post_info.pady = ipeParam.m_iPadY;
    post_info.fatio = ipeParam.m_fRatio;
    post_info.output_tensor = pHostApuOutData;
    post_info.boxesInfo = new lynSegBoxesInfo();

    // 设备侧后处理输出数据
    lynSegBoxesInfo *pDevBoxesInfo;
    uint8_t *devMaskData;
    CHECK_ERR(lynMalloc((void **)&pDevBoxesInfo, sizeof(lynSegBoxesInfo)));

    // mask后处理参数
    IpeParamC2C ipeBgr(LYN_PIX_FMT_BGR24);
    ipeBgr.SetImgInfo(vdecOutInfo.width, vdecOutInfo.height, LYN_PIX_FMT_NV12);
    IpeParamC2C ipeNv12(LYN_PIX_FMT_NV12);
    ipeNv12.SetImgInfo(vdecOutInfo.width, vdecOutInfo.height,
                       LYN_PIX_FMT_BGR24);

    uint8_t *pDevImgBgr = nullptr;
    int imgBgrSize = vdecOutInfo.width * vdecOutInfo.height * 3;
    CHECK_ERR(lynMalloc((void **)&pDevImgBgr, imgBgrSize));

    // osd参数
    MASK_PARAM_T osdMaskParam;
    osdMaskParam.imgData = pDevImgBgr;
    osdMaskParam.width = vdecOutInfo.width;
    osdMaskParam.height = vdecOutInfo.height;
    osdMaskParam.boxInfo = pDevBoxesInfo;

    lynDrawSegBoxAndTextPara osdBoxPara;
    osdBoxPara.imgFmt = LYN_PIX_FMT_NV12;
    osdBoxPara.imgW = vdecOutInfo.width;
    osdBoxPara.imgH = vdecOutInfo.height;
    osdBoxPara.boxesInfo = pDevBoxesInfo;
    osdBoxPara.boxColor = DRAW_COLOR_BLUE;
    osdBoxPara.boxThick = DRAW_THICK_2;
    osdBoxPara.fontSize = FONT_SIZE_24;
    osdBoxPara.fontColor = DRAW_COLOR_BLUE;

    // 9. 根据传入的 ShowType 决定是使用显示窗口类, 视频编码类，BoxInfo 编码类
    VideoEncoder *videoEncoder = nullptr;
    BoxInfoEncode *boxInfoEncode = nullptr;
    if (argShowType == ShowType::SaveFile) {
      videoEncoder = new VideoEncoder;
      videoEncoder->Init(vdecOutInfo, outPath, m_framePool, &vencRecvFramePool);
    } else if (argShowType == ShowType::DirectShow) {
      // OpencvWindowManager::GetInstance()->SetWindowSize(strWindowName,
      // vdecOutInfo.height,
      //                                                   vdecOutInfo.width);
    } else if (argShowType == ShowType::OnlyApu) {
      boxInfoEncode = new BoxInfoEncode;
      boxInfoEncode->Init(outPath);
    }

    bool bEos = false;
    int iBatchIndex = 0;
    bool resetEncoder = false;
    m_statsInfo->StartPrint();
    while (!bEos) {
      // 10. 取出一个解码 Frame
      lynFrame_t *pFrameTmp;
      int ret = blockQueue.take(pFrameTmp);
      if (!ret) {
        continue;
      }

      bEos = pFrameTmp->eos;

      if (!bEos) {
        m_statsInfo->UpdateStats(YOLOV8S_SEG_STATS_DECODE_TIME, 1,
                                 (uint64_t)pFrameTmp->userPtr);
      }
      //分辨率发生变化，更新ipe参数
      if (bEos && videoDecoder.m_resolutionchange) {
        if (argShowType == ShowType::SaveFile) {
          videoEncoder->EncodeImage(pFrameTmp);
          videoEncoder->UnInit();
          resetEncoder = true;
        } else {
          m_framePool->Push(pFrameTmp);
        }
        bEos = false;
        videoDecoder.m_resolutionchange = false;
        continue;  //
      }

      videoDecoder.GetVdecOutInfo(&vdecOutInfo);
      ipeParam.SetImgInfo(vdecOutInfo.width, vdecOutInfo.height,
                          LYN_PIX_FMT_NV12);
      m_videoInfo = vdecOutInfo;

      if (resetEncoder) {
        vencRecvFramePool.ResetSize(vdecOutInfo.predictBufSize);
        videoEncoder->Init(vdecOutInfo, outPath, m_framePool,
                           &vencRecvFramePool);
        resetEncoder = false;
      }

      if (iBatchIndex == 0) {
        pIpeOutBuf = oIpeOutMemPool.GetBatchMem();
      }

      // 11. IPE 处理
      lynEvent_t ipeBeginEvt = getDeviceTimePoint(ipeStream);
      ipeParam.CalcParam(ipeStream, pFrameTmp->data, pIpeOutBuf->GetElement());
      lynEvent_t ipeEndEvt = getDeviceTimePoint(ipeStream);

      auto ipeCallback = [this, ipeBeginEvt, ipeEndEvt]() {
        float elapsedTime = getEventElapsedTime(ipeBeginEvt, ipeEndEvt);
        m_statsInfo->UpdateStats(YOLOV8S_SEG_STATS_PREPROCESS_TIME, 1,
                                 static_cast<uint64_t>(elapsedTime * 1000));
        CHECK_ERR(lynDestroyEvent(ipeBeginEvt));
        CHECK_ERR(lynDestroyEvent(ipeEndEvt));
      };
      SetCallback(ipeStream, ipeCallback);

      ++iBatchIndex;

      // 12. IPE输出满了或是最后一个包，进行apu处理
      if (iBatchIndex == yolov8ModelInfo.batchSize || bEos) {
        CHECK_ERR(lynRecordEvent(ipeStream, ipeEvent));
        CHECK_ERR(lynStreamWaitEvent(apuStream, ipeEvent));
        apuBuffer = apuBufferPool.Pop();
        // 13. 调用 APU 推理接口
        lynEvent_t apuBeginEvt = getDeviceTimePoint(apuStream);
        CHECK_ERR(lynExecuteModelAsync(apuStream, yolov8ModelInfo.model,
                                       pIpeOutBuf->Buffer(), apuBuffer,
                                       yolov8ModelInfo.batchSize));
        lynEvent_t apuEndEvt = getDeviceTimePoint(apuStream);

        auto inferCallback = [this, apuBeginEvt, apuEndEvt, iBatchIndex]() {
          float elapsedTime = getEventElapsedTime(apuBeginEvt, apuEndEvt);
          m_statsInfo->UpdateStats(YOLOV8S_SEG_STATS_INFER_TIME, iBatchIndex,
                                   static_cast<uint64_t>(elapsedTime * 1000));
          CHECK_ERR(lynDestroyEvent(apuBeginEvt));
          CHECK_ERR(lynDestroyEvent(apuEndEvt));
        };
        SetCallback(apuStream, inferCallback);

        // 14. 在 APU 推理完之后，调用 Plugin 进行后处理
        CHECK_ERR(lynMemcpyAsync(apuStream, pHostApuOutData, apuBuffer,
                            yolov8ModelInfo.outputSize, ServerToClient));
        lynSynchronizeStream(apuStream);

        uint64_t postBegin = getCurrentTimestamp();
        segmentPostFunc(&post_info);
        CHECK_ERR(lynMemcpy(pDevBoxesInfo, post_info.boxesInfo,
                            sizeof(lynSegBoxesInfo), ClientToServer));
        uint64_t postEnd = getCurrentTimestamp();
        m_statsInfo->UpdateStats(YOLOV8S_SEG_STATS_POSTPROCESS_TIME, 1, postEnd-postBegin);

        // 后处理完成
        apuBufferPool.Push(apuBuffer);
        oIpeOutMemPool.PutBatchMem(pIpeOutBuf);
        m_statsInfo->UpdateStats(YOLOV8S_SEG_STATS_FRAME_RATE, 1, 0);

        // 选择 OnlyApu 时，不进行画框处理
        if (argShowType != ShowType::OnlyApu) {
          devMaskData = nullptr;
          if (post_info.boxesInfo->maskDataLen > 0) {
            CHECK_ERR(lynMalloc((void **)&devMaskData,
                                post_info.boxesInfo->maskDataLen));
            CHECK_ERR(lynMemcpy(devMaskData, post_info.boxesInfo->maskData,
                                post_info.boxesInfo->maskDataLen,
                                ClientToServer));
            delete[] post_info.boxesInfo->maskData;
          }

          osdBoxPara.imgData = pFrameTmp->data;
          CHECK_ERR(lynPluginRunAsync(
              postStream, osdPlugin, "lynDrawSegBoxAndText", &osdBoxPara, sizeof(osdBoxPara)));

          // nv12tobgr
          ipeBgr.CalcParam(postStream, pFrameTmp->data, pDevImgBgr);
          osdMaskParam.maskData = devMaskData;
          CHECK_ERR(lynPluginRunAsync(postStream, osdPlugin, "maskDraw",
                                      &osdMaskParam, sizeof(osdMaskParam)));
          // bgrtonv12
          ipeNv12.CalcParam(postStream, pDevImgBgr, pFrameTmp->data);

          auto osdCallback = [this, devMaskData]() {
            if (devMaskData) {
              CHECK_ERR(lynFree(devMaskData));
            }
          };
          SetCallback(postStream, osdCallback);
        }

        // 15. 根据传入的 ShowType 决定是进行视频编码还是直接显示
        if (argShowType == ShowType::SaveFile) {
          // 视频编码
          pFrameTmp->eos = bEos;
          videoEncoder->WaitForStream(postStream);
          videoEncoder->EncodeImage(pFrameTmp);
        } else if (argShowType == ShowType::DirectShow) {
          auto showCallback = [this, pFrameTmp]() { m_queue.put(pFrameTmp); };
          SetCallback(postStream, showCallback);
        } else if (argShowType == ShowType::OnlyApu) {
          // 保存推理结果
          // lynSegBoxesInfo *boxInfo = new lynSegBoxesInfo;
          // 将推理结果拷贝至 Host 侧
          // lynMemcpyAsync(postStream, boxInfo,
          //                post_info.boxesInfo, sizeof(lynSegBoxesInfo),
          //                ServerToClient);
          // auto saveCallback = [this, pFrameTmp, boxInfo, boxInfoEncode]() {
          //   m_framePool->Push(pFrameTmp);
          //   boxInfoEncode->EncodeToJson(*boxInfo, pFrameTmp->eos);
          //   delete boxInfo;
          // };
          // SetCallback(postStream, saveCallback);
        }
        iBatchIndex = 0;
      }
    }

    m_statsInfo->StopPrint();
    // 16. 等待流中事件处理完，并销毁资源
    CHECK_ERR(lynSynchronizeStream(ipeStream));
    CHECK_ERR(lynSynchronizeStream(apuStream));
    CHECK_ERR(lynSynchronizeStream(postStream));
    CHECK_ERR(lynDestroyStream(ipeStream));
    CHECK_ERR(lynDestroyStream(apuStream));
    CHECK_ERR(lynDestroyStream(postStream));
    CHECK_ERR(lynDestroyEvent(ipeEvent));
    CHECK_ERR(lynDestroyEvent(apuEvent));
    // CHECK_ERR(lynPluginUnregister(postPlugin));  // Unregister plugin
    dlclose(handle);
    if (argShowType != OnlyApu) {
      CHECK_ERR(lynPluginUnregister(osdPlugin));  // Unregister plugin
    }
    CHECK_ERR(lynFree(pDevBoxesInfo));
    delete post_info.boxesInfo;
    CHECK_ERR(lynFree(pDevImgBgr));
    // CHECK_ERR(lynFree(devLabels));
    // delete[] labelList.labels;

    if (argShowType == ShowType::DirectShow) {
      // OpencvWindowManager::GetInstance()->Close(strWindowName);
    } else if (argShowType == ShowType::SaveFile) {
      videoEncoder->UnInit();
      delete videoEncoder;
      videoEncoder = nullptr;
    } else if (argShowType == ShowType::OnlyApu) {
      boxInfoEncode->UnInit();
      delete boxInfoEncode;
      boxInfoEncode = nullptr;
    }
    videoDecoder.Stop();
    videoDecoder.UnInit();
    yolov8ModelInfo.UnLoadModel();
  }
};
