/**
 * @file Channel.hpp
 * @author SDK_TEAM
 * @brief Per-channel video pipeline: decode -> IPE pre-process -> APU
 *        inference -> post-process/OSD -> display, encode to file, or save
 *        box info as JSON.
 * @version 0.1
 * @date 2022-12-01
 *
 * Copyright:
 * © 2018 北京灵汐科技有限公司 版权所有。
 * 注意：以下内容均为北京灵汐科技有限公司原创，未经本公司允许，不得转载，否则将视为侵权；对于不遵守此声明或者其他违法使用以下内容者，本公司依法保留追究权。
 * © 2018 Lynxi Technologies Co., Ltd. All rights reserved.
 * NOTICE: All information contained here is, and remains the property of Lynxi.
 * This file can not be copied or distributed without the permission of Lynxi
 * Technologies Co., Ltd.
 *
 */

#pragma once

#include <lyn_api.h>
#include <lyn_plugin.h>
#include <unistd.h>
#include <iostream>
#include "argsParser.h"
#include "batchMem.hpp"
#include "blockQueue.hpp"
#include "boxInfoEncode.h"
#include "bufferPool.hpp"
#include "framePool.hpp"
#include "frameRater.h"
#include "ipeParamModel.h"
#include "opencvWindow.hpp"
#include "osd_plugin.h"
#include "sys/time.h"
#include "util.hpp"
#include "videoDecoder.h"
#include "videoEncoder.h"
#include "yolox_post_process.h"

// Per-batch stream-callback payload: once the post-process plugin has consumed
// the buffers, the callback returns them to their pools and bumps the FPS
// counter. All pointers default to nullptr so a partially filled instance
// fails loudly instead of dereferencing garbage.
struct PLUGIN_DATA_T {
  BatchMemPool *pIpePool = nullptr;     // IPE batch-memory pool the buffer came from
  BatchMem *pIpeBuffer = nullptr;       // IPE batch buffer to recycle
  void *apuBuffer = nullptr;            // APU inference output buffer (device side)
  BufferPool *apuBufferPool = nullptr;  // pool the APU buffer is returned to
  FrameRater *pFrameRater = nullptr;    // per-channel frame-rate counter
};

// Display-path stream-callback payload: hands a finished (decoded + drawn)
// frame to the display queue. Pointers default to nullptr to avoid
// indeterminate reads if a field is ever left unset.
struct SHOW_DATA_T {
  lynFrame_t *pVdecFrame = nullptr;     // decoded frame to display
  FramePool *pVdecFramePool = nullptr;  // pool the frame belongs to
  std::string windowName;               // target display window
  BlockQueue<lynFrame_t *> *frameQueue = nullptr;  // queue feeding the display thread
};

// 设备context结构体，用于设备对应的相关资源存储
// Per-device context: stores the resources bound to one device.
struct DEVICE_CONTEXT_T {
  lynContext_t ctx{};                 // context handle (value-initialized)
  const char *pModelPath = nullptr;   // path of the model to load on this device
};

// Configuration for one pipeline channel, filled by the caller and copied
// into the worker thread by Channel::Init(). Scalars, pointers and enums are
// brace/zero-initialized so a partially filled instance has deterministic
// values (NSDMIs keep this an aggregate under C++14 and later).
struct ChannelInfo {
  std::string inputPath;             // input video file / stream URL
  std::string outputPath;            // output path stem (extension added per ShowType)
  std::string channelName;           // label used for windows and FPS logs
  std::string modelPath;             // APU model file path
  std::string postPlugPath;          // post-process plugin (.so) path
  std::string osdPlugPath;           // OSD draw plugin (.so) path
  int deviceID = 0;                  // target device index
  lynContext_t *context = nullptr;   // device context the channel runs in
  ShowType showType{};               // DirectShow / SaveFile / OnlyApu
  InputType inputType{};             // kind of input source
  int maxFps = 0;                    // decode frame-rate cap (0 presumably means unlimited — TODO confirm)
};

/**
 * One video-processing channel. Owns a worker thread that runs the full
 * pipeline: decode -> IPE pre-process (batched) -> APU inference ->
 * post-process plugin -> OSD draw -> display / encode / JSON dump.
 *
 * Fix notes (vs. the previous revision):
 *  - m_framePool / m_context / m_thread are now null-initialized; before,
 *    destroying a Channel that was never Init()'d read indeterminate
 *    pointers in ~Channel() and Close() (undefined behavior).
 *  - ~Channel() joins the worker thread (via Close()) before deleting the
 *    frame pool, so no in-flight callback can touch a freed pool.
 */
struct Channel {
  std::string m_path = "";
  VideoDecoder m_decoder;
  BlockQueue<lynFrame_t *> m_queue;        // frame queue used only by the display path
  FramePool *m_framePool = nullptr;        // frame pool shared by the three show modes
  lynContext_t *m_context = nullptr;       // device context this channel runs in
  lynVdecOutInfo_t m_videoInfo{};          // latest decoder output info
  std::thread *m_thread = nullptr;         // pipeline worker thread
  std::string m_channelName;

  Channel() : m_queue(5) {}

  ~Channel() {
    // Join the worker thread first so no stream callback can still be
    // pushing frames into the pool we are about to free.
    Close();
    if (m_framePool != nullptr && m_context != nullptr) {
      // The pool holds device memory; it must be freed under its context.
      CHECK_ERR(lynSetCurrentContext(*m_context));
      delete m_framePool;
      m_framePool = nullptr;
    }
  }

  /**
   * @brief Record the channel info and spawn the worker thread that runs
   *        the whole pipeline (ThreadFunc).
   * @param channelInfo per-channel configuration; copied into the thread.
   * @return always true — pipeline errors are reported via CHECK_ERR.
   */
  bool Init(ChannelInfo &channelInfo) {
    m_channelName = channelInfo.channelName;
    m_context = channelInfo.context;
    m_thread = new std::thread(&Channel::ThreadFunc, this, channelInfo);
    return true;
  }

  // Wait for the worker thread to finish and release it. Safe to call more
  // than once; also called from the destructor.
  void Close() {
    if (m_thread != nullptr) {
      if (m_thread->joinable()) {
        m_thread->join();
      }
      delete m_thread;
      m_thread = nullptr;
    }
  }

  // Return a display frame to the shared frame pool.
  void putFrame(lynFrame_t *frame) { m_framePool->Push(frame); }

  /**
   * @brief Take one finished frame for display.
   * @param frame   [out] receives the frame pointer on success.
   * @param timeout milliseconds to wait for a frame.
   * @return true if a frame was obtained before the timeout.
   */
  bool getFrame(lynFrame_t **frame, int timeout = 1000) {
    return m_queue.take(*frame, timeout);
  }

  // Copy out the latest decoder output info (width / height / buffer size).
  void getVideoInfo(lynVdecOutInfo_t &videoInfo) { videoInfo = m_videoInfo; }

  // Worker-thread body: runs the complete pipeline for this channel until
  // the input stream signals EOS, then tears all resources down.
  void ThreadFunc(ChannelInfo channelInfo) {
    CHECK_ERR(lynSetCurrentContext(*channelInfo.context));

    // 1. Load the model.
    ModelInfo modelInfo;
    modelInfo.LoadModelByPath(channelInfo.modelPath.c_str());

    // 2. Build the output file path from ShowType and delete the output of
    //    the previous run.
    std::string outPath = channelInfo.outputPath;
    if (channelInfo.showType == ShowType::SaveFile) {
      outPath += ".264";
      std::remove(outPath.c_str());
    } else if (channelInfo.showType == ShowType::OnlyApu) {
      outPath += "_BoxInfo.json";
      std::remove(outPath.c_str());
    }

    // 3. Initialize the video decoder and query its output info.
    VideoDecoder videoDecoder;
    videoDecoder.Init(channelInfo.inputPath, channelInfo.inputType,
                      channelInfo.maxFps);
    lynVdecOutInfo_t vdecOutInfo;
    videoDecoder.GetVdecOutInfo(&vdecOutInfo);
    m_videoInfo = vdecOutInfo;
    uint32_t nVdecOutSize = vdecOutInfo.predictBufSize;

    // 4. Create one stream per pipeline stage, plus the events used to
    //    chain them (IPE -> APU -> post-process).
    lynStream_t ipeStream = nullptr;
    lynStream_t apuStream = nullptr;
    lynStream_t postStream = nullptr;
    CHECK_ERR(lynCreateStream(&ipeStream));
    CHECK_ERR(lynCreateStream(&apuStream));
    CHECK_ERR(lynCreateStream(&postStream));
    lynEvent_t ipeEvent = nullptr;
    lynEvent_t apuEvent = nullptr;
    CHECK_ERR(lynCreateEvent(&ipeEvent));
    CHECK_ERR(lynCreateEvent(&apuEvent));

    // 5. Create the resource pools up front to avoid repeated alloc/free.
    void *apuBuffer;
    BufferPool apuBufferPool(modelInfo.outputSize * modelInfo.batchSize, 5);
    BatchMem *pIpeOutBuf = nullptr;
    BatchMemPool oIpeOutMemPool(false, modelInfo.inputSize, modelInfo.batchSize,
                                5);
    BlockQueue<lynFrame_t *> blockQueue(5);
    m_framePool = new FramePool(nVdecOutSize, 5);
    FramePool vencRecvFramePool(nVdecOutSize, 5);

    // 6. Start the decoding thread; it feeds blockQueue from m_framePool.
    videoDecoder.Start(*channelInfo.context, std::ref(blockQueue),
                       std::ref(*m_framePool));

    FrameRater frameRate(channelInfo.channelName);
    frameRate.SetInterval(1);

    // 7. Load the plugins and set the post-process parameters.
    lynPlugin_t postPlugin;
    CHECK_ERR(lynPluginRegister(&postPlugin, channelInfo.postPlugPath.c_str()));
    lynPlugin_t osdPlugin;
    CHECK_ERR(lynPluginRegister(&osdPlugin, channelInfo.osdPlugPath.c_str()));

    YoloxPostProcessInfo_t postInfo;
    postInfo.ori_width = vdecOutInfo.width;
    postInfo.ori_height = vdecOutInfo.height;
    postInfo.width = modelInfo.width;
    postInfo.height = modelInfo.height;
    postInfo.class_num = modelInfo.classNum;
    postInfo.anchorSize = modelInfo.modelDesc->outputTensorAttrArray->dims[1];
    CHECK_ERR(lynMalloc((void **)&postInfo.boxesInfo, sizeof(lynBoxesInfo)));

    // 8. Initialize the IPE pre-processing helper (resize to model input).
    IpeParamModel ipeParam(modelInfo.width, modelInfo.height);
    ipeParam.SetImgInfo(vdecOutInfo.width, vdecOutInfo.height,
                        LYN_PIX_FMT_NV12);

    // 9. Depending on ShowType, set up the display window, video encoder,
    //    or BoxInfo JSON encoder.
    VideoEncoder *videoEncoder = nullptr;
    BoxInfoEncode *boxInfoEncode = nullptr;
    if (channelInfo.showType == ShowType::SaveFile) {
      videoEncoder = new VideoEncoder;
      videoEncoder->Init(vdecOutInfo, outPath, m_framePool, &vencRecvFramePool);
    } else if (channelInfo.showType == ShowType::DirectShow) {
      // OpencvWindowManager::GetInstance()->SetWindowSize(strWindowName,
      // vdecOutInfo.height,
      //                                                   vdecOutInfo.width);
    } else if (channelInfo.showType == ShowType::OnlyApu) {
      boxInfoEncode = new BoxInfoEncode;
      boxInfoEncode->Init(outPath);
    }

    bool bEos = false;
    int iBatchIndex = 0;
    bool resetEncoder = false;
    while (!bEos) {
      // 10. Take one decoded frame (blocking with retry on timeout).
      lynFrame_t *pFrameTmp;
      bool ret = blockQueue.take(pFrameTmp);
      if (!ret) {
        continue;
      }

      bEos = pFrameTmp->eos;
      // Stream resolution changed: the decoder signals it as an EOS on the
      // old-resolution stream. Flush the encoder, mark it for re-init at the
      // new size, and keep looping.
      if (bEos && videoDecoder.m_resolutionchange) {
        if (channelInfo.showType == ShowType::SaveFile) {
          videoEncoder->EncodeImage(pFrameTmp);
          videoEncoder->UnInit();
          resetEncoder = true;
        } else {
          m_framePool->Push(pFrameTmp);
        }
        bEos = false;
        videoDecoder.m_resolutionchange = false;
        continue;
      }

      // Refresh IPE and post-process parameters with the current decoder
      // output geometry (cheap; handles mid-stream resolution changes).
      videoDecoder.GetVdecOutInfo(&vdecOutInfo);
      ipeParam.SetImgInfo(vdecOutInfo.width, vdecOutInfo.height,
                          LYN_PIX_FMT_NV12);
      m_videoInfo = vdecOutInfo;
      postInfo.ori_width = vdecOutInfo.width;
      postInfo.ori_height = vdecOutInfo.height;

      if (resetEncoder) {
        // Re-create the encoder at the new resolution.
        vencRecvFramePool.ResetSize(vdecOutInfo.predictBufSize);
        videoEncoder->Init(vdecOutInfo, outPath, m_framePool,
                           &vencRecvFramePool);
        resetEncoder = false;
      }

      if (iBatchIndex == 0) {
        pIpeOutBuf = oIpeOutMemPool.GetBatchMem();
      }

      // 11. IPE processing: scale this frame into the next batch slot.
      ipeParam.CalcParam(ipeStream, pFrameTmp->data, pIpeOutBuf->GetElement());

      ++iBatchIndex;

      // 12. When the IPE batch is full — or this is the last packet — run
      //     the APU on the accumulated batch.
      if (iBatchIndex == modelInfo.batchSize || bEos) {
        // Make the APU stream wait for the IPE work to finish.
        CHECK_ERR(lynRecordEvent(ipeStream, ipeEvent));
        CHECK_ERR(lynStreamWaitEvent(apuStream, ipeEvent));
        apuBuffer = apuBufferPool.Pop();
        // 13. Call the APU inference API.
        CHECK_ERR(lynExecuteModelAsync(apuStream, modelInfo.model,
                                       pIpeOutBuf->Buffer(), apuBuffer,
                                       modelInfo.batchSize));

        // 14. After inference, run the post-process plugin on its stream.
        CHECK_ERR(lynRecordEvent(apuStream, apuEvent));
        CHECK_ERR(lynStreamWaitEvent(postStream, apuEvent));

        postInfo.output_tensor = apuBuffer;
        postInfo.nms_top_k = 500;
        postInfo.score_threshold = 0.25;
        postInfo.nms_threshold = 0.45;
        postInfo.is_pad_resize = 1;
        CHECK_ERR(lynPluginRunAsync(postStream, postPlugin, "lynYoloxPostProcess",
                                    &postInfo, sizeof(postInfo)));

        // Recycle the batch buffers once the post-process has consumed them.
        PLUGIN_DATA_T *callbackData = new PLUGIN_DATA_T;
        callbackData->pIpeBuffer = pIpeOutBuf;
        callbackData->pIpePool = &oIpeOutMemPool;
        callbackData->apuBuffer = apuBuffer;
        callbackData->apuBufferPool = &apuBufferPool;
        callbackData->pFrameRater = &frameRate;

        auto callback = [](void *para) -> lynError_t {
          PLUGIN_DATA_T *callbackData = (PLUGIN_DATA_T *)para;
          callbackData->pFrameRater->AddFrame(1);
          callbackData->apuBufferPool->Push(callbackData->apuBuffer);
          callbackData->pIpePool->PutBatchMem(callbackData->pIpeBuffer);
          delete callbackData;
          return 0;
        };
        lynStreamAddCallback(postStream, callback, callbackData);

        // Skip box drawing when only raw APU results are wanted.
        if (channelInfo.showType != ShowType::OnlyApu) {
          lynDrawBoxAndTextPara drawPara;
          drawPara.imgData = pFrameTmp->data;
          drawPara.imgFmt = LYN_PIX_FMT_NV12;
          drawPara.imgW = vdecOutInfo.width;
          drawPara.imgH = vdecOutInfo.height;
          drawPara.boxesInfo = postInfo.boxesInfo;
          CHECK_ERR(lynPluginRunAsync(postStream, osdPlugin, "lynDrawBoxAndText",
                                      &drawPara, sizeof(drawPara)));
        }

        // 15. Depending on ShowType: encode to file, queue for display, or
        //     dump box info to JSON.
        if (channelInfo.showType == ShowType::SaveFile) {
          // Video encoding path.
          pFrameTmp->eos = bEos;
          videoEncoder->WaitForStream(postStream);
          videoEncoder->EncodeImage(pFrameTmp);
        } else if (channelInfo.showType == ShowType::DirectShow) {
          // Hand the drawn frame to the display queue once the post stream
          // is done with it.
          auto showCallback = [](void *para) -> int {
            SHOW_DATA_T *callbackData = (SHOW_DATA_T *)para;
            callbackData->frameQueue->put(callbackData->pVdecFrame);
            delete callbackData;
            return 0;
          };
          SHOW_DATA_T *showData = new SHOW_DATA_T;
          showData->pVdecFrame = pFrameTmp;
          showData->frameQueue = &m_queue;
          lynStreamAddCallback(postStream, showCallback, showData);
        } else if (channelInfo.showType == ShowType::OnlyApu) {
          // Save the inference results.
          RESULT_BOXINFO_T *resultCallback = new RESULT_BOXINFO_T;
          // Copy the box info back to the host.
          lynMemcpyAsync(postStream, &resultCallback->boxInfo,
                         postInfo.boxesInfo, sizeof(lynBoxesInfo),
                         ServerToClient);
          resultCallback->encode = boxInfoEncode;
          resultCallback->frame = pFrameTmp;
          resultCallback->pVdecFramePool = m_framePool;
          lynStreamAddCallback(
              postStream,
              [](void *data) -> int {
                RESULT_BOXINFO_T *resultCallback = (RESULT_BOXINFO_T *)data;
                bool eos = resultCallback->frame->eos;

                if (resultCallback->frame->userPtr) {
                  // Latency probe: userPtr appears to carry a timestamp in
                  // microseconds — printed here in ms. TODO confirm units.
                  float pp =
                      reinterpret_cast<uint64_t>(resultCallback->frame->userPtr) /
                      1000.0f;
                  std::cout << "------take----- " << pp << "ms" << std::endl;
                }

                resultCallback->pVdecFramePool->Push(resultCallback->frame);
                resultCallback->encode->EncodeToJson(resultCallback->boxInfo,
                                                     eos);
                delete resultCallback;
                return 0;
              },
              resultCallback);
        }
        iBatchIndex = 0;
      }
    }

    // 16. Drain all streams, then destroy events, streams and plugins.
    CHECK_ERR(lynSynchronizeStream(ipeStream));
    CHECK_ERR(lynSynchronizeStream(apuStream));
    CHECK_ERR(lynSynchronizeStream(postStream));
    CHECK_ERR(lynDestroyStream(ipeStream));
    CHECK_ERR(lynDestroyStream(apuStream));
    CHECK_ERR(lynDestroyStream(postStream));
    CHECK_ERR(lynDestroyEvent(ipeEvent));
    CHECK_ERR(lynDestroyEvent(apuEvent));
    CHECK_ERR(lynPluginUnregister(postPlugin));  // Unregister plugin
    CHECK_ERR(lynPluginUnregister(osdPlugin));   // Unregister plugin
    CHECK_ERR(lynFree(postInfo.boxesInfo));

    if (channelInfo.showType == ShowType::DirectShow) {
      // OpencvWindowManager::GetInstance()->Close(strWindowName);
    } else if (channelInfo.showType == ShowType::SaveFile) {
      videoEncoder->UnInit();
      delete videoEncoder;
      videoEncoder = nullptr;
    } else if (channelInfo.showType == ShowType::OnlyApu) {
      boxInfoEncode->UnInit();
      delete boxInfoEncode;
      boxInfoEncode = nullptr;
    }
    videoDecoder.Stop();
    videoDecoder.UnInit();
    modelInfo.UnLoadModel();
  }
};
