/**
 * @file main.cpp
 * @author SDK_TEAM
 * @brief
 * @version 0.1
 * @date 2023-12-01
 *
 * Copyright:
 * © 2018 北京灵汐科技有限公司 版权所有。
 * 注意：以下内容均为北京灵汐科技有限公司原创，未经本公司允许，不得转载，否则将视为侵权；对于不遵守此声明或者其他违法使用以下内容者，本公司依法保留追究权。
 * © 2018 Lynxi Technologies Co., Ltd. All rights reserved.
 * NOTICE: All information contained here is, and remains the property of Lynxi.
 * This file can not be copied or distributed without the permission of Lynxi
 * Technologies Co., Ltd.
 *
 */
#include <lyn_api.h>
#include <lyn_plugin.h>
#include <unistd.h>

#include <cmath>
#include <cstdlib>
#include <iostream>
#include <memory>
#include <string>
#include <thread>
#include <vector>

#include "ObjectTrack.hpp"
#include "argsParser.h"
#include "framePool.hpp"
#include "ipeParamEx.h"
#include "opencvWindow.hpp"
#include "util.hpp"
#include "yolox_post_process.h"
using namespace std;

#define WINDOW_WIDTH 1920
#define WINDOW_HEIGH 1080
#define CAPACITY_LIMIT 150  // the plate-recognition model's maximum frame rate is about 150 FPS
/**
 * @brief Merge the output frames of all channels into an N x N grid and
 *        display the composite image in a single OpenCV window.
 *
 * Runs until any channel reports end-of-stream, then drains the merge
 * stream and closes the window.
 *
 * @param context    Device context to bind to this thread (must outlive it).
 * @param windowName Name of the OpenCV display window.
 * @param channels   One ObjectTrack worker per input channel.
 */
void MergeAndDisplay(lynContext_t *context, std::string windowName,
                     std::vector<std::shared_ptr<ObjectTrack>> &channels) {
  CHECK_ERR(lynSetCurrentContext(*context));
  IpeParamModel ipeMerge(0, 0);
  ipeMerge.SetImgInfo(WINDOW_WIDTH, WINDOW_HEIGH, LYN_PIX_FMT_NV12);

  OpencvWindowManager::GetInstance()->SetWindowSize(windowName, WINDOW_HEIGH,
                                                    WINDOW_WIDTH);
  // Choose the smallest square grid (window x window) that fits all channels.
  int channelNumber = channels.size();
  int window = static_cast<int>(std::sqrt(channelNumber));
  if (window * window < channelNumber) {
    ++window;  // round up when channelNumber is not a perfect square
  }
  std::cout << "window: " << window << std::endl;
  std::vector<ImageMergeInfo> mergeInfos;
  for (int h = 0; h < window; ++h) {
    for (int w = 0; w < window; ++w) {
      ImageMergeInfo mergeInfo;
      // Compute where this grid cell is rendered inside the window.
      mergeInfo.x = w * (WINDOW_WIDTH / window);
      mergeInfo.y = h * (WINDOW_HEIGH / window);
      mergeInfo.width = WINDOW_WIDTH / window;
      mergeInfo.heigh = WINDOW_HEIGH / window;
      // Cells without an attached channel keep a null frame; the display
      // callback below skips them.
      mergeInfo.frame = nullptr;
      mergeInfos.emplace_back(mergeInfo);
    }
  }
  // Use the OSD plugin to draw the grid separator boxes onto the
  // background buffers (one box per cell, blank label => outline only).
  lynPlugin_t plugin;
  CHECK_ERR(lynPluginRegister(&plugin, ArgsParse::argOsdPluginPath.c_str()));
  lynDrawBoxAndTextPara drawPara;
  drawPara.imgFmt = LYN_PIX_FMT_NV12;
  drawPara.imgW = WINDOW_WIDTH;
  drawPara.imgH = WINDOW_HEIGH;
  lynBoxesInfo boxesInfo;
  boxesInfo.boxesNum = mergeInfos.size();
  for (uint32_t i = 0; i < boxesInfo.boxesNum; ++i) {
    boxesInfo.boxes[i].xmin = mergeInfos[i].x;
    boxesInfo.boxes[i].xmax = mergeInfos[i].x + mergeInfos[i].width;
    boxesInfo.boxes[i].ymin = mergeInfos[i].y;
    boxesInfo.boxes[i].ymax = mergeInfos[i].y + mergeInfos[i].heigh;
    snprintf(boxesInfo.boxes[i].label, 2, " ");
  }
  void *drawDev;
  CHECK_ERR(lynMalloc((void **)&drawDev, sizeof(lynBoxesInfo)));
  CHECK_ERR(
      lynMemcpy(drawDev, &boxesInfo, sizeof(lynBoxesInfo), ClientToServer));
  drawPara.boxesInfo = (lynBoxesInfo *)drawDev;
  lynStream_t drawStream;
  CHECK_ERR(lynCreateStream(&drawStream));
  FramePool recvPool;
  int poolSize = 1;
  recvPool.init(WINDOW_HEIGH * WINDOW_WIDTH * 3 / 2, poolSize);
  std::vector<lynCodecBuf_t *> tmpVec;
  for (int i = 0; i < poolSize; ++i) {
    auto tmp = recvPool.Pop();
    // Paint the background black in NV12: Y plane = 0, UV plane = 128.
    CHECK_ERR(lynMemset(tmp->data, 0, WINDOW_WIDTH * WINDOW_HEIGH));
    CHECK_ERR(lynMemset(tmp->data + WINDOW_WIDTH * WINDOW_HEIGH, 128,
                        WINDOW_WIDTH * WINDOW_HEIGH / 2));
    drawPara.imgData = tmp->data;
    // Draw the grid separator lines on this buffer.
    CHECK_ERR(lynPluginRunAsync(drawStream, plugin, "lynDrawBoxAndText",
                                &drawPara, sizeof(drawPara)));
    CHECK_ERR(lynSynchronizeStream(drawStream));

    // Keep every buffer popped from recvPool until all are initialized,
    // then return them together.
    tmpVec.emplace_back(tmp);
  }
  // Destroy the stream only after ALL pool buffers are initialized.
  // (Previously this was inside the loop, which would have destroyed the
  // stream on the first iteration and reused it afterwards if poolSize > 1.)
  CHECK_ERR(lynDestroyStream(drawStream));
  for (auto &tmp : tmpVec) {
    recvPool.Push(tmp);
  }
  CHECK_ERR(lynPluginUnregister(plugin));
  CHECK_ERR(lynFree(drawDev));

  bool eos = false;
  // Live streams use a 1 ms timeout so a single disconnected channel
  // cannot stall the other channels; file input may block much longer.
  int getFrameTimeout = (g_inputType == File) ? 60000 : 1;
  lynStream_t mergeStream;
  CHECK_ERR(lynCreateStream(&mergeStream));
  while (!eos) {
    for (size_t i = 0; i < channels.size(); i++) {
      lynFrame_t *frame = nullptr;
      bool ret = channels[i]->getFrame(&frame, getFrameTimeout);
      channels[i]->getVideoInfo(mergeInfos[i].videoInfo);
      mergeInfos[i].frame = frame;
      // A timeout (ret == false) leaves the previous eos value untouched.
      eos = ret ? frame->eos : eos;
    }
    auto result = recvPool.Pop();
    ipeMerge.MergeImage(mergeStream, result->data, mergeInfos);

    // mergeInfos is captured by value, so cells that never received a frame
    // still carry nullptr and are skipped below. NOTE(review): windowName
    // and recvPool are captured by reference — assumes all callbacks finish
    // before lynSynchronizeStream() returns; confirm with the SDK contract.
    addCallback(mergeStream, [channels, mergeInfos, &windowName, result,
                              &recvPool]() {
      for (size_t i = 0; i < mergeInfos.size(); ++i) {
        if (mergeInfos[i].frame != nullptr) {
          channels[i]->putFrame(mergeInfos[i].frame);
        }
      }
      OpencvWindowManager::GetInstance()->ShowDevicesData(result, windowName);
      recvPool.Push(result);
    });
    if (g_inputType == Stream) {
      // With the 1 ms timeout above, polling too fast can miss frames;
      // the throttle is intentionally disabled for now.
      // std::this_thread::sleep_for(std::chrono::milliseconds(60));
    }
  }

  // Wait for all queued merge/display work to finish before tearing down.
  CHECK_ERR(lynSynchronizeStream(mergeStream));
  CHECK_ERR(lynDestroyStream(mergeStream));
  OpencvWindowManager::GetInstance()->Close(windowName);
}

/**
 * @brief Demo entry point: spins up one ObjectTrack worker per requested
 *        chip/channel pair, optionally merges all channels into a single
 *        display window, then tears everything down in order.
 *
 * @return 0 on success, -1 on argument-parsing failure.
 */
int main(int argc, char *argv[]) {
  if (ArgsParse::parse(argc, argv) == false) {
    return -1;
  }

  std::string strModelPath = ArgsParse::argCarDetectionModelPath;
  std::string strVideoPath = ArgsParse::argInputFilePath;
  // A path that exists on disk is treated as file input; anything else is
  // assumed to be a live stream URL.
  g_inputType = (access(strVideoPath.c_str(), F_OK) == 0) ? File : Stream;
  std::string strVideoOutTmp = strVideoPath.substr(0, strVideoPath.rfind('.'));
  std::vector<std::shared_ptr<ObjectTrack>> plateDetectWorkers;
  std::vector<lynContext_t *> contextVec;

  // One context per chip; argChannel workers share each context.
  for (int deviceID : ArgsParse::argChipsVec) {
    auto *context = new lynContext_t;
    CHECK_ERR(lynCreateContext(context, deviceID));
    for (uint32_t j = 0; j < ArgsParse::argChannel; ++j) {
      auto objectTrack = std::make_shared<ObjectTrack>();
      objectTrack->context = context;
      objectTrack->channelName =
          "device" + std::to_string(deviceID) + "_channel" + std::to_string(j);
      objectTrack->strVideoInputPath = strVideoPath;
      objectTrack->strVideoOutputPath =
          strVideoOutTmp + "_Plugin_" + objectTrack->channelName + ".264";
      objectTrack->carDetectModelPath = ArgsParse::argCarDetectionModelPath;
      objectTrack->featureExtractModelPath =
          ArgsParse::argFeatureExtractionModelPath;
      objectTrack->yoloPluginPath = ArgsParse::argYoloPostPluginPath;
      objectTrack->argOsdPluginPath = ArgsParse::argOsdPluginPath;
      objectTrack->deepsortPluginPath = ArgsParse::argDeepPostPluginPath;
      objectTrack->showType = ArgsParse::argShowType;
      // Reuse the global decision instead of re-probing the filesystem for
      // every channel (previously access() was called per channel).
      objectTrack->inputType = g_inputType;
      objectTrack->createStartThread();
      plateDetectWorkers.emplace_back(objectTrack);
    }
    contextVec.emplace_back(context);
  }
  if (ArgsParse::argShowType == ShowType::DirectShow) {
    // Only one context is created per chip, so the context count equals the
    // chip count; DirectShow merging supports a single chip only.
    if (contextVec.size() > 1) {
      std::cout << "DirectShow only support single chip!" << std::endl;
      quick_exit(0);
    }
    std::string windowName("plate recognition");
    OpencvWindowManager::GetInstance()->SetWindowName(windowName);
    std::thread mergeThread(MergeAndDisplay, contextVec[0], windowName,
                            std::ref(plateDetectWorkers));
    // Process() blocks on the UI loop until the window is closed.
    OpencvWindowManager::GetInstance()->Process();
    mergeThread.join();
  }

  for (auto &plateDetectWorker : plateDetectWorkers) {
    plateDetectWorker->stop();
    // Destroy each worker while its context is still valid.
    plateDetectWorker.reset();
  }
  for (auto &devCtx : contextVec) {
    if (devCtx) {
      CHECK_ERR(lynSetCurrentContext(*devCtx));
      CHECK_ERR(lynDestroyContext(*devCtx));
      delete devCtx;
    }
  }
  return 0;
}