/*
 * Copyright(C) 2022. Huawei Technologies Co.,Ltd. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <atomic>
#include <chrono>
#include <cstring>
#include <iostream>
#include <queue>
#include <thread>
#include <vector>
#include "cppv2.h"
#include "MxBase/Log/Log.h"
#include "opencv2/opencv.hpp"
#include "cropResizePaste.hpp"

using namespace Openpose;
using namespace cv;
#define USE_DVPP

namespace
{
  // Number of rounds the eval pipeline runs / images each round processes.
  const int TEST_IMAGE_COUNT = 20;
  // Model input side length: images are letterboxed to OP_RESIZE x OP_RESIZE.
  const uint32_t OP_RESIZE = 560;

  // Full COCO 18-keypoint limb table (19 pairs, includes ear-shoulder links).
  // NOTE(review): not referenced in this file's visible code; kept for reference.
  const cv::Point COCO_PAIRS[] = {{1, 2}, {1, 5}, {2, 3}, {3, 4}, {5, 6}, {6, 7}, {1, 8}, {8, 9}, {9, 10}, {1, 11}, {11, 12}, {12, 13}, {1, 0}, {0, 14}, {14, 16}, {0, 15}, {15, 17}, {2, 16}, {5, 17}}; //  # = 19

  // Subset of limb pairs actually rendered by drawskeleton (17 pairs).
  const cv::Point COCO_PAIRS_RENDER[] = {{1, 2}, {1, 5}, {2, 3}, {3, 4}, {5, 6}, {6, 7}, {1, 8}, {8, 9}, {9, 10}, {1, 11}, {11, 12}, {12, 13}, {1, 0}, {0, 14}, {14, 16}, {0, 15}, {15, 17}};

  // Color table indexed by keypoint index (circles) and by pair index (lines).
  const cv::Scalar COCO_COLORS[] = {{255, 0, 0}, {255, 85, 0}, {255, 170, 0}, {255, 255, 0}, {170, 255, 0}, {85, 255, 0}, {0, 255, 0}, {0, 255, 85}, {0, 255, 170}, {0, 255, 255}, {0, 170, 255}, {0, 85, 255}, {0, 0, 255}, {85, 0, 255}, {170, 0, 255}, {255, 0, 255}, {255, 0, 170}, {255, 0, 85}};

  // Stage queues of the eval pipeline: read -> resize -> infer -> postprocess.
  BlockingQueue<MxBase::Image> readImageQueue;
  BlockingQueue<MxBase::Image> resizeQueue;
  BlockingQueue<std::vector<MxBase::Tensor>> inferQueue;

  const uint32_t TIME_POP = 100; // queue Pop timeout (presumably ms — TODO confirm against BlockingQueue)
  const uint32_t DEVICE_ID = 0;
  const std::string modelPath = "models/openpose_pytorch_560.om";
  // Shutdown flag: written by PostprocessThread/main, read by every pipeline
  // thread. std::atomic fixes the data race the plain bool had.
  std::atomic<bool> stopFlag(false);
  V2Param v2Param2(DEVICE_ID, modelPath);
  auto openpose2 = std::make_shared<OpV2Cppv2>(v2Param2);
}
void readImageThread(int j)
{
  LogInfo << "start to read image thread.";
  while (!stopFlag)
  {
    MxBase::Image decodedImage;
    std::string imgPath = "./pic/test" + std::to_string(j) + ".jpg";
    openpose2->ReadImage(imgPath, decodedImage);

    readImageQueue.Push(decodedImage);
  }
  return;
}

// resize
void resizeThread()
{
  LogInfo << "start to resize thread.";
  while (!stopFlag)
  {
    MxBase::Image decodedImage;
    readImageQueue.Pop(decodedImage, TIME_POP);

    std::shared_ptr<MxBase::ImageProcessor> imageProcessorDptr;
    imageProcessorDptr = std::make_shared<MxBase::ImageProcessor>(DEVICE_ID);
    MxBase::Image resizeImage;
    resizeImage = resizeKeepAspectRatioFit(decodedImage.GetOriginalSize().width, decodedImage.GetOriginalSize().height,
                                           OP_RESIZE, OP_RESIZE, decodedImage, *openpose2->imageProcessorDptr);
    resizeQueue.Push(resizeImage);
  }
  return;
}

void inferThread()
{
  LogInfo << "start to infer thread.";
  int count = 0;
  while (!stopFlag)
  {
    MxBase::Image resizeImage;
    resizeQueue.Pop(resizeImage, TIME_POP);
    std::vector<MxBase::Tensor> openposeOutputs;
    openpose2->OpInfer(resizeImage, openposeOutputs);

    inferQueue.Push(openposeOutputs);
    count++;
  }
  return;
}

/**
 * Final stage of the eval pipeline: pops inference outputs, groups the parts
 * into per-person skeletons, and raises stopFlag once TEST_IMAGE_COUNT images
 * have been processed, shutting down all pipeline threads.
 *
 * @param isEval forwarded to GeneratePersonList
 */
void PostprocessThread(bool isEval)
{
  LogInfo << "start to Postprocess thread.";
  int imageCount = 0;
  while (!stopFlag)
  {
    std::vector<MxBase::Tensor> openposeOutputs;
    APP_ERROR ret = inferQueue.Pop(openposeOutputs, TIME_POP);
    if (ret != APP_ERR_OK)
    {
      LogError << "inferQueue2pocessone pop failed" << ret;
      continue;
    }

    std::vector<std::vector<PartPair>> personList = {};
    std::shared_ptr<mxpiopenposeproto::MxpiPersonList> dstMxpiPersonListSptr = std::make_shared<mxpiopenposeproto::MxpiPersonList>();
    openpose2->GeneratePersonList(openposeOutputs, personList, *dstMxpiPersonListSptr, isEval);
    imageCount++;
    // Fix: the limit was checked before popping, so one extra image beyond
    // TEST_IMAGE_COUNT was always processed. Stop right after the Nth image.
    if (imageCount >= TEST_IMAGE_COUNT)
    {
      stopFlag = true;
      break;
    }
  }
  return;
}

// Fill a V2Param with the sample's device id and model path. Reuses the
// file-level DEVICE_ID/modelPath constants instead of re-typing the literals
// (they previously had to be kept in sync by hand). Also drops the stray ';'
// that followed the function body.
void InitV2Param(V2Param &v2Param)
{
  v2Param.deviceId = DEVICE_ID;
  v2Param.modelPath = modelPath;
}
/**
 * Return true when idx is NOT present in seen_idx.
 *
 * Takes the vector by const reference (it was copied on every call) and
 * replaces the count + if/else with a direct std::find comparison.
 *
 * @param seen_idx indices already drawn
 * @param idx      keypoint index to look up
 * @return true if idx does not occur in seen_idx
 */
bool notin(const std::vector<int> &seen_idx, int idx)
{
  return std::find(seen_idx.begin(), seen_idx.end(), idx) == seen_idx.end();
}
void drawskeleton(cv::Mat imag, std::vector<PartPair> skelelist)
{
  int radius = 3;
  int thickness = 3;
  int line_type = 8;
  int shift = 0;
  int num = 17;
  std::vector<int> seen_idx = {};
  cv::Point center;
  cv::Point centers[40];
  std::vector<int> x_coords = {}, y_coords = {};
  for (uint32_t i = 0; i < skelelist.size(); i++)
  {
    int part_idx1 = skelelist[i].partIdx1;
    int part_idx2 = skelelist[i].partIdx2;
    if (notin(seen_idx, part_idx1))
    {
      seen_idx.push_back(part_idx1);
      center.x = skelelist[i].coord1[0];
      center.y = skelelist[i].coord1[1];
      centers[part_idx1].x = center.x;
      centers[part_idx1].y = center.y;
      x_coords.push_back(center.x);
      y_coords.push_back(center.y);
      cv::circle(imag, center, radius, COCO_COLORS[part_idx1], thickness, line_type, shift);
    }
    if (notin(seen_idx, part_idx2))
    {
      seen_idx.push_back(part_idx2);
      center.x = skelelist[i].coord2[0];
      center.y = skelelist[i].coord2[1];
      centers[part_idx2].x = center.x;
      centers[part_idx2].y = center.y;
      x_coords.push_back(center.x);
      y_coords.push_back(center.y);
      cv::circle(imag, center, radius, COCO_COLORS[part_idx2], thickness, line_type, shift);
    }
  }

  for (int i = 0; i < num; i++)
  {
    if (notin(seen_idx, COCO_PAIRS_RENDER[i].x) || notin(seen_idx, COCO_PAIRS_RENDER[i].y))
      continue;
    cv::line(imag, centers[COCO_PAIRS_RENDER[i].x], centers[COCO_PAIRS_RENDER[i].y], COCO_COLORS[i], thickness);
  }
}

int main(int argc, char const *argv[])
{
  bool isEval = 0;
  if (argc <= 1)
  {
    LogWarn << "Please input image path, such as './cppv2_sample test.jpg'.";
    return APP_ERR_OK;
  }
  APP_ERROR ret;

  if (strcmp("eval", argv[1]) == 0)
  {
    isEval = 1;
  }
  // test.jpg
  if (isEval == 0)
  {
    // Init
    V2Param v2Param;
    InitV2Param(v2Param);
    auto openpose = std::make_shared<OpV2Cppv2>(v2Param);

    // Read Pictures
    MxBase::Image decodedImage;
    std::string imgPath = argv[1];
    ret = openpose->ReadImage(imgPath, decodedImage);
    if (ret != APP_ERR_OK)
    {
      LogError << "OpV2Cppv2 ReadImage failed,Please input image path. ret=" << ret << ".";
      return ret;
    }
    // Resize
    MxBase::Image resizeImage;
    resizeImage = resizeKeepAspectRatioFit(decodedImage.GetOriginalSize().width, decodedImage.GetOriginalSize().height,
                                           OP_RESIZE, OP_RESIZE, decodedImage, *openpose->imageProcessorDptr);
    std::shared_ptr<MxBase::ImageProcessor> imageProcessorDptrone =
        std::make_shared<MxBase::ImageProcessor>(v2Param.deviceId);
    // Infer
    std::vector<MxBase::Tensor> openposeOutputs;
    ret = openpose->OpInfer(resizeImage, openposeOutputs);
    if (ret != APP_ERR_OK)
    {
      LogError << "OpV2Cppv2 Infer failed, ret=" << ret << ".";
      return ret;
    }

    // postprocess
    std::vector<std::vector<PartPair>> personList = {};
    std::shared_ptr<mxpiopenposeproto::MxpiPersonList> dstMxpiPersonListSptr = std::make_shared<mxpiopenposeproto::MxpiPersonList>();
    openpose->GeneratePersonList(openposeOutputs, personList, *dstMxpiPersonListSptr, isEval);
    // Draw
    cv::Mat imag = imread(imgPath, cv::IMREAD_COLOR);
    for (int i = 0; i < int(personList.size()); i++)
    {
      drawskeleton(imag, personList[i]);
    }
    cv::imwrite("./result.jpg", imag);
    std::cout << "the picture process successfully " << std::endl;
    return APP_ERR_OK;
  }

  // eavl
  if (isEval == 1)
  {
    double_t sumPostProcessTime = 0;
    for (int j = 1; j < TEST_IMAGE_COUNT + 1; j++)
    {
      double_t PostProcessTime = 0;
      std::chrono::high_resolution_clock::time_point start = std::chrono::high_resolution_clock::now();

      std::thread threadReadImage(readImageThread, j);

      std::thread threadResize(resizeThread);

      std::thread threadInfer(inferThread);

      std::thread threadPostprocess(PostprocessThread, isEval);

      threadReadImage.join();
      threadResize.join();
      threadInfer.join();
      threadPostprocess.join();

      std::chrono::high_resolution_clock::time_point end = std::chrono::high_resolution_clock::now();
      PostProcessTime = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
      sumPostProcessTime += PostProcessTime;
    }
    std::cout << "total process time: " << sumPostProcessTime << "s." << std::endl;
    std::cout << "average process time: " << sumPostProcessTime / TEST_IMAGE_COUNT << "s." << std::endl;
    std::cout << "All down" << std::endl;
  }
  return APP_ERR_OK;
}