/**
* \brief 
* \author pengcheng (pengcheng@yslrpch@126.com)
* \date 2020-06-18
 * \attention Copyright © ADC Technology (Tianjin) Co., Ltd.
* \attention Refer to COPYRIGHT.txt for complete terms of copyright notice
*/
#include <opencv2/opencv.hpp>
#include <cmw_app/front_vision.h>
#include <memory>
#include <yaml-cpp/yaml.h>
#include<vector>
#include <opencv2/viz.hpp>
#include <common/impl/cxxopts.hpp>
#include <mutex>
#include <thread>
#include <chrono>
#include <ctime>
#include <iostream>   
#include <unistd.h>
#include <queue>
#include <yaml-cpp/yaml.h>
#include "common/project_root.h"
#include <glog/logging.h>
#include <iostream> 
#include <fstream> 
#include <iomanip> 

// Shared stereo SGM application instance.
// NOTE(review): declared but never referenced in the visible code — presumably
// used via external linkage elsewhere; confirm before removing.
std::shared_ptr<adc::StereoSgmApp> stereo_app_ptr;
// Convenience aliases for the vision pipeline's data-exchange types.
typedef std::vector<cv::Vec3b> PointColorType;     // per-point BGR colors for the viz cloud
typedef std::vector<cv::Point3f> PointCloudType;   // sparse 3-D points for the viz cloud
typedef  std::chrono::system_clock::time_point  TimeType;
typedef cv::Mat PointCloudMat;                     // dense per-pixel point matrix (split into 3 channels in MergeData)
typedef  std::shared_ptr<std::list<vector<Tn::Bbox>>> OutputDataType;  // detector output: list of per-frame box vectors
typedef  std::list<vector<Tn::Bbox>> OutputDataInstanceType;
// Identical redeclaration of TimeType above — legal in C++ and harmless.
typedef  std::chrono::system_clock::time_point  TimeType;
typedef  cv::Mat ImageType;
typedef std::vector<std::string> ClassNameType;    // class-id -> human-readable label
typedef detection::ImageBBox2D<float> ImageBBox2DType;
typedef std::vector<ImageBBox2DType> ImageBBoxes2DType;


// 3-D viewer windows used for point-cloud visualization.
cv::viz::Viz3d plot3d("3d Frame");
cv::viz::Viz3d plot3ds("3ds Frame");
// Most recent input images read from disk in main()'s feed loop.
cv::Mat left_img, right_img;
// Guard bbox_time_cahce and depth_time_cahce respectively.
std::mutex  mutex_sync_bbox, mutex_sync_depth;
cv::Mat point_cloud;

// Maximum number of entries kept in each synchronization cache.
const unsigned int kCountBuff = 10;


// One buffered stereo depth frame: the dense point-cloud matrix (read as
// three float channels x/y/z in MergeData) plus its capture timestamp,
// used for time-based pairing with detection results.
struct DepthTime
{
  PointCloudMat point_mat;  // cloned point-cloud matrix (see callback_deal_points_mat)
  TimeType time_stamp;      // system_clock time of the source stereo frame
};

// One buffered detection result: bounding boxes, class-name lookup table,
// the source image and the capture timestamp, for fusion with depth frames.
struct BboxTime
{
  vector<Tn::Bbox> bboxs;   // detected 2-D boxes (left/right/top/bot, classId, score)
  ClassNameType names;      // class-id -> label lookup
  TimeType time_stamp;      // system_clock time of the source frame
  cv::Mat img;              // image the boxes refer to (shallow cv::Mat copy)

};
// Bounded FIFO caches (at most kCountBuff entries each) filled by the
// detection/depth callbacks and drained by ThreadSyncData.
// NOTE(review): "cahce" is a long-standing typo for "cache"; the names are
// kept as-is since they are referenced throughout this file.
std::queue<DepthTime> depth_time_cahce;
std::queue<BboxTime> bbox_time_cahce;

/**
 * \brief Receives a dense point-cloud matrix from the stereo pipeline and
 * buffers a deep copy of it, together with its timestamp, for the fusion
 * thread. The buffer is bounded: beyond kCountBuff entries the oldest is
 * discarded.
 * \param point_cloud_float dense point matrix produced by the stereo task
 * \param tm                capture timestamp of the frame
 */
void callback_deal_points_mat(PointCloudMat& point_cloud_float, TimeType& tm)
{
  DepthTime entry;
  // Deep-copy: the caller may reuse its matrix buffer after this returns.
  entry.point_mat = point_cloud_float.clone();
  entry.time_stamp = tm;

  std::lock_guard<std::mutex> guard(mutex_sync_depth);
  depth_time_cahce.push(entry);
  while (depth_time_cahce.size() > kCountBuff)
  {
    depth_time_cahce.pop();  // keep the cache bounded
  }
}

/**
 * \brief Renders a colored sparse point cloud in the global 3-D viewer.
 * Frames with no points or no colors are ignored.
 * \param pt_cloud 3-D points to display
 * \param pt_color per-point BGR colors (parallel to pt_cloud)
 * \param time     frame timestamp (unused here)
 */
void  callback_deal_point(PointCloudType& pt_cloud, PointColorType& pt_color, TimeType &time)
{
    if (pt_cloud.empty() || pt_color.empty())
    {
      return;
    }

    cv::viz::WCloud widget(pt_cloud, pt_color);
    widget.setRenderingProperty(cv::viz::POINT_SIZE, 2);
    plot3d.showWidget("ref cloud", widget);
    plot3d.wasStopped();
    // Pump the viewer's event loop once so the new cloud is drawn.
    plot3d.spinOnce(1, true);
}

/**
 * \brief Visualization variant of the detection callback: draws every box
 * (plus class label) on the image, shows it, and buffers the detections for
 * the depth-fusion thread.
 * \param outputs    detector output (only the first frame's boxes are used)
 * \param time       frame timestamp
 * \param img        image the boxes refer to; annotated in place
 * \param class_name class-id -> label lookup
 */
void callback_vis_bbox(OutputDataType& outputs, TimeType& time, ImageType& img, ClassNameType& class_name )
{
  // Guard: dereferencing begin() of a null or empty list is undefined behavior.
  if(!outputs || outputs->empty()) return;
  auto output = outputs->begin();
  for(const auto& box : *output)  // const ref: avoid copying each Bbox
  {
    cv::rectangle(img,cv::Point(box.left,box.top),cv::Point(box.right,box.bot),cv::Scalar(0,0,255),3,8,0);
    std::cout << "class=" << box.classId << " prob=" << box.score*100 << std::endl;
    cout << "left=" << box.left << " right=" << box.right << " top=" << box.top << " bot=" << box.bot << endl;
    // Guard against an out-of-range classId before indexing the label table.
    if(box.classId >= 0 && static_cast<size_t>(box.classId) < class_name.size())
    {
      string str1=class_name[box.classId];
      cv::putText(img,str1, cv::Point(box.left, box.top-15), cv::FONT_HERSHEY_PLAIN, 1, cv::Scalar(0, 255, 0));
    }
  }
  // Window name "obstalce" (sic) kept: it is a runtime string users may know.
  cv::imshow("obstalce",img);
  // Process HighGUI events so the window actually refreshes, matching the
  // imshow+waitKey pattern used by the other visualization callbacks here.
  cv::waitKey(10);
  BboxTime bt;
  bt.bboxs = (*output);
  bt.time_stamp = time;
  bt.names = class_name;
  bt.img = img;
  {
    std::lock_guard<std::mutex> mutex_sync_bboxes(mutex_sync_bbox);
    bbox_time_cahce.push(bt);
    if(bbox_time_cahce.size() > kCountBuff) bbox_time_cahce.pop();
  }
}

/**
 * \brief Non-visual detection callback: buffers the first frame's boxes,
 * class names, timestamp and image for the depth-fusion thread. The cache
 * is bounded to kCountBuff entries (oldest dropped).
 * \param outputs    detector output (only the first frame's boxes are used)
 * \param time       frame timestamp
 * \param img        image the boxes refer to
 * \param class_name class-id -> label lookup
 */
void callback_bbox(OutputDataType& outputs, TimeType& time, ImageType& img, ClassNameType& class_name )
{
  // Guard: dereferencing begin() of a null or empty list is undefined behavior.
  if(!outputs || outputs->empty()) return;
  auto output = outputs->begin();
  BboxTime bt;
  bt.bboxs = (*output);
  bt.time_stamp = time;
  bt.names = class_name;
  bt.img = img;
  {
    std::lock_guard<std::mutex> mutex_sync_bboxes(mutex_sync_bbox);
    bbox_time_cahce.push(bt);
    if(bbox_time_cahce.size() > kCountBuff) bbox_time_cahce.pop();
  }
}

/**
 * \brief No-op callback for ground-mark detections when visualization is
 * disabled; the results are deliberately discarded.
 */
void callback_mark_ground(ImageBBoxes2DType& bboxes, TimeType& time)
{
    static_cast<void>(bboxes);  // intentionally unused
    static_cast<void>(time);    // intentionally unused
}

/**
 * \brief Visualization callback for ground-mark detections: draws every box
 * on the image and shows it in the "mark_ground" window.
 * \param bboxes detected 2-D boxes (x1/y1 + width/height)
 * \param img    image the boxes refer to; annotated in place
 * \param time   frame timestamp (unused)
 */
void callback_mark_ground_vis(ImageBBoxes2DType& bboxes, cv::Mat &img, TimeType& time)
{
    (void) time;
    for(const auto& bbox : bboxes)  // const ref: avoid copying each detection
    {
      cv::Rect rect(bbox.box2d.x1, bbox.box2d.y1, bbox.box2d.width, bbox.box2d.height);
      cv::rectangle(img, rect, cv::Scalar(10, 255, 10), 2);
    }
    cv::imshow("mark_ground", img);
    cv::waitKey(10);  // pump HighGUI events so the window refreshes
}


/**
 * \brief Visualization callback for traffic-sign (CCTSDB) detections:
 * draws every box on the image and shows it.
 * \param bboxes detected 2-D boxes (x1/y1 + width/height)
 * \param img    image the boxes refer to; annotated in place
 * \param time   frame timestamp (unused)
 */
void callback_CCTSDB(ImageBBoxes2DType& bboxes, cv::Mat &img, TimeType& time)
{
    (void) time;
    for(const auto& bbox : bboxes)  // const ref: avoid copying each detection
    {
      cv::Rect rect(bbox.box2d.x1, bbox.box2d.y1, bbox.box2d.width, bbox.box2d.height);
      cv::rectangle(img, rect, cv::Scalar(10, 255, 10), 2);
    }
    // Window name "sgin" (sic) kept: renaming a runtime string changes behavior.
    cv::imshow("sgin", img);
    cv::waitKey(10);  // pump HighGUI events so the window refreshes
}

/**
 * \brief Fuses one synchronized depth frame with one set of detections:
 * colorizes the depth (z) channel, draws every box on both views, finds the
 * nearest valid 3-D point inside each (shrunken) box, and displays the
 * annotated depth and detection images.
 * \param depth_data point-cloud matrix (three float channels x/y/z) + timestamp
 * \param bbox_data  detections, class names, source image + timestamp
 */
void MergeData(DepthTime& depth_data, BboxTime& bbox_data)
{
      const PointCloudMat& mat_point = depth_data.point_mat;
      const vector<Tn::Bbox>& bboxs = bbox_data.bboxs;  // const ref: no deep copy
      cv::Mat img = bbox_data.img;                      // shallow cv::Mat copy
      std::vector<cv::Mat> channels(3);
      cv::split(mat_point.clone(), channels);
      cv::Mat color, depth_8u;
      cv::Mat depth = channels[2];  // z channel
      // Map the depth range (assumed [0, 80] m — TODO confirm) onto 8 bits.
      // 255.0/80 keeps the scale in floating point; the original integer
      // expression 255/80 truncated the scale factor to 3.
      depth.convertTo(depth_8u, CV_8U, 255.0/80);
      cv::applyColorMap(depth_8u, color, cv::COLORMAP_RAINBOW);
      color.setTo(cv::Scalar(0, 0, 0), depth < 0);  // black out invalid depth
      for(const auto& bbox : bboxs)
      {
        // Shrink the box to its central half to limit background contamination.
        int t = bbox.top + (bbox.bot - bbox.top)/4;
        int b = bbox.bot - (bbox.bot - bbox.top)/4;
        int l = bbox.left + (bbox.right - bbox.left)/4;
        int r = bbox.right - (bbox.right - bbox.left)/4;
        // Clamp to the matrix: detector boxes can extend past the image edge,
        // and an out-of-range cv::Range would throw.
        if(t < 0) t = 0;
        if(l < 0) l = 0;
        if(b > mat_point.rows) b = mat_point.rows;
        if(r > mat_point.cols) r = mat_point.cols;
        if(t >= b || l >= r) continue;  // nothing left to sample

        cv::Mat local_point = mat_point(cv::Range(t, b), cv::Range(l, r)).clone();
        cv::rectangle(img,cv::Point(bbox.left,bbox.top),cv::Point(bbox.right,bbox.bot),cv::Scalar(0,0,255),3,8,0);
        cv::rectangle(color,cv::Point(bbox.left,bbox.top),cv::Point(bbox.right,bbox.bot),cv::Scalar(0,0,255),3,8,0);
        cv::rectangle(color,cv::Point(l, t),cv::Point(r,b),cv::Scalar(255,0,0),1,8,0);

        // Scan every (x, y, z) triple for the closest valid point (z > 0).
        // local_point is clone()d, so the data is contiguous.
        // BUG FIX: the pointer now advances on every iteration; previously it
        // advanced only when a closer point was found, so the loop kept
        // re-reading the first triple.
        const float *data_ptr = local_point.ptr<float>();
        float location[] = {10000,10000,10000};
        for(int index = 0; index < local_point.rows * local_point.cols; index++, data_ptr += 3)
        {
          if( data_ptr[2] > 0 &&  location[2] > data_ptr[2])
          {
            location[0] = data_ptr[0];
            location[1] = data_ptr[1];
            location[2] = data_ptr[2];
          }
        }
        std::string label ="z:" + std::to_string(location[2]);
        cv::putText(img,label, cv::Point((bbox.left + bbox.right)/2, (bbox.top + bbox.bot)/2), cv::FONT_HERSHEY_PLAIN, 1, cv::Scalar(0, 255, 0));
      }
      cv::imshow("depth", color);
      cv::imshow("detectiosn", img);  // window name typo kept (runtime string)
      cv::waitKey(10);
}
/**
 * \brief Fusion worker loop: pairs cached detection results with cached
 * depth frames whose timestamps differ by at most kMaxTimeDiffSec, handing
 * each matched pair to MergeData(). Whichever cache's front entry is too
 * old gets discarded. Runs forever (started detached from main()).
 */
void ThreadSyncData()
{
  constexpr double kMaxTimeDiffSec = 0.5;  // pairing tolerance in seconds
  BboxTime bt;
  DepthTime dt;
  while(true)
  {
    bool synced = false;
    {
      // Lock both caches while matching. Producers each take only a single
      // lock, so the fixed acquisition order here cannot deadlock.
      std::lock_guard<std::mutex> mutex_sync_bboxes(mutex_sync_bbox);
      std::lock_guard<std::mutex> mutex_sync_depthes(mutex_sync_depth);
      while (!bbox_time_cahce.empty() && !depth_time_cahce.empty())
      {
        bt = bbox_time_cahce.front();
        dt = depth_time_cahce.front();
        std::chrono::duration<double> diff = bt.time_stamp - dt.time_stamp;
        const double duration = diff.count();  // seconds; > 0 => depth frame older
        if(duration <= kMaxTimeDiffSec && duration >= -kMaxTimeDiffSec)
        {
          // Close enough in time: consume both, merge outside the locks.
          synced = true;
          bbox_time_cahce.pop();
          depth_time_cahce.pop();
          break;
        }
        else if (duration > kMaxTimeDiffSec)
        {
          depth_time_cahce.pop();  // depth frame too old, drop it
        }
        else
        {
          bbox_time_cahce.pop();   // detection too old, drop it
        }
      }
    }
    if(synced)
    {
      MergeData(dt, bt);
    }
    // Idle briefly so the loop does not spin at 100% CPU.
    // std::this_thread::sleep_for replaces the obsolete POSIX usleep().
    std::this_thread::sleep_for(std::chrono::milliseconds(2));
  }
}

/**
 * \brief Front-vision node entry point.
 *
 * Parses command-line options, loads the YAML configuration, registers the
 * obstacle / stereo / traffic-sign / ground-mark tasks and their callbacks
 * on a FrontVisionApp, optionally starts the depth+obstacle fusion thread,
 * then feeds images read from fixed file paths in an endless loop.
 */
int main(int argc, char **argv)
{
  // --- Command line: -c/--config (YAML path), -r/--root_path --------------
  cxxopts::Options options(argv[0], "double_camera_node");
  options.add_options()
      ("h,help", "this app is used for double_camera_node")
      ("c,config", "configuration file path",
      cxxopts::value<std::string>()->default_value("front_vision.yaml"))
      ("r,root_path", "root path",
      cxxopts::value<std::string>()->default_value("/opt/adc/"))
      ;
  cxxopts::ParseResult opts =  options.parse(argc, argv);
  std::string config_file = opts["config"].as<std::string>();

  config_file = adc::RootPath::GetAbsolutePath(config_file);
  LOG(INFO)<<"Front vision config: "<<config_file;
  // --- Configuration values (defaults used if YAML parsing fails) ---------
  std::string obstacle_detector_config;
  std::string stereo_vison_config;
  std::string left_camera_params;
  std::string cctsdb_detector_config;
  std::string mark_ground_detector_config;
  bool mono = false;                       // true: single-camera input path
  bool vis_mark_ground = false;            // show ground-mark detections
  bool vis_pointcloud = false;             // show the 3-D point-cloud viewer
  bool vis_obstacle = false;               // show obstacle detections
  bool fusion_depth_and_obstacle = false;  // run the ThreadSyncData fusion thread
  float hz = 10;                           // per-task processing rate (Hz)
  try
  {
    YAML::Node node = YAML::LoadFile(config_file);
    obstacle_detector_config = node["obstacle_detector_config"].as<std::string>();
    stereo_vison_config = node["stereo_vison_config"].as<std::string>();
    cctsdb_detector_config = node["cctsdb_detector_config"].as<std::string>();
    mark_ground_detector_config = node["mark_ground_detector_config"].as<std::string>();
    mono =  node["mono"].as<bool>();
    hz = node["hz"].as<float>(10.0);
    vis_mark_ground = node["vis_mark_ground"].as<bool>();
    vis_pointcloud = node["vis_pointcloud"].as<bool>();
    left_camera_params = node["left_camera_params"].as<std::string>();
    vis_obstacle = node["vis_obstacle"].as<bool>();
    fusion_depth_and_obstacle = node["fusion_depth_and_obstacle"].as<bool>();
  }
  catch (const std::exception& e)
  {
    // NOTE(review): on YAML failure we only log and continue with empty
    // config paths — the tasks below will receive empty strings. Consider
    // exiting here instead; confirm intended behavior.
    LOG(ERROR)<<"Init Yaml Faile!";
    std::cerr << e.what() << '\n';
  }
  // --- Application wiring --------------------------------------------------
  left_camera_params = adc::RootPath::GetAbsolutePath(left_camera_params);
  adc::FrontVisionApp::Ptr front_vision_app_ptr = std::make_shared<adc::FrontVisionApp>(left_camera_params);

  front_vision_app_ptr->SetTask(obstacle_detector_config, adc::FrontVisionApp::TASK::OBSTACLE_DETECTION_YOLO, hz);
  front_vision_app_ptr->SetTask(stereo_vison_config, adc::FrontVisionApp::TASK::STEREO, hz);
  front_vision_app_ptr->SetTask(cctsdb_detector_config, adc::FrontVisionApp::TASK::CCTSDB_DETECTION_CENTERNET, hz);
  front_vision_app_ptr->SetTask(mark_ground_detector_config, adc::FrontVisionApp::TASK::MARK_GROUND_DETECTION_CENTERNET, hz);
  LOG(INFO)<<"complated front vision set task";
  if(vis_pointcloud)
  {
    // Optional 3-D cloud rendering plus a coordinate axes widget.
    front_vision_app_ptr->SetStereoPointAndColorCallback(callback_deal_point);
    plot3d.showWidget("Coordinate Widget", cv::viz::WCoordinateSystem());
  } 
   front_vision_app_ptr->SetStereoPointCloudMatCallback(callback_deal_points_mat);

  if(vis_obstacle)
  {
   // front_vision_app_ptr->SetDetectionObjectCallback(callback_vis_bbox);
  } 
  else
  {
    front_vision_app_ptr->SetDetectionObjectCallback(callback_bbox);
  }
  if(vis_mark_ground)
  {
    front_vision_app_ptr->SetMarkGroundDetectionWithImgCallback(callback_mark_ground_vis);
  }
  else
  {
    front_vision_app_ptr->SetMarkGroundDetectionCallback(callback_mark_ground);
  }
  front_vision_app_ptr->SetCCTSDBDetectionWithImgCallback(callback_CCTSDB);

  front_vision_app_ptr->Ready();

  // Fusion thread runs detached; the process never joins it (main loops forever).
  std::thread thread_sync_res;
  if(fusion_depth_and_obstacle)
  {
    thread_sync_res = std::thread(ThreadSyncData);
    thread_sync_res.detach();
  }      
  // --- Image feed loop: re-read fixed test images forever ------------------
  while(true)
  {
    if(mono)
    {
      left_img = cv::imread("/home/nvidia/workspace/front_vision/images/2967.png");
      if(!left_img.empty())
      {
        // Stamp and submit the frame, then release our reference.
        std::chrono::system_clock::time_point tm = std::chrono::system_clock::now();
        front_vision_app_ptr->SetImage(left_img, tm);
        left_img = cv::Mat();
      }
    }
    else
    {
      right_img = cv::imread("./images/704-cal_right.png");
      left_img = cv::imread("./images/704-cal_left.png");
      if(!left_img.empty() && !right_img.empty() )
      {
        std::chrono::system_clock::time_point tm = std::chrono::system_clock::now();
        front_vision_app_ptr->SetImages(left_img,  right_img, tm);
        left_img = cv::Mat();
        right_img = cv::Mat();
      }
    }
  // thread_sync_res.join();
  }
  return 0;
}