/*
 * Copyright (c) 2020, Robobrain.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the Willow Garage, Inc. nor the names of its
 *       contributors may be used to endorse or promote products derived from
 *       this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/* Author: Konstantinos Konstantinidis */

#include "n_lidar_obj/l_shape_track/datmo.hpp"

Datmo::Datmo()
{
  // Public handle for topics, private ("~") handle for node parameters.
  ros::NodeHandle n;
  ros::NodeHandle n_private("~");
  ROS_INFO("Starting Detection And Tracking of Moving Objects");

  // Coordinate frames used by the tracker.
  n_private.param("lidar_frame", lidar_frame, string("laser"));
  n_private.param("world_frame", world_frame, string("map"));
  ROS_INFO("The lidar_frame is: %s and the world frame is: %s", lidar_frame.c_str(), world_frame.c_str());

  // Clustering / association tuning parameters.
  // NOTE(review): the "threshold_distance" key is read twice with different
  // defaults (dth=0.2, threshold_distance=0.17); if the parameter is set,
  // both variables receive the same value — confirm this is intended.
  n_private.param("threshold_distance", dth, 0.2);
  n_private.param("max_cluster_size", max_cluster_size, 1000);
  n_private.param("threshold_distance", threshold_distance, 0.17);
  n_private.param("euclidean_distance", euclidean_distance, 0.25);

  // Toggle for publishing the per-cluster visualisation markers.
  n_private.param("pub_markers", p_marker_pub, true);

  // Outputs: KF box tracks and RViz markers; input: preprocessed lidar scan.
  pub_tracks_box_kf = n.advertise<msg_datmo::TrackArray>("datmo/box_kf", 10);
  pub_marker_array = n.advertise<visualization_msgs::MarkerArray>("datmo/marker_array", 10);
  sub_scan = n.subscribe("/msg_lidar_prep", 1, &Datmo::callback, this);

  depth_cluster.Init();
}

// Nothing to release explicitly: publishers, the subscriber and the TF
// listener are members and clean themselves up when the object is destroyed.
Datmo::~Datmo()
{
}
// Main per-scan entry point: segments the scan into point clusters,
// associates them with the tracked clusters of the previous frame,
// updates/creates/deletes tracks, and publishes track and marker output.
void Datmo::callback(const msg_lidar_prep::msg_lidar_prep::ConstPtr &scan_in)
{
  // Clear all previously published markers (action 3 == DELETEALL).
  visualization_msgs::Marker marker;
  visualization_msgs::MarkerArray markera;
  marker.action = 3;
  markera.markers.push_back(marker);
  pub_marker_array.publish(markera);

  // Pose of the ego vehicle in the world frame, passed through to the
  // Cluster objects. The lidar_frame -> world_frame lookup is currently
  // disabled, so this transform is left default-constructed.
  tf::StampedTransform ego_pose;
  // tf_listener.lookupTransform(world_frame, lidar_frame, ros::Time(0), ego_pose);

  //TODO implement varying calculation of dt
  dt = 0.08;

  // If time jumped backwards (e.g. a looping rosbag), drop all tracks.
  if (time > ros::Time::now())
  {
    clusters.clear();
  }
  time = ros::Time::now();

  // Current-frame measurements: one pointList per segmented object.
  vector<pointList> point_clusters;
  Datmo::Clustering(scan_in, point_clusters);

  // --- Association of current groups with tracked clusters, based on the
  // --- L1 distance between centroids.
  vector<bool> g_matched(point_clusters.size(), false); // group matched with a tracked cluster
  vector<bool> c_matched(clusters.size(), false);       // tracked cluster matched with a group

  // Distance matrix: euclidean[g][c] = L1 centroid distance between group g
  // and tracked cluster c. A std::vector replaces the original variable-length
  // array, which is non-standard C++ and risked stack overflow (and was UB
  // when either dimension was zero).
  vector<vector<double>> euclidean(point_clusters.size(), vector<double>(clusters.size(), 0.0));

  // Mean coordinates of every group, compared against the tracked clusters.
  for (unsigned int g = 0; g < point_clusters.size(); ++g)
  {
    double sum_x = 0, sum_y = 0;
    for (unsigned int l = 0; l < point_clusters[g].size(); l++)
    {
      sum_x = sum_x + point_clusters[g][l].first;
      sum_y = sum_y + point_clusters[g][l].second;
    }
    const double mean_x = sum_x / point_clusters[g].size();
    const double mean_y = sum_y / point_clusters[g].size();

    for (unsigned int c = 0; c < clusters.size(); ++c)
    {
      euclidean[g][c] = abs(mean_x - clusters[c].meanX()) + abs(mean_y - clusters[c].meanY());
    }
  }

  // For every tracked cluster find the closest group and associate the pair
  // if the distance is below the euclidean_distance threshold.
  vector<pair<int, int>> pairs; // (tracked cluster index, group index)
  for (unsigned int c = 0; c < clusters.size(); ++c)
  {
    // 'position' is only read when min_distance was lowered below the
    // threshold, but initialize it anyway to avoid any read-before-write.
    unsigned int position = 0;
    double min_distance = euclidean_distance;
    for (unsigned int g = 0; g < point_clusters.size(); ++g)
    {
      if (euclidean[g][c] < min_distance)
      {
        min_distance = euclidean[g][c];
        position = g;
      }
    }
    if (min_distance < euclidean_distance)
    {
      g_matched[position] = true, c_matched[c] = true;
      pairs.push_back(pair<int, int>(c, position));
    }
  }

//Update tracked clusters with their associated measurement.
#pragma omp parallel for
  for (unsigned int p = 0; p < pairs.size(); ++p)
  {
    // update() refreshes both the shape track and the dynamic (KF) track.
    clusters[pairs[p].first].update(point_clusters[pairs[p].second], dt, ego_pose);
  }

  // Delete tracked clusters that were not associated this frame
  // (swap-and-pop, keeping c_matched in sync with clusters).
  unsigned int o = 0;
  while (o < clusters.size())
  {
    if (c_matched[o] == false)
    {
      std::swap(clusters[o], clusters.back());
      clusters.pop_back();

      std::swap(c_matched[o], c_matched.back());
      c_matched.pop_back();
      // Do not advance: the element swapped into slot o must be examined too.
    }
    else
    {
      ++o;
    }
  }

  // Initialise a new Cluster object for every unmatched, small-enough group.
  for (unsigned int i = 0; i < point_clusters.size(); ++i)
  {
    if (g_matched[i] == false && point_clusters[i].size() < static_cast<size_t>(max_cluster_size))
    {
      Cluster cl(cclusters, point_clusters[i], dt, world_frame, ego_pose);
      cclusters++;
      clusters.push_back(cl);
    }
  }

  // Visualisations and message publications.
  visualization_msgs::MarkerArray marker_array;
  msg_datmo::TrackArray track_array_box_kf;
  for (unsigned int i = 0; i < clusters.size(); i++)
  {
    track_array_box_kf.tracks.push_back(clusters[i].msg_track_box_kf);

    if (p_marker_pub)
    {
      marker_array.markers.push_back(clusters[i].getClosestCornerPointVisualisationMessage());
      marker_array.markers.push_back(clusters[i].getBoundingBoxCenterVisualisationMessage());
      marker_array.markers.push_back(clusters[i].getArrowVisualisationMessage());
      marker_array.markers.push_back(clusters[i].getThetaL1VisualisationMessage());
      marker_array.markers.push_back(clusters[i].getThetaL2VisualisationMessage());
      marker_array.markers.push_back(clusters[i].getThetaBoxVisualisationMessage());
      marker_array.markers.push_back(clusters[i].getClusterVisualisationMessage());
      marker_array.markers.push_back(clusters[i].getBoundingBoxVisualisationMessage());
      marker_array.markers.push_back(clusters[i].getBoxModelKFVisualisationMessage());
      marker_array.markers.push_back(clusters[i].getLShapeVisualisationMessage());
      marker_array.markers.push_back(clusters[i].getLineVisualisationMessage());
      marker_array.markers.push_back(clusters[i].getBoxSolidVisualisationMessage());
    }
  }

  pub_marker_array.publish(marker_array);
  pub_tracks_box_kf.publish(track_array_box_kf);
  visualiseGroupedPoints(point_clusters);
}

// Publishes every point cluster as a POINTS marker with a random color so
// that neighbouring clusters are visually distinguishable in RViz.
void Datmo::visualiseGroupedPoints(const vector<pointList> &point_clusters)
{
  visualization_msgs::MarkerArray marker_array;

  // Shared marker template; per-cluster fields (id, color, points) are
  // overwritten on every iteration below.
  visualization_msgs::Marker gpoints;
  gpoints.header.frame_id = world_frame;
  gpoints.header.stamp = ros::Time::now();
  gpoints.ns = "clustered_points";
  gpoints.action = visualization_msgs::Marker::ADD;
  gpoints.pose.orientation.w = 1.0;
  gpoints.type = visualization_msgs::Marker::POINTS;
  // POINTS markers use x and y scale for width/height respectively.
  gpoints.scale.x = 0.04;
  gpoints.scale.y = 0.04;

  for (const auto &cluster : point_clusters)
  {
    gpoints.id = cg;
    cg++;
    // Random color per cluster (alpha fixed at fully opaque).
    gpoints.color.g = rand() / double(RAND_MAX);
    gpoints.color.b = rand() / double(RAND_MAX);
    gpoints.color.r = rand() / double(RAND_MAX);
    gpoints.color.a = 1.0;

    for (const auto &pt : cluster)
    {
      geometry_msgs::Point p;
      p.x = pt.first;
      p.y = pt.second;
      p.z = 0;
      gpoints.points.push_back(p);
    }
    marker_array.markers.push_back(gpoints);
    gpoints.points.clear();
  }

  pub_marker_array.publish(marker_array);
}
// Debug overload for visualising the raw (unclustered) point cloud.
// NOTE(review): the publishing body below is fully commented out, so this
// function currently only builds an unused marker template and is a no-op;
// either restore the loop or remove the function once debugging is done.
void Datmo::visualiseGroupedPoints()
{
  //Publishing the clusters with different colors
  visualization_msgs::MarkerArray marker_array;
  //Populate grouped points message
  visualization_msgs::Marker gpoints;
  gpoints.header.frame_id = world_frame;
  gpoints.header.stamp = ros::Time::now();
  gpoints.ns = "raw_points";
  gpoints.action = visualization_msgs::Marker::ADD;
  gpoints.pose.orientation.w = 1.0;
  gpoints.type = visualization_msgs::Marker::POINTS;
  // POINTS markers use x and y scale for width/height respectively
  gpoints.scale.x = 0.04;
  gpoints.scale.y = 0.04;
  // for (unsigned int i = 0; i < depth_cluster._cloud->size(); ++i)
  // {

  //   auto point = depth_cluster._cloud->points()[i];
  //   gpoints.id = cr;
  //   cr++;
  //   gpoints.color.g = 227;
  //   gpoints.color.b = 132;
  //   gpoints.color.r = 255;
  //   gpoints.color.a = 1.0;
  //   //gpoints.lifetime = ros::Duration(0.08);
  //   geometry_msgs::Point p;
  //   p.x = point.y();
  //   p.y = -point.x();
  //   p.z = point.z();
  //   gpoints.points.push_back(p);

  //   marker_array.markers.push_back(gpoints);
  //   gpoints.points.clear();
  // }
  // depth_cluster._cloud->clear();
  // pub_marker_array.publish(marker_array);
}
// Runs the depth-clustering segmentation on the incoming preprocessed scan
// and converts every segmented object into the 2-D pointList format used by
// the tracker. Results are appended to 'clusters'.
void Datmo::Clustering(const msg_lidar_prep::msg_lidar_prep::ConstPtr &scan_in, vector<pointList> &clusters)
{
  // Project the scan and segment it; results land in depth_cluster._obj_cluster.
  depth_cluster.CloudProjection(scan_in);

  for (const auto &kv : depth_cluster._obj_cluster)
  {
    const auto &cluster = kv.second;
    pointList cluster_datmo;

    // Keep only the planar (x, y) coordinates of every clustered point.
    // Range-for avoids the original signed/unsigned index comparison and the
    // bounds-checked at() calls.
    for (const auto &pt : cluster)
    {
      cluster_datmo.push_back(Point(pt.x(), pt.y()));
    }
    clusters.push_back(cluster_datmo);
  }

  // Consume the segmentation buffer so the next frame starts clean.
  depth_cluster._obj_cluster.clear();
}
void Datmo::transformPointList(const pointList &in, pointList &out)
{
  //This funcion transforms pointlist between coordinate frames and it is a wrapper for the
  //transformPoint function
  //There is not try catch block because it is supposed to be already encompassed into one

  geometry_msgs::PointStamped point_in, point_out;
  Point point;
  point_in.header.frame_id = lidar_frame;
  // 不可以把ros::Time(0)改成ros::time::now()，因为监听做不到实时，会有几毫秒的延迟
  // ros::Time(0)指最近时刻存储的数据; ros::time::now()则指当下
  // 非要使用ros::time::now,则需要结合waitForTransform()使用
  point_in.header.stamp = ros::Time(0);
  for (unsigned int i = 0; i < in.size(); ++i)
  {
    point_in.point.x = in[i].first;
    point_in.point.y = in[i].second;
    // 将一个坐标系下的点的坐标转换到另一个坐标系下
    // 你首先需要定义point_in.header.frame_id所属的坐标系
    tf_listener.transformPoint(world_frame, point_in, point_out);
    point.first = point_out.point.x;
    point.second = point_out.point.y;
    out.push_back(point);
  }
}
