//=========================================================================
//
// Copyright 2018 Kitware, Inc.
// Author: Guilbert Pierre (spguilbert@gmail.com)
// Date: 03-27-2018
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//=========================================================================

// This slam algorithm is inspired by the LOAM algorithm:
// J. Zhang and S. Singh. LOAM: Lidar Odometry and Mapping in Real-time.
// Robotics: Science and Systems Conference (RSS). Berkeley, CA, July 2014.

// The algorithm is composed of three sequential steps:
//
// - Keypoints extraction: this step consists of extracting keypoints from
// the point clouds. To do that, the laser lines / scans are treated independently.
// The laser lines are projected onto the XY plane and are rescaled depending on
// their vertical angle. Then we compute their curvature and create two classes of
// keypoints: the edge keypoints, which correspond to points with a high curvature,
// and planar points, which correspond to points with a low curvature.
//
// - Ego-Motion: this step consists of recovering the motion of the lidar
// sensor between two frames (two sweeps). The motion is modeled by a constant
// velocity and angular velocity between two frames (i.e. null acceleration).
// Hence, we can parameterize the motion by a rotation and translation per sweep / frame
// and interpolate the transformation inside a frame using the timestamp of the points.
// Since the point clouds generated by a lidar are sparse we can't design a
// pairwise match between keypoints of two successive frames. Hence, we decided to use
// a closest-point matching between the keypoints of the current frame
// and the geometric features derived from the keypoints of the previous frame.
// The geometric features are lines or planes and are computed using the edge keypoints
// and planar keypoints of the previous frame. Once the matching is done, a keypoint
// of the current frame is matched with a plane / line (depending on the
// nature of the keypoint) from the previous frame. Then, we recover R and T by
// minimizing the function f(R, T) = sum(d(point, line)^2) + sum(d(point, plane)^2),
// which can be written f(R, T) = sum((R*X+T-P).t*A*(R*X+T-P)) where:
// - X is a keypoint of the current frame
// - P is a point of the corresponding line / plane
// - A = (n*n.t) with n being the normal of the plane
// - A = (I - n*n.t).t * (I - n*n.t) with n being a direction vector of the line
// Since the function f(R, T) is a non-linear mean square error function
// we decided to use the Levenberg-Marquardt algorithm to recover its argmin.
//
// - Mapping: this step consists of refining the motion recovered in the Ego-Motion
// step and adding the new frame to the environment map. Thanks to the ego-motion
// recovered at the previous step it is now possible to estimate the new position of
// the sensor in the map. We use this estimation as an initial point (R0, T0) and we
// perform an optimization again using the keypoints of the current frame and the matched
// keypoints of the map (and not only the previous frame this time!). Once the position in the
// map has been refined from the first estimation it is then possible to update the map by
// adding the keypoints of the current frame into the map.
//
// In the following programs, "slam" and "slam.cxx", the lidar
// coordinate system {L} is a 3D coordinate system with its origin at the
// geometric center of the lidar. The world coordinate system {W} is a 3D
// coordinate system which coincides with {L} at the initial position. The
// points will be denoted by the ending letter L or W if they belong to
// the corresponding coordinate system.

// LOCAL
#include "Slam.h"
#include "CeresCostFunctions.h"
#include "vtkEigenTools.h"
// STD
#include <algorithm>
#include <chrono>
#include <cmath>
#include <ctime>
#include <iostream>
#include <sstream>
// EIGEN
#include <Eigen/Dense>
// PCL
#include <pcl/filters/voxel_grid.h>
// CERES
#include <ceres/ceres.h>
// NANOFLANN
#include <nanoflann.hpp>

namespace {
//-----------------------------------------------------------------------------
/**
 * @brief Build the rotation matrix encoded by a 6-DoF pose vector.
 * @param T pose [rx, ry, rz, tx, ty, tz]; only the first three components
 *        (Euler angles, in radians) are used.
 * @return R = Rz(T(2)) * Ry(T(1)) * Rx(T(0)) (Z-Y-X convention, consistent
 *         with the angle extraction done in Slam::GetWorldTransform()).
 */
Eigen::Matrix3d GetRotationMatrix(const Eigen::Matrix<double, 6, 1>& T)
{
  return Eigen::Matrix3d(
          Eigen::AngleAxisd(T(2), Eigen::Vector3d::UnitZ())     /* rotation around Z-axis */
        * Eigen::AngleAxisd(T(1), Eigen::Vector3d::UnitY())     /* rotation around Y-axis */
        * Eigen::AngleAxisd(T(0), Eigen::Vector3d::UnitX()));   /* rotation around X-axis */
}

//-----------------------------------------------------------------------------
// Reference time point set by InitTime() and read by StopTimeAndDisplay().
// std::chrono::steady_clock is used instead of std::clock() because the
// latter measures CPU time summed over all threads, which overestimates the
// elapsed time of multi-threaded steps (e.g. the Ceres solves).
std::chrono::steady_clock::time_point startTime;

//-----------------------------------------------------------------------------
// Start the timer used by StopTimeAndDisplay().
void InitTime()
{
  startTime = std::chrono::steady_clock::now();
}

//-----------------------------------------------------------------------------
// Display the wall-clock time elapsed since the last call to InitTime().
void StopTimeAndDisplay(const std::string& functionName)
{
  const std::chrono::duration<double> elapsed = std::chrono::steady_clock::now() - startTime;
  std::cout << "  -time elapsed in function <" << functionName << "> : " << elapsed.count() << " sec" << std::endl;
}

//-----------------------------------------------------------------------------
// Convert an angle from radians to degrees.
// NOTE(review): M_PI is not standard C++ (POSIX / _USE_MATH_DEFINES
// extension of <cmath>) -- confirm availability on all targeted compilers.
double Rad2Deg(double val)
{
  return val / M_PI * 180;
}
}

// The map reconstructed from the slam algorithm is stored in a voxel grid
// which splits the space into different regions. From this voxel grid it is
// possible to only load the parts of the map which are relevant when we run
// the mapping optimization algorithm. Moreover, when a region of the space is
// too far from the current sensor position it is possible to remove the points
// stored in this region and to move the voxel grid to a region closer to the
// sensor position. This is used to decrease the memory used by the algorithm.
class RollingGrid {

public:
  RollingGrid() {}

  RollingGrid(double posX, double posY, double posZ)
  {
    // should initialize using Tworld + size / 2
    this->VoxelGridPosition[0] = static_cast<int>(posX);
    this->VoxelGridPosition[1] = static_cast<int>(posY);
    this->VoxelGridPosition[2] = static_cast<int>(posZ);;
  }

  // roll the grid to enable adding new point cloud
  void Roll(Eigen::Matrix<double, 6, 1> &T)
  {
    // Very basic implementation where the grid is not circular

    // compute the position of the new frame center in the grid
    int frameCenterX = std::floor(T[3] / this->VoxelSize) - this->VoxelGridPosition[0];
    int frameCenterY = std::floor(T[4] / this->VoxelSize) - this->VoxelGridPosition[1];
    int frameCenterZ = std::floor(T[5] / this->VoxelSize) - this->VoxelGridPosition[2];

    // shift the voxel grid to the left
    while (frameCenterX - std::ceil(this->PointCloudSize / 2) <= 0)
    {
      for (int j = 0; j < this->VoxelSize; j++)
      {
        for (int k = 0; k < this->VoxelSize; k++)
        {
          for (int i = this->VoxelSize - 1; i > 0; i--)
          {
            this->grid[i][j][k] = this->grid[i-1][j][k];
          }
          this->grid[0][j][k].reset(new pcl::PointCloud<Slam::Point>());
        }
      }
      frameCenterX++;
      this->VoxelGridPosition[0]--;
    }

    // shift the voxel grid to the right
    while (frameCenterX + std::ceil(this->PointCloudSize / 2) >= this->VoxelSize - 1)
    {
      for (int j = 0; j < this->VoxelSize; j++)
      {
        for (int k = 0; k < this->VoxelSize; k++)
        {
          for (int i = 0; i < this->VoxelSize - 1; i++)
          {
            this->grid[i][j][k] = this->grid[i+1][j][k];
          }
          this->grid[VoxelSize-1][j][k].reset(new pcl::PointCloud<Slam::Point>());
        }
      }
      frameCenterX--;
      this->VoxelGridPosition[0]++;
    }

    // shift the voxel grid to the bottom
    while (frameCenterY - std::ceil(this->PointCloudSize / 2) <= 0)
    {
      for (int i = 0; i < this->VoxelSize; i++)
      {
        for (int k = 0; k < this->VoxelSize; k++)
        {
          for (int j = this->VoxelSize - 1; j > 0; j--)
          {
            this->grid[i][j][k] = this->grid[i][j-1][k];
          }
          this->grid[i][0][k].reset(new pcl::PointCloud<Slam::Point>());
        }
      }
      frameCenterY++;
      this->VoxelGridPosition[1]--;
//      cout << "bottom";
    }

    // shift the voxel grid to the top
    while (frameCenterY + std::ceil(this->PointCloudSize / 2) >= this->VoxelSize - 1)
    {
      for (int i = 0; i < this->VoxelSize; i++)
      {
        for (int k = 0; k < this->VoxelSize; k++)
        {
          for (int j = 0; j < this->VoxelSize - 1; j++)
          {
            this->grid[i][j][k] = this->grid[i][j+1][k];
          }
          this->grid[i][VoxelSize-1][k].reset(new pcl::PointCloud<Slam::Point>());
        }
      }
      frameCenterY--;
      this->VoxelGridPosition[1]++;
    }

    // shift the voxel grid to the "camera"
    while (frameCenterZ - std::ceil(this->PointCloudSize / 2) <= 0)
    {
      for (int i = 0; i < this->VoxelSize; i++)
      {
        for (int j = 0; j < this->VoxelSize; j++)
        {
          for (int k = this->VoxelSize - 1; k > 0; k--)
          {
            this->grid[i][j][k] = this->grid[i][j][k-1];
          }
          this->grid[i][j][0].reset(new pcl::PointCloud<Slam::Point>());
        }
      }
      frameCenterZ++;
      this->VoxelGridPosition[2]--;
    }

    // shift the voxel grid to the "horizon"
    while (frameCenterZ + std::ceil(this->PointCloudSize  / 2) >= this->VoxelSize - 1)
    {
      for (int i = 0; i < this->VoxelSize; i++)
      {
        for (int j = 0; j < this->VoxelSize; j++)
        {
          for (int k = 0; k < this->VoxelSize - 1; k++)
          {
            this->grid[i][j][k] = this->grid[i][j][k+1];
          }
          this->grid[i][j][VoxelSize-1].reset(new pcl::PointCloud<Slam::Point>());
        }
      }
      frameCenterZ--;
      this->VoxelGridPosition[2]++;
    }
  }

  // get points arround T
  pcl::PointCloud<Slam::Point>::Ptr Get(Eigen::Matrix<double, 6, 1> &T)
  {
    // compute the position of the new frame center in the grid
    int frameCenterX = std::floor(T[3] / this->VoxelSize) - this->VoxelGridPosition[0];
    int frameCenterY = std::floor(T[4] / this->VoxelSize) - this->VoxelGridPosition[1];
    int frameCenterZ = std::floor(T[5] / this->VoxelSize) - this->VoxelGridPosition[2];

    pcl::PointCloud<Slam::Point>::Ptr intersection(new pcl::PointCloud<Slam::Point>);

    // Get all voxel in intersection should use ceil here
    for (int i = frameCenterX - std::ceil(this->PointCloudSize / 2); i <= frameCenterX + std::ceil(this->PointCloudSize / 2); i++)
    {
      for (int j = frameCenterY - std::ceil(this->PointCloudSize / 2); j <= frameCenterY + std::ceil(this->PointCloudSize / 2); j++)
      {
        for (int k = frameCenterZ - std::ceil(this->PointCloudSize / 2); k <= frameCenterZ + std::ceil(this->PointCloudSize / 2); k++)
        {
          if (i < 0 || i > (this->VoxelSize - 1) ||
              j < 0 || j > (this->VoxelSize - 1) ||
              k < 0 || k > (this->VoxelSize - 1))
          {
            continue;
          }
          pcl::PointCloud<Slam::Point>:: Ptr voxel = this->grid[i][j][k];
          for (unsigned int l = 0; l < voxel->size(); l++)
          {
            intersection->push_back(voxel->at(l));
          }
        }
      }
    }
    return intersection;
  }

  // get all points
  pcl::PointCloud<Slam::Point>::Ptr Get()
  {
    pcl::PointCloud<Slam::Point>::Ptr intersection(new pcl::PointCloud<Slam::Point>);

    // Get all voxel in intersection should use ceil here
    for (int i = 0; i < VoxelSize; i++)
    {
      for (int j = 0; j < VoxelSize; j++)
      {
        for (int k = 0; k < VoxelSize; k++)
        {
          pcl::PointCloud<Slam::Point>:: Ptr voxel = this->grid[i][j][k];
          for (unsigned int l = 0; l < voxel->size(); l++)
          {
            intersection->push_back(voxel->at(l));
          }
        }
      }
    }
    return intersection;
  }

  // add some points to the grid
  void Add(pcl::PointCloud<Slam::Point>::Ptr pointcloud)
  {
    if (pointcloud->size() == 0)
    {
      std::cout << "Pointcloud empty, voxel grid not updated" << std::endl;
      return;
    }

    // Voxel to filte because new points were add
    std::vector<std::vector<std::vector<int> > > voxelToFilter(VoxelSize, std::vector<std::vector<int> >(VoxelSize, std::vector<int>(VoxelSize, 0)));

    // Add points in the rolling grid
    int outlier = 0; // point who are not in the rolling grid
    for (unsigned int i = 0; i < pointcloud->size(); i++)
    {
      Slam::Point pts = pointcloud->points[i];
      // find the closest coordinate
      int cubeIdxX = std::floor(pts.x / this->VoxelSize) - this->VoxelGridPosition[0];
      int cubeIdxY = std::floor(pts.y / this->VoxelSize) - this->VoxelGridPosition[1];
      int cubeIdxZ = std::floor(pts.z / this->VoxelSize) - this->VoxelGridPosition[2];


      if (cubeIdxX >= 0 && cubeIdxX < this->VoxelSize &&
        cubeIdxY >= 0 && cubeIdxY < this->VoxelSize &&
        cubeIdxZ >= 0 && cubeIdxZ < this->VoxelSize)
      {
        voxelToFilter[cubeIdxX][cubeIdxY][cubeIdxZ] = 1;
        grid[cubeIdxX][cubeIdxY][cubeIdxZ]->push_back(pts);
      }
      else
      {
        outlier++;
      }
    }

    // Filter the modified pointCloud
    pcl::VoxelGrid<Slam::Point> downSizeFilter;
    downSizeFilter.setLeafSize(this->LeafSize, this->LeafSize, this->LeafSize);
    for (int i = 0; i < this->VoxelSize; i++)
    {
      for (int j = 0; j < this->VoxelSize; j++)
      {
        for (int k = 0; k < this->VoxelSize; k++)
        {
          if (voxelToFilter[i][j][k] == 1)
          {
            pcl::PointCloud<Slam::Point>::Ptr tmp(new pcl::PointCloud<Slam::Point>());
            downSizeFilter.setInputCloud(grid[i][j][k]);
            downSizeFilter.filter(*tmp);
            grid[i][j][k] = tmp;
          }
        }
      }
    }
  }


  void SetPointCoudMaxRange(const double maxdist)
  {
    this->PointCloudSize = 2.0 * std::ceil(maxdist / this->VoxelResolution);
  }

  void SetSize(int size)
  {
    this->VoxelSize = size;
    grid.resize(this->VoxelSize);
    for (int i = 0; i < this->VoxelSize; i++)
    {
      grid[i].resize(this->VoxelSize);
      for (int j = 0; j < this->VoxelSize; j++)
      {
        grid[i][j].resize(this->VoxelSize);
        for (int k = 0; k < this->VoxelSize; k++)
        {
          grid[i][j][k].reset(new pcl::PointCloud<Slam::Point>());
        }
      }
    }
  }

  void SetResolution(double resolution) { this->VoxelResolution = resolution; }

  void SetLeafSize(double size) { this->LeafSize = size; }

private:
  //! Size of the voxel grid: n*n*n voxels
  int VoxelSize = 50;

  //! Resolution of a voxel
  double VoxelResolution = 10;

  //! Size of a pointcloud in voxel
  int PointCloudSize = 25;

  //! Size of the leaf use to downsample the pointcloud
  double LeafSize = 0.2;

  //! VoxelGrid of pointcloud
  std::vector<std::vector<std::vector<pcl::PointCloud<Slam::Point>::Ptr> > > grid;

  // Position of the VoxelGrid
  int VoxelGridPosition[3] = {0,0,0};
};

//-----------------------------------------------------------------------------
// Construct the SLAM engine; all internal state is (re)initialized
// through Reset().
Slam::Slam()
{
  this->Reset();
}

//-----------------------------------------------------------------------------
void Slam::Reset()
{
  this->EdgesPointsLocalMap = std::make_shared<RollingGrid>();
  this->PlanarPointsLocalMap = std::make_shared<RollingGrid>();
  this->BlobsPointsLocalMap = std::make_shared<RollingGrid>();

  this->EdgesPointsLocalMap->SetResolution(10);
  this->PlanarPointsLocalMap->SetResolution(10);
  this->BlobsPointsLocalMap->SetResolution(10);

  this->EdgesPointsLocalMap->SetSize(50);
  this->PlanarPointsLocalMap->SetSize(50);
  this->BlobsPointsLocalMap->SetSize(50);

  this->NbrFrameProcessed = 0;

  // n-DoF parameters
  this->Tworld = Eigen::Matrix<double, 6, 1>::Zero();
  this->Trelative = Eigen::Matrix<double, 6, 1>::Zero();
  this->MotionParametersEgoMotion = Eigen::VectorXd::Zero(12, 1);
  this->MotionParametersMapping = Eigen::VectorXd::Zero(12, 1);

  this->SetVoxelGridLeafSizeEdges(0.45);
  this->SetVoxelGridLeafSizePlanes(0.6);
  this->SetVoxelGridLeafSizeBlobs(0.12);
}

//-----------------------------------------------------------------------------
// Export the current world pose: components 3-5 of Tworld hold the
// translation, while the Euler angles are re-extracted from the rotation
// matrix (Z-Y-X convention).
Transform Slam::GetWorldTransform()
{
  const Eigen::Matrix3d R = GetRotationMatrix(this->Tworld);

  Transform result;
  result.x = this->Tworld(3);
  result.y = this->Tworld(4);
  result.z = this->Tworld(5);
  result.rx = std::atan2(R(2, 1), R(2, 2));
  result.ry = -std::asin(R(2, 0));
  result.rz = std::atan2(R(1, 0), R(0, 0));
  return result;
}

//-----------------------------------------------------------------------------
// Flatten the 6x6 covariance of the estimated world pose into a 36-element
// vector, in the storage order of the underlying Eigen matrix.
std::vector<double> Slam::GetTransformCovariance()
{
  const double* raw = this->TworldCovariance.data();
  return std::vector<double>(raw, raw + 36);
}

//-----------------------------------------------------------------------------
std::unordered_map<std::string, double> Slam::GetDebugInformation()
{
  std::unordered_map<std::string, double> map;
  map["EgoMotion: edges used"] = this->EgoMotionEdgesPointsUsed;
  map["EgoMotion: planes used"] = this->EgoMotionPlanesPointsUsed;
  map["Mapping: edges used"] = this->MappingEdgesPointsUsed;
  map["Mapping: planes used"] = this->MappingPlanesPointsUsed;
  map["Mapping: blobs used"] = this->MappingBlobsPointsUsed;
  map["Mapping: variance error"] = this->MappingVarianceError;
  return map;
}

//-----------------------------------------------------------------------------
// Return a point cloud aggregating every edge keypoint currently stored in
// the rolling-grid edges map.
// NOTE(review): the return type is spelled PointXYZTIId here while the other
// map getters use Slam::Point -- presumably the same type; confirm in Slam.h.
pcl::PointCloud<PointXYZTIId>::Ptr Slam::GetEdgesMap()
{
  return this->EdgesPointsLocalMap->Get();
}

//-----------------------------------------------------------------------------
// Return a point cloud aggregating every planar keypoint currently stored in
// the rolling-grid planes map.
pcl::PointCloud<Slam::Point>::Ptr Slam::GetPlanarsMap()
{
  return this->PlanarPointsLocalMap->Get();
}

//-----------------------------------------------------------------------------
// Return a point cloud aggregating every blob keypoint currently stored in
// the rolling-grid blobs map.
pcl::PointCloud<Slam::Point>::Ptr Slam::GetBlobsMap()
{
  return this->BlobsPointsLocalMap->Get();
}

//-----------------------------------------------------------------------------
/**
 * @brief Process a new lidar frame: extract keypoints, estimate the
 *        ego-motion, refine the pose against the map and update the map.
 * @param pc incoming point cloud; the time field of its first point is used
 *        to timestamp the resulting trajectory pose.
 * @param laserIdMapping mapping of the laser ids, forwarded to the
 *        keypoints extractor.
 */
void Slam::AddFrame(pcl::PointCloud<Slam::Point>::Ptr pc, std::vector<size_t> laserIdMapping)
{
  if (pc->size() == 0)
  {
    std::cout << "Slam entry is an empty pointcloud" << std::endl;
    return;
  }

  std::cout << "#########################################################" << std::endl
            << "Processing frame : " << this->NbrFrameProcessed << std::endl
            << "#########################################################" << std::endl
            << std::endl;

  double time = pc->points[0].time;

  // If the new frame is the first one we just add the
  // extracted keypoints into the map without running
  // odometry and mapping steps
  if (this->NbrFrameProcessed == 0)
  {
    // Compute the edges and planars keypoints
    this->KeyPointsExtractor->ComputeKeyPoints(pc, laserIdMapping);
    this->CurrentEdgesPoints = this->KeyPointsExtractor->GetEdgePoints();
    this->CurrentPlanarsPoints = this->KeyPointsExtractor->GetPlanarPoints();
    this->CurrentBlobsPoints = this->KeyPointsExtractor->GetBlobPoints();

    // update map using tworld
    this->UpdateMapsUsingTworld();

    // Current keypoints become previous ones
    this->PreviousEdgesPoints = this->CurrentEdgesPoints;
    this->PreviousPlanarsPoints = this->CurrentPlanarsPoints;
    this->PreviousBlobsPoints = this->CurrentBlobsPoints;
    this->NbrFrameProcessed++;
    return;
  }

  // Compute the edges and planars keypoints
  InitTime();
  this->KeyPointsExtractor->ComputeKeyPoints(pc, laserIdMapping);
  this->CurrentEdgesPoints = this->KeyPointsExtractor->GetEdgePoints();
  this->CurrentPlanarsPoints = this->KeyPointsExtractor->GetPlanarPoints();
  this->CurrentBlobsPoints = this->KeyPointsExtractor->GetBlobPoints();
  StopTimeAndDisplay("Keypoints extraction");

  // Perform EgoMotion
  InitTime();
  this->ComputeEgoMotion();
  StopTimeAndDisplay("Ego-Motion");

  // Transform the current keypoints to the referential of the sensor
  // at the end of the frame acquisition.
  // NOTE(review): the undistortion call is currently disabled; the timing
  // scope is kept so the display stays comparable when it is re-enabled.
  InitTime();
  //this->TransformCurrentKeypointsToEnd();
  StopTimeAndDisplay("Undistortion");

  // Perform Mapping
  InitTime();
  this->Mapping();
  StopTimeAndDisplay("Mapping");

  // Current keypoints become previous ones.
  // Fix: PreviousBlobsPoints was only updated for the very first frame and
  // then left permanently stale; keep it in sync like edges and planars.
  this->PreviousEdgesPoints = this->CurrentEdgesPoints;
  this->PreviousPlanarsPoints = this->CurrentPlanarsPoints;
  this->PreviousBlobsPoints = this->CurrentBlobsPoints;
  this->NbrFrameProcessed++;

  // Motion and localization parameters estimation information display
  Eigen::Vector3d angles, trans;
  angles << Rad2Deg(this->Trelative(0)), Rad2Deg(this->Trelative(1)), Rad2Deg(this->Trelative(2));
  trans << this->Trelative(3), this->Trelative(4), this->Trelative(5);
  std::cout << "Ego-Motion estimation: angles = [" << angles.transpose() << "] translation: [" << trans.transpose() << "]" << std::endl;
  angles << Rad2Deg(this->Tworld(0)), Rad2Deg(this->Tworld(1)), Rad2Deg(this->Tworld(2));
  trans << this->Tworld(3), this->Tworld(4), this->Tworld(5);
  // Fix: "Localiazion" typo in the user-facing log message
  std::cout << "Localization estimation: angles = [" << angles.transpose() << "] translation: [" << trans.transpose() << "]"
            << std::endl << std::endl << std::endl;

  // Update Trajectory
  this->Trajectory.emplace_back(Transform(time, this->Tworld));
}

//-----------------------------------------------------------------------------
// Express the point p, given in the lidar frame {L}, in the world frame {W},
// modifying p in place. When undistortion is enabled the per-point
// interpolated transform is used instead of the rigid end-of-frame pose.
void Slam::TransformToWorld(Point& p)
{
  if (this->Undistortion)
  {
    this->ExpressPointInOtherReferencial(p);
    return;
  }

  // Rigid transform using the current world pose estimate
  const Eigen::Matrix3d R = GetRotationMatrix(this->Tworld);
  const Eigen::Vector3d t(this->Tworld(3), this->Tworld(4), this->Tworld(5));
  const Eigen::Vector3d transformed = R * Eigen::Vector3d(p.x, p.y, p.z) + t;

  p.x = transformed(0);
  p.y = transformed(1);
  p.z = transformed(2);
}

//-----------------------------------------------------------------------------
/**
 * @brief Try to match the edge keypoint p with a line fitted on its nearest
 *        neighbors among the previous edge keypoints, and store the
 *        resulting point-to-line distance parameters (A, P, X, time, weight)
 *        into the Avalues / Pvalues / Xvalues / TimeValues /
 *        residualCoefficient buffers used by the ICP optimization.
 * @param kdtreePreviousEdges kd-tree built over the previous edge keypoints.
 * @param R, dT current rigid-transform estimate used to express p in the
 *        frame of the previous keypoints (bypassed when Undistortion is on).
 * @param p edge keypoint of the current frame (by value: modified locally).
 * @param matchingMode EgoMotion or Mapping; selects the thresholds and the
 *        neighborhood search strategy.
 * @return a code describing where the matching stopped:
 *         0 = not enough neighbors, 1 = neighbors too far,
 *         2 = neighborhood not distributed along a line,
 *         3 = degenerate fit (non-finite A),
 *         4 = a neighbor lies too far from the fitted line,
 *         6 = success (parameters stored).
 */
int Slam::ComputeLineDistanceParameters(KDTreePCLAdaptor& kdtreePreviousEdges, Eigen::Matrix3d& R,
                                           Eigen::Vector3d& dT, Point p, MatchingMode matchingMode)
{
  // number of neighbors edge points required to approximate
  // the corresponding edge line
  unsigned int requiredNearest;
  // minimum ratio between the largest and middle eigenvalues for the
  // neighborhood to be accepted as a line
  unsigned int eigenValuesRatio;
  std::vector<int> nearestIndex;
  std::vector<float> nearestDist;

  // maximum distance between keypoints
  // and their computed line
  double squaredMaxDist;

  // Transform the point using the current pose estimation
  Eigen::Vector3d P0(p.x, p.y, p.z);
  Eigen::Vector3d P, n;
  Eigen::Matrix3d A;
  // Time continuous motion model to take
  // into account the rolling shutter distortion
  if (this->Undistortion)
  {
    this->ExpressPointInOtherReferencial(p);
  }
  // Rigid transform
  else
  {
    P = R * P0 + dT;
    p.x = P(0); p.y = P(1); p.z = P(2);
  }

  // Select thresholds and neighborhood search strategy for the current step
  if (matchingMode == MatchingMode::EgoMotion)
  {
    requiredNearest = this->EgoMotionLineDistanceNbrNeighbors;
    eigenValuesRatio = this->EgoMotionLineDistancefactor;
    squaredMaxDist = std::pow(this->EgoMotionMaxLineDistance, 2);
    GetEgoMotionLineSpecificNeighbor(nearestIndex, nearestDist, requiredNearest, kdtreePreviousEdges, p);
    if (nearestIndex.size() < this->EgoMotionMinimumLineNeighborRejection)
    {
      return 0;
    }
    // the search may return fewer neighbors than requested
    requiredNearest = nearestIndex.size();
  }
  else if (matchingMode == MatchingMode::Mapping)
  {
    requiredNearest = this->MappingLineDistanceNbrNeighbors;
    eigenValuesRatio = this->MappingLineDistancefactor;
    squaredMaxDist = std::pow(this->MappingMaxLineDistance, 2);
    GetMappingLineSpecificNeigbbor(nearestIndex, nearestDist, this->MappingLineMaxDistInlier, requiredNearest, kdtreePreviousEdges, p);
    if (nearestIndex.size() < this->MappingMinimumLineNeighborRejection)
    {
      return 0;
    }
    requiredNearest = nearestIndex.size();
  }
  else
  {
    throw "ComputeLineDistanceParameters function got invalide step parameter";
  }

  // if the nearest edges are too far from the
  // current edge keypoint we skip this point.
  // NOTE(review): nearestDist presumably holds squared distances (kd-tree
  // convention) while MaxDistanceForICPMatching reads like a plain distance
  // -- confirm that the units are consistent.
  if (nearestDist[requiredNearest - 1] > this->MaxDistanceForICPMatching)
  {
    return 1;
  }

  // Compute PCA to determine best line approximation
  // of the requiredNearest nearest edges points extracted
  // Thanks to the PCA we will check the shape of the neighborhood
  // and keep it if it is distributed along a line
  Eigen::MatrixXd data(requiredNearest, 3);
  for (unsigned int k = 0; k < requiredNearest; k++)
  {
    Point pt = kdtreePreviousEdges.getInputCloud()->points[nearestIndex[k]];
    data.row(k) << pt.x, pt.y, pt.z;
  }

  // PCA of the neighborhood: mean + (unnormalized) variance-covariance matrix
  Eigen::Vector3d mean = data.colwise().mean();
  Eigen::MatrixXd centered = data.rowwise() - mean.transpose();
  Eigen::Matrix3d varianceCovariance = centered.transpose() * centered;
  Eigen::SelfAdjointEigenSolver<Eigen::Matrix3d> eig(varianceCovariance);

  // Eigen values (SelfAdjointEigenSolver sorts them in increasing order,
  // so D(2) is the largest)
  Eigen::MatrixXd D = eig.eigenvalues();

  // if the first eigen value is significantly higher than
  // the second one, it means the surrounding points are
  // distributed on a edge line
  if (D(2) > eigenValuesRatio * D(1))
  {
    // n is the director vector of the line
    n = eig.eigenvectors().col(2);
  }
  else
  {
    return 2;
  }

  // A = (I-n*n.t).t * (I-n*n.t) = (I - n*n.t)^2
  // since (I-n*n.t) is a symmetric matrix
  // Then it comes A (I-n*n.t)^2 = (I-n*n.t) since
  // A is the matrix of a projection endomorphism
  A = (this->I3 - n * n.transpose());

  // it would be the case if P1 = P2 For instance
  // if the sensor has some dual returns that hit the same point
  if (!std::isfinite(A(0, 0)))
  {
    return 3;
  }

  // Evaluate the distance from the fitted line distribution
  // of the neighborhood
  Eigen::Vector3d Xtemp;
  Point pt;
  double meanSquaredDist = 0;
  for (unsigned int k = 0; k < requiredNearest; ++k)
  {
    pt = kdtreePreviousEdges.getInputCloud()->points[nearestIndex[k]];
    Xtemp(0) = pt.x; Xtemp(1) = pt.y; Xtemp(2) = pt.z;
    // squared point-to-line distance of neighbor k
    double squaredDist = (Xtemp - mean).transpose() * A * (Xtemp - mean);
    if (squaredDist > squaredMaxDist)
    {
      return 4;
    }
    meanSquaredDist += squaredDist;
  }
  meanSquaredDist /= static_cast<double>(requiredNearest);
  // quality in [0, 1]: 1 when the neighbors lie exactly on the line
  double fitQualityCoeff = 1.0 - std::sqrt(std::abs(meanSquaredDist) / squaredMaxDist);

  // s represents the quality of the match
  double s = fitQualityCoeff;

  // store the distance parameters values
  this->Avalues.emplace_back(A);
  this->Pvalues.emplace_back(mean);
  this->Xvalues.emplace_back(P0);
  // NOTE(review): the intensity field appears to carry the point's relative
  // acquisition time here -- confirm against the keypoints extractor.
  this->TimeValues.emplace_back(p.intensity);
  this->residualCoefficient.emplace_back(s);
  return 6;
}

//-----------------------------------------------------------------------------
/**
 * @brief Try to match the planar keypoint p with a plane fitted on its
 *        nearest neighbors among the previous planar keypoints, and store
 *        the resulting point-to-plane distance parameters (A, P, X, time,
 *        weight) into the Avalues / Pvalues / Xvalues / TimeValues /
 *        residualCoefficient buffers used by the ICP optimization.
 * @param kdtreePreviousPlanes kd-tree built over the previous planar keypoints.
 * @param R, dT current rigid-transform estimate used to express p in the
 *        frame of the previous keypoints (bypassed when Undistortion is on).
 * @param p planar keypoint of the current frame (by value: modified locally).
 * @param matchingMode EgoMotion or Mapping; selects the thresholds used.
 * @return a code describing where the matching stopped:
 *         0 = not enough neighbors, 1 = neighbors too far,
 *         2 = neighborhood not distributed on a plane,
 *         3 = degenerate fit (non-finite A),
 *         4 = a neighbor lies too far from the fitted plane,
 *         6 = success (parameters stored).
 */
int Slam::ComputePlaneDistanceParameters(KDTreePCLAdaptor& kdtreePreviousPlanes, Eigen::Matrix3d& R,
                                            Eigen::Vector3d& dT, Point p, MatchingMode matchingMode)
{
  // number of neighbors planar points required to approximate
  // the corresponding plane
  unsigned int requiredNearest;
  // eigenvalue ratios used to accept the neighborhood as planar
  unsigned int significantlyFactor1, significantlyFactor2;

  // maximum distance between keypoints
  // and their computed plane
  double squaredMaxDist;

  if (matchingMode == MatchingMode::EgoMotion)
  {
    significantlyFactor1 = this->EgoMotionPlaneDistancefactor1;
    significantlyFactor2 = this->EgoMotionPlaneDistancefactor2;
    requiredNearest = this->EgoMotionPlaneDistanceNbrNeighbors;
    squaredMaxDist = std::pow(this->EgoMotionMaxPlaneDistance, 2);
  }
  else if (matchingMode == MatchingMode::Mapping)
  {
    significantlyFactor1 = this->MappingPlaneDistancefactor1;
    significantlyFactor2 = this->MappingPlaneDistancefactor2;
    requiredNearest = this->MappingPlaneDistanceNbrNeighbors;
    squaredMaxDist = std::pow(this->MappingMaxPlaneDistance, 2);
  }
  else
  {
    // NOTE(review): this message names ComputeLineDistanceParameters -- it
    // looks like a copy/paste leftover from the line version.
    throw "ComputeLineDistanceParameters function got invalide step parameter";
  }

  Eigen::Vector3d P, n;
  Eigen::Matrix3d A;

  // Transform the point using the current pose estimation
  Eigen::Vector3d P0(p.x, p.y, p.z);

  // Time continuous motion model to take
  // into account the rolling shutter distortion
  if (this->Undistortion)
  {
    this->ExpressPointInOtherReferencial(p);
  }
  // Rigid transform
  else
  {
    P = R * P0 + dT;
    p.x = P(0); p.y = P(1); p.z = P(2);
  }

  std::vector<int> nearestIndex(requiredNearest, -1);
  std::vector<double> nearestDist(requiredNearest, -1.0);
  kdtreePreviousPlanes.query(p, requiredNearest, nearestIndex.data(), nearestDist.data());

  // It means that there are not enough keypoints in the neighborhood
  // (the sentinel -1 was not overwritten by the query)
  if (nearestIndex[requiredNearest - 1] == -1)
  {
    return 0;
  }

  // if the nearest planars are too far from the
  // current planar keypoint we skip this point.
  // NOTE(review): nearestDist presumably holds squared distances (kd-tree
  // convention) while MaxDistanceForICPMatching reads like a plain distance
  // -- confirm that the units are consistent.
  if (nearestDist[requiredNearest - 1] > this->MaxDistanceForICPMatching)
  {
    return 1;
  }

  // Compute PCA to determine best plane approximation
  // of the requiredNearest nearest planar points extracted
  // Thanks to the PCA we will check the shape of the neighborhood
  // and keep it if it is distributed on a plane
  Eigen::MatrixXd data(requiredNearest,3);
  for (unsigned int k = 0; k < requiredNearest; k++)
  {
    Point pt = kdtreePreviousPlanes.getInputCloud()->points[nearestIndex[k]];
    data.row(k) << pt.x, pt.y, pt.z;
  }
  // PCA of the neighborhood: mean + (unnormalized) variance-covariance matrix
  Eigen::Vector3d mean = data.colwise().mean();
  Eigen::MatrixXd centered = data.rowwise() - mean.transpose();
  Eigen::Matrix3d varianceCovariance = centered.transpose() * centered;
  Eigen::SelfAdjointEigenSolver<Eigen::Matrix3d> eig(varianceCovariance);

  // Eigenvalues (SelfAdjointEigenSolver sorts them in increasing order,
  // so D(0) is the smallest)
  Eigen::VectorXd D = eig.eigenvalues();

  // if the second eigen value is close to the highest one
  // and bigger than the smallest one it means that the points
  // are distributed among a plane
  if ( (significantlyFactor2 * D(1) > D(2)) && (D(1) > significantlyFactor1 * D(0)) )
  {
    // n is the normal of the plane (eigenvector of the smallest eigenvalue)
    n = eig.eigenvectors().col(0);
  }
  else
  {
    return 2;
  }

  // A = n*n.t
  A = n * n.transpose();

  // it would be the case if P1 = P2, P1 = P3
  // or P3 = P2. For instance if the sensor has
  // some dual returns that hit the same point
  if (!std::isfinite(A(0, 0)))
  {
    return 3;
  }

  // Evaluate the distance from the fitted plane distribution
  // of the neighborhood
  Eigen::Vector3d Xtemp;
  Point pt;
  double meanSquaredDist = 0;
  for (unsigned int k = 0; k < requiredNearest; ++k)
  {
    pt = kdtreePreviousPlanes.getInputCloud()->points[nearestIndex[k]];
    Xtemp(0) = pt.x; Xtemp(1) = pt.y; Xtemp(2) = pt.z;
    // squared point-to-plane distance of neighbor k
    double squaredDist = (Xtemp - mean).transpose() * A * (Xtemp - mean);
    if (squaredDist > squaredMaxDist)
    {
      return 4;
    }
    meanSquaredDist += squaredDist;
  }
  meanSquaredDist /= static_cast<double>(requiredNearest);
  // quality in [0, 1]: 1 when the neighbors lie exactly on the plane
  double fitQualityCoeff = 1.0 - std::sqrt(std::abs(meanSquaredDist) / squaredMaxDist);

  // s represents the quality of the match
  double s = fitQualityCoeff;

  // store the distance parameters values
  this->Avalues.emplace_back(A);
  this->Pvalues.emplace_back(mean);
  this->Xvalues.emplace_back(P0);
  this->residualCoefficient.emplace_back(s);
  // NOTE(review): the intensity field appears to carry the point's relative
  // acquisition time here -- confirm against the keypoints extractor.
  this->TimeValues.emplace_back(p.intensity);
  return 6;
}

//-----------------------------------------------------------------------------
int Slam::ComputeBlobsDistanceParameters(pcl::KdTreeFLANN<Slam::Point>::Ptr kdtreePreviousBlobs, Eigen::Matrix3d& R,
                                            Eigen::Vector3d& dT, Point p, MatchingMode /*matchingMode*/)
{
  // Match the blob keypoint p against an ellipsoid fitted (through PCA) on
  // its nearest neighbors among the previous blob keypoints, and store the
  // resulting Mahalanobis-distance parameters (A, mean point, initial point,
  // residual coefficient, time). Returns the rejection cause index,
  // 5 meaning "match kept".

  // number of neighbor blob points required to approximate
  // the corresponding ellipsoid
  unsigned int requiredNearest = 25;

  // maximum distance between the keypoint and its farthest neighbor.
  // NOTE(review): nearestKSearch outputs *squared* distances, while this
  // threshold is used unsquared below — confirm the intended semantics.
  double maxDist = this->MaxDistanceForICPMatching;

  // squared tolerance on the neighborhood diameter
  float maxDiameterTol = std::pow(4.0, 2);

  // Useful variables
  Eigen::Vector3d P0, P;
  Eigen::Matrix3d A;

  // Transform the point using the current pose estimation
  P << p.x, p.y, p.z;
  P0 = P;
  P = R * P + dT;
  p.x = P(0); p.y = P(1); p.z = P(2);

  std::vector<int> nearestIndex;
  std::vector<float> nearestDist;
  kdtreePreviousBlobs->nearestKSearch(p, requiredNearest, nearestIndex, nearestDist);

  // It means that there are not enough keypoints in the neighborhood
  if (nearestIndex.size() < requiredNearest)
  {
    return 0;
  }

  // if the farthest neighbor blob is too far from the
  // current blob keypoint we skip this point.
  if (nearestDist[requiredNearest - 1] > maxDist)
  {
    return 1;
  }

  // Check the diameter of the neighborhood: if it is too big we do not keep
  // this blob, since the fitted ellipsoid is assumed to encode the local
  // neighborhood shape. Only pairs (i, j) with j > i are evaluated: the
  // squared distance is symmetric and zero for i == j, so the resulting
  // maximum is identical while halving the work.
  float maxDiameter = 0;
  for (unsigned int i = 0; i < requiredNearest; ++i)
  {
    const Point& pt1 = kdtreePreviousBlobs->getInputCloud()->points[nearestIndex[i]];
    for (unsigned int j = i + 1; j < requiredNearest; ++j)
    {
      const Point& pt2 = kdtreePreviousBlobs->getInputCloud()->points[nearestIndex[j]];
      float neighborhoodDiameter = std::pow(pt1.x - pt2.x, 2) + std::pow(pt1.y - pt2.y, 2) + std::pow(pt1.z - pt2.z, 2);
      maxDiameter = std::max(maxDiameter, neighborhoodDiameter);
    }
  }
  if (maxDiameter > maxDiameterTol)
  {
    return 2;
  }

  // Compute PCA to determine the best ellipsoid approximation
  // of the requiredNearest nearest blob points extracted.
  // Thanks to the PCA we will check the shape of the neighborhood and
  // tune a distance function adapted to the distribution
  // (Mahalanobis distance)
  Eigen::MatrixXd data(requiredNearest, 3);
  for (unsigned int k = 0; k < requiredNearest; k++)
  {
    const Point& pt = kdtreePreviousBlobs->getInputCloud()->points[nearestIndex[k]];
    data.row(k) << pt.x, pt.y, pt.z;
  }

  Eigen::Vector3d mean = data.colwise().mean();
  Eigen::MatrixXd centered = data.rowwise() - mean.transpose();
  Eigen::Matrix3d varianceCovariance = centered.transpose() * centered;

  // Sigma, the inverse of the covariance matrix, encodes the Mahalanobis
  // distance; check first that the covariance matrix is invertible
  if (std::abs(varianceCovariance.determinant()) < 1e-6)
  {
    return 3;
  }
  Eigen::Matrix3d sigma = varianceCovariance.inverse();
  Eigen::SelfAdjointEigenSolver<Eigen::Matrix3d> eig(sigma);

  // rescale the variance covariance matrix to preserve the
  // shape of the Mahalanobis distance while removing the
  // variance values scaling
  Eigen::MatrixXd D = eig.eigenvalues();
  Eigen::MatrixXd U = eig.eigenvectors();
  D = D / D(2);
  Eigen::Matrix3d diagD = Eigen::Matrix3d::Zero();
  diagD(0, 0) = D(0); diagD(1, 1) = D(1); diagD(2, 2) = D(2);
  A = U * diagD * U.transpose();

  // reject degenerate matrices (can happen e.g. with duplicated points
  // coming from sensor dual returns)
  if (!std::isfinite(A.determinant()))
  {
    return 4;
  }

  // Residual weight; could be scaled using the distance between the point
  // and its matching blob to prevent wrong matches from pulling the point
  // cloud in a bad direction
  double s = 1.0;//1.0 - nearestDist[requiredNearest - 1] / maxDist;

  // store the distance parameters values.
  // Bug fix: also store the point time, as the sibling plane/line parameter
  // functions do; otherwise TimeValues is shorter than Xvalues and the
  // undistortion residuals in Mapping read TimeValues out of bounds.
  this->Avalues.emplace_back(A);
  this->Pvalues.emplace_back(mean);
  this->Xvalues.emplace_back(P0);
  this->residualCoefficient.emplace_back(s);
  this->TimeValues.emplace_back(p.intensity);
  return 5;
}

//-----------------------------------------------------------------------------
void Slam::GetEgoMotionLineSpecificNeighbor(std::vector<int>& nearestValid, std::vector<float>& nearestValidDist,
                                               unsigned int nearestSearch, KDTreePCLAdaptor& kdtreePreviousEdges, Point p)
{
  // clear vector
  nearestValid.clear();
  nearestValid.resize(0);
  nearestValidDist.clear();
  nearestValidDist.resize(0);

  // get nearest neighbor of the query point
  std::vector<int> nearestIndex(nearestSearch, -1);
  std::vector<double> nearestDist(nearestSearch, -1.0);
  kdtreePreviousEdges.query(p, nearestSearch, nearestIndex.data(), nearestDist.data());

  // take the closest point
  std::vector<int> idAlreadyTook(this->KeyPointsExtractor->GetNLasers(), 0);
  Point closest = kdtreePreviousEdges.getInputCloud()->points[nearestIndex[0]];
  nearestValid.push_back(nearestIndex[0]);
  nearestValidDist.push_back(nearestDist[0]);

  // invalid all possible points that
  // are on the same scan line than the
  // closest one
  idAlreadyTook[(int)closest.laserId] = 1;

  // invalid all possible points from scan
  // lines that are too far from the closest one
  for (int k = 0; k < this->KeyPointsExtractor->GetNLasers(); ++k)
  {
    if (std::abs(int(closest.laserId) - k) > 4.0)
    {
      idAlreadyTook[k] = 1;
    }
  }

  // Make a selection among the neighborhood
  // of the query point. We can only take one edge
  // per scan line
  int id;
  for (unsigned int k = 1; k < nearestIndex.size(); ++k)
  {
    id = kdtreePreviousEdges.getInputCloud()->points[nearestIndex[k]].laserId;
    if ( (idAlreadyTook[id] < 1) && (nearestDist[k] < this->MaxDistanceForICPMatching))
    {
      idAlreadyTook[id] = 1;
      nearestValid.push_back(nearestIndex[k]);
      nearestValidDist.push_back(nearestDist[k]);
    }
  }
}

//-----------------------------------------------------------------------------
void Slam::GetMappingLineSpecificNeigbbor(std::vector<int>& nearestValid, std::vector<float>& nearestValidDist, double maxDistInlier,
                                             unsigned int nearestSearch, KDTreePCLAdaptor& kdtreePreviousEdges, Point p)
{
  // Select, among the neighbors of p in the previous edge keypoints, the
  // subset that best fits a 3D line (RANSAC-like): for every line joining
  // the closest neighbor to another neighbor, count the neighbors within
  // maxDistInlier of that line and keep the set with the most inliers.
  // Results are written into nearestValid / nearestValidDist.

  // reset output vectors (clear() alone suffices; resize(0) was redundant)
  nearestValid.clear();
  nearestValidDist.clear();

  // square the threshold to avoid square roots in the comparisons below
  maxDistInlier = std::pow(maxDistInlier, 2);

  // Take the neighborhood of the query point
  std::vector<int> nearestIndex(nearestSearch, -1);
  std::vector<double> nearestDist(nearestSearch, -1.0);
  kdtreePreviousEdges.query(p, nearestSearch, nearestIndex.data(), nearestDist.data());

  // the closest point is always kept
  std::vector<std::vector<int> > inliersList;
  Point closest = kdtreePreviousEdges.getInputCloud()->points[nearestIndex[0]];
  nearestValid.push_back(nearestIndex[0]);
  nearestValidDist.push_back(nearestDist[0]);

  Eigen::Vector3d P1, P2, dir, Pcdt;
  Eigen::Matrix3d D;
  P1 << closest.x, closest.y, closest.z;
  Point pclP2;
  Point inlierCandidate;

  // Loop over the other neighbors of the neighborhood. For each of them,
  // build the line between the closest point and the current point, and
  // count the neighbors that fit this line (inliers).
  for (unsigned int ptIndex = 1; ptIndex < nearestIndex.size(); ++ptIndex)
  {
    std::vector<int> inlierIndex;
    pclP2 = kdtreePreviousEdges.getInputCloud()->points[nearestIndex[ptIndex]];
    P2 << pclP2.x, pclP2.y, pclP2.z;
    dir = (P2 - P1).normalized();
    // D is built so that (x - P1)^t * D * (x - P1) is the squared
    // point-to-line distance of x to the line (P1, dir)
    D = this->I3 - dir * dir.transpose();
    D = D.transpose() * D;

    for (unsigned int candidateIndex = 1; candidateIndex < nearestIndex.size(); ++candidateIndex)
    {
      inlierCandidate = kdtreePreviousEdges.getInputCloud()->points[nearestIndex[candidateIndex]];
      Pcdt << inlierCandidate.x, inlierCandidate.y, inlierCandidate.z;
      if ( (Pcdt - P1).transpose() * D * (Pcdt - P1) < maxDistInlier)
      {
        inlierIndex.push_back(candidateIndex);
      }
    }
    inliersList.push_back(inlierIndex);
  }

  // keep the line with the most inliers
  std::size_t maxInliers = 0;
  int indexMaxInliers = -1;
  for (unsigned int k = 0; k < inliersList.size(); ++k)
  {
    if (inliersList[k].size() > maxInliers)
    {
      maxInliers = inliersList[k].size();
      indexMaxInliers = k;
    }
  }

  // Fill the output with the inliers of the best line.
  // Bug fix: guard against the degenerate case where no line was retained
  // (e.g. a single neighbor returned, or all inlier sets empty): indexing
  // inliersList with -1 was undefined behavior.
  if (indexMaxInliers >= 0)
  {
    for (unsigned int k = 0; k < inliersList[indexMaxInliers].size(); ++k)
    {
      nearestValid.push_back(nearestIndex[inliersList[indexMaxInliers][k]]);
      nearestValidDist.push_back(nearestDist[inliersList[indexMaxInliers][k]]);
    }
  }
}

//-----------------------------------------------------------------------------
void Slam::ComputeEgoMotion()
{
  // Estimate the relative rigid motion of the sensor between the previous
  // and the current frame: current edge / planar keypoints are matched to
  // the previous ones (ICP step), then the 6-DOF parameters Trelative are
  // refined by minimizing the matching distances with a Levenberg-Marquardt
  // solver (Ceres). Matching and minimization alternate until the LM solver
  // converges in a single step or EgoMotionICPMaxIter is reached.

  // Initialize the per-keypoint rejection-cause vectors
  this->EdgePointRejectionEgoMotion.clear(); this->EdgePointRejectionEgoMotion.resize(this->CurrentEdgesPoints->size());
  this->PlanarPointRejectionEgoMotion.clear(); this->PlanarPointRejectionEgoMotion.resize(this->CurrentPlanarsPoints->size());
  // Check that there are enough points to compute the EgoMotion
  if ((this->CurrentEdgesPoints->size() == 0 || this->PreviousEdgesPoints->size() == 0) &&
      (this->CurrentPlanarsPoints->size() == 0 || this->PreviousPlanarsPoints->size() == 0))
  {
    this->EgoMotionEdgesPointsUsed = 0;
    this->EgoMotionPlanesPointsUsed = 0;
    std::cout << "Not enought keypoints, EgoMotion skipped for this frame" << std::endl;
    return;
  }

  // reset the relative transform and the 12 within-frame motion parameters
  // (pose at frame start: indices 0-5, pose at frame end: indices 6-11,
  // as read by CreateWithinFrameTrajectory)
  this->Trelative = Eigen::Matrix<double, 6, 1>::Zero();
  this->MotionParametersEgoMotion = Eigen::VectorXd::Zero(12, 1);

  // kd-tree to process fast nearest neighbor
  // among the keypoints of the previous pointcloud
  KDTreePCLAdaptor kdtreePreviousEdges(this->PreviousEdgesPoints);
  KDTreePCLAdaptor kdtreePreviousPlanes(this->PreviousPlanarsPoints);

  std::cout << "========== Ego-Motion ==========" << std::endl;
  std::cout << "previous edges: " << this->PreviousEdgesPoints->size() << " current edges: " << this->CurrentEdgesPoints->size() << std::endl;
  std::cout << "previous planes: " << this->PreviousPlanarsPoints->size() << " current planes: " << this->CurrentPlanarsPoints->size() << std::endl;

  unsigned int usedEdges = 0;
  unsigned int usedPlanes = 0;
  Point currentPoint;

  // Pre-size the matching-data containers to the worst case (all keypoints
  // matched). NOTE(review): Xvalues is reserve()d while the other containers
  // are resize()d; ResetDistanceParameters empties all of them before use,
  // so elements are only added through emplace_back — confirm resize here
  // is intentional.
  unsigned int toReserve =   this->CurrentEdgesPoints->size()
                           + this->CurrentPlanarsPoints->size();
  this->Xvalues.reserve(toReserve);
  this->Avalues.resize(toReserve);
  this->Pvalues.resize(toReserve);
  this->TimeValues.resize(toReserve);
  this->residualCoefficient.resize(toReserve);



  // ICP - Levenberg-Marquardt loop:
  // At each step of this loop an ICP matching is performed.
  // Once the keypoints are matched, we estimate the 6-DOF
  // parameters by minimizing a non-linear least square cost
  // function using a Levenberg-Marquardt algorithm
  for (unsigned int icpCount = 0; icpCount < this->EgoMotionICPMaxIter; ++icpCount)
  {
    // Rotation and translation at this step
    Eigen::Matrix3d R = GetRotationMatrix(this->Trelative);
    Eigen::Vector3d T(this->Trelative(3), this->Trelative(4), this->Trelative(5));

    // clear all keypoints matching data
    this->ResetDistanceParameters();

    // Init the undistortion interpolator
    if (this->Undistortion)
    {
      this->CreateWithinFrameTrajectory(this->WithinFrameTrajectory, WithinFrameTrajMode::EgoMotionTraj);
    }

    // loop over edges if there are enough previous edge keypoints
    if (this->PreviousEdgesPoints->size() > this->EgoMotionLineDistanceNbrNeighbors)
    {
      for (unsigned int edgeIndex = 0; edgeIndex < this->CurrentEdgesPoints->size(); ++edgeIndex)
      {
        // Find the closest correspondence edge line of the current edge point.
        // Compute the parameters of the point - line distance
        // i.e A = (I - n*n.t)^2 with n being the director vector
        // and P a point of the line
        currentPoint = this->CurrentEdgesPoints->points[edgeIndex];
        int rejectionIndex = this->ComputeLineDistanceParameters(kdtreePreviousEdges, R, T, currentPoint, MatchingMode::EgoMotion);
        this->EdgePointRejectionEgoMotion[edgeIndex] = rejectionIndex;
        this->MatchRejectionHistogramLine[rejectionIndex] += 1;
      }
    }

    // loop over planars if there are enough previous planar keypoints
    if (this->PreviousPlanarsPoints->size() > this->EgoMotionPlaneDistanceNbrNeighbors)
    {
      for (unsigned int planarIndex = 0; planarIndex < this->CurrentPlanarsPoints->size(); ++planarIndex)
      {
        // Find the closest correspondence plane of the current planar point.
        // Compute the parameters of the point - plane distance
        // i.e A = n * n.t with n being a normal of the plane
        // and P a point of the plane
        currentPoint = this->CurrentPlanarsPoints->points[planarIndex];
        int rejectionIndex = this->ComputePlaneDistanceParameters(kdtreePreviousPlanes, R, T, currentPoint, MatchingMode::EgoMotion);
        this->PlanarPointRejectionEgoMotion[planarIndex] = rejectionIndex;
        this->MatchRejectionHistogramPlane[rejectionIndex] += 1;
      }
    }

    // histogram bin 6 counts the successfully matched keypoints
    // (rejection code 6 is the "match kept" code returned by
    // ComputePlaneDistanceParameters; presumably the same for lines)
    usedEdges = this->MatchRejectionHistogramLine[6];
    usedPlanes = this->MatchRejectionHistogramPlane[6];
    // Skip this frame if there are too few geometric
    // keypoints matched
    if ((usedPlanes + usedEdges) < 20)
    {
      std::cout << "Too few geometric features, frame skipped" << std::endl;
      break;
    }

    // robust-loss scale linearly interpolated from the initial to the final
    // value over the ICP iterations: matches are trusted more and more as
    // the pose estimate converges
    double lossScale = this->EgoMotionInitLossScale + static_cast<double>(icpCount) * (this->EgoMotionFinalLossScale - this->EgoMotionInitLossScale) / (1.0 * this->EgoMotionICPMaxIter);

    // We want to estimate our 6-DOF parameters using a non
    // linear least square minimization. The non linear part
    // comes from the Euler Angle parametrization of the rotation
    // endomorphism of SO(3). To minimize it, we use CERES to perform
    // the Levenberg-Marquardt algorithm.
    ceres::Problem problem;
    for (unsigned int k = 0; k < Xvalues.size(); ++k)
    {
      if (this->Undistortion)
      {
        // 12-parameter residual: the pose is interpolated within the frame
        // using the keypoint time, to compensate the motion distortion
        ceres::CostFunction* cost_function = new ceres::AutoDiffCostFunction<CostFunctions::MahalanobisDistanceInterpolatedMotionResidual, 1, 12>(
                                             new CostFunctions::MahalanobisDistanceInterpolatedMotionResidual(
                                                this->Avalues[k], this->Pvalues[k], this->Xvalues[k],
                                                this->TimeValues[k], this->residualCoefficient[k]));
        problem.AddResidualBlock(cost_function, new ceres::ScaledLoss(new ceres::ArctanLoss(lossScale), this->residualCoefficient[k],
                                                                      ceres::TAKE_OWNERSHIP), this->MotionParametersEgoMotion.data());
      }
      else
      {
        // 6-parameter residual: a single rigid transform for the whole frame
        ceres::CostFunction* cost_function = new ceres::AutoDiffCostFunction<CostFunctions::MahalanobisDistanceAffineIsometryResidual, 1, 6>(
                                             new CostFunctions::MahalanobisDistanceAffineIsometryResidual(this->Avalues[k], this->Pvalues[k],
                                                                                                          this->Xvalues[k], this->residualCoefficient[k]));
        problem.AddResidualBlock(cost_function, new ceres::ScaledLoss(new ceres::ArctanLoss(lossScale), this->residualCoefficient[k], ceres::TAKE_OWNERSHIP), this->Trelative.data());
      }
    }

    ceres::Solver::Options options;
    options.max_num_iterations = this->EgoMotionLMMaxIter;
    options.linear_solver_type = ceres::DENSE_QR;
    options.minimizer_progress_to_stdout = false;

    ceres::Solver::Summary summary;
    ceres::Solve(options, &problem, &summary);
    std::cout << summary.BriefReport() << std::endl;

    // If no L-M iteration has been made since the
    // last ICP matching it means we reached a local
    // minimum for the ICP-LM algorithm
    if (summary.num_successful_steps == 1)
    {
      break;
    }
  }

  this->EgoMotionEdgesPointsUsed = usedEdges;
  this->EgoMotionPlanesPointsUsed  = usedPlanes;
  std::cout << "used keypoints : " << this->Xvalues.size() << std::endl;
  std::cout << "edges : " << usedEdges << " planes : " << usedPlanes << std::endl;

  // Integrate the relative motion
  // to the world transformation
  this->UpdateTworldUsingTrelative();
}

//-----------------------------------------------------------------------------
void Slam::Mapping()
{
  // Refine the world pose Tworld by matching the current keypoints against
  // the local rolling-grid maps (edges, planes and, unless FastSlam, blobs),
  // minimizing the matching distances with Levenberg-Marquardt (Ceres).
  // Also estimates the pose covariance, updates the maps with the newly
  // registered keypoints, and stores the resulting pose.

  // Check that there are enough key-points to compute the Mapping
  if (this->CurrentEdgesPoints->size() == 0 && this->CurrentPlanarsPoints->size() == 0)
  {
    // 10 is used as a pessimistic default variance error when skipping
    this->MappingVarianceError = 10;
    this->MappingEdgesPointsUsed = 0;
    this->MappingPlanesPointsUsed = 0;
    this->MappingBlobsPointsUsed = 0;
    // update maps
    this->UpdateMapsUsingTworld();
    std::cout << "Not enought keypoints, Mapping skipped for this frame" << std::endl;
    return;
  }
    this->EdgePointRejectionMapping.clear(); this->EdgePointRejectionMapping.resize(this->CurrentEdgesPoints->size());
    this->PlanarPointRejectionMapping.clear(); this->PlanarPointRejectionMapping.resize(this->CurrentPlanarsPoints->size());

  // Set the farthest point to reduce the map to the minimum size
  this->SetLidarMaximunRange(this->KeyPointsExtractor->GetFarestKeypointDist());

  // Update motion model parameters: seed the start-of-frame pose with the
  // previous end-of-frame pose.
  // NOTE(review): std::copy with data()+5 as end copies only 5 values
  // (indices 0-4) into slots 6-10; if the full 6-DOF pose was intended the
  // end iterator should be data()+6 — confirm.
  if (this->Undistortion)
  {
    std::copy(this->MotionParametersMapping.data(),
              this->MotionParametersMapping.data() + 5,
              this->MotionParametersMapping.data() + 6);
  }

  // get keypoints from the map around the current pose
  pcl::PointCloud<Slam::Point>::Ptr subEdgesPointsLocalMap = this->EdgesPointsLocalMap->Get(this->Tworld);
  pcl::PointCloud<Slam::Point>::Ptr subPlanarPointsLocalMap = this->PlanarPointsLocalMap->Get(this->Tworld);

  // construct kd-trees for fast closest point search
  KDTreePCLAdaptor kdtreeEdges(subEdgesPointsLocalMap);
  KDTreePCLAdaptor kdtreePlanes(subPlanarPointsLocalMap);
  pcl::KdTreeFLANN<Slam::Point>::Ptr kdtreeBlobs;

  std::cout << "========== Mapping ==========" << std::endl;
  std::cout << "Edges extracted from map: " << subEdgesPointsLocalMap->points.size()
            << "Planes extracted from map: " << subPlanarPointsLocalMap->points.size() << std::endl;

  // blobs are only used in non-fast mode
  if (!this->FastSlam)
  {
    pcl::PointCloud<Slam::Point>::Ptr subBlobPointsLocalMap = this->BlobsPointsLocalMap->Get(this->Tworld);
    kdtreeBlobs.reset(new pcl::KdTreeFLANN<Slam::Point>());
    kdtreeBlobs->setInputCloud(subBlobPointsLocalMap);
    std::cout << "blobs map: " << subBlobPointsLocalMap->points.size() << std::endl;
  }

  // Information about matches
  unsigned int usedEdges = 0;
  unsigned int usedPlanes = 0;
  unsigned int usedBlobs = 0;

  Point currentPoint;

  // Pre-size the matching-data containers to the worst case (all keypoints
  // matched). NOTE(review): Xvalues is reserve()d while the other containers
  // are resize()d, mirroring ComputeEgoMotion — confirm intentional.
  unsigned int toReserve =   this->CurrentEdgesPoints->size()
                           + this->CurrentPlanarsPoints->size()
                           + this->CurrentBlobsPoints->size();
  this->Xvalues.reserve(toReserve);
  this->Avalues.resize(toReserve);
  this->Pvalues.resize(toReserve);
  this->TimeValues.resize(toReserve);
  this->residualCoefficient.resize(toReserve);

  // ICP - Levenberg-Marquardt loop:
  // At each step of this loop an ICP matching is performed.
  // Once the keypoints are matched, we estimate the 6-DOF
  // parameters by minimizing a non-linear least square cost
  // function using a Levenberg-Marquardt algorithm
  for (unsigned int icpCount = 0; icpCount < this->MappingICPMaxIter; ++icpCount)
  {
    // clear all keypoints matching data
    this->ResetDistanceParameters();

    // Init the undistortion interpolator
    if (this->Undistortion)
    {
      this->CreateWithinFrameTrajectory(this->WithinFrameTrajectory, WithinFrameTrajMode::MappingTraj);
    }

    // Rotation and position at this step
    Eigen::Matrix3d R = GetRotationMatrix(this->Tworld);
    Eigen::Vector3d T(this->Tworld(3), this->Tworld(4), this->Tworld(5));

    // loop over edges (only if the extracted edge sub-map is large enough)
    if (this->CurrentEdgesPoints->size() > 0 && subEdgesPointsLocalMap->points.size() > 10)
    {
      for (unsigned int edgeIndex = 0; edgeIndex < this->CurrentEdgesPoints->size(); ++edgeIndex)
      {
        // Find the closest correspondence edge line of the current edge point
        currentPoint = this->CurrentEdgesPoints->points[edgeIndex];
        int rejectionIndex = this->ComputeLineDistanceParameters(kdtreeEdges, R, T, currentPoint, MatchingMode::Mapping);
        this->EdgePointRejectionMapping[edgeIndex] = rejectionIndex;
        this->MatchRejectionHistogramLine[rejectionIndex] += 1;
        // accepted edge matches = entries appended to Xvalues so far
        usedEdges = this->Xvalues.size();
      }
    }
    // loop over surfaces (only if the extracted plane sub-map is large enough)
    if (this->CurrentPlanarsPoints->size() > 0 && subPlanarPointsLocalMap->size() > 10)
    {
      for (unsigned int planarIndex = 0; planarIndex < this->CurrentPlanarsPoints->size(); ++planarIndex)
      {
        // Find the closest correspondence plane of the current planar point
        currentPoint = this->CurrentPlanarsPoints->points[planarIndex];
        int rejectionIndex = this->ComputePlaneDistanceParameters(kdtreePlanes, R, T, currentPoint, MatchingMode::Mapping);
        this->PlanarPointRejectionMapping[planarIndex] = rejectionIndex;
        this->MatchRejectionHistogramPlane[rejectionIndex] += 1;
        // accepted plane matches = growth of Xvalues since the edge loop
        usedPlanes = this->Xvalues.size() - usedEdges;
      }
    }

    // blobs are only matched after a few frames, once the blob map is filled.
    // NOTE(review): the blob rejection code is discarded here and
    // MatchRejectionHistogramBlob is never updated — confirm intentional.
    if (!this->FastSlam && this->NbrFrameProcessed > 10)
    {
      // loop over blobs
      for (unsigned int blobIndex = 0; blobIndex < this->CurrentBlobsPoints->size(); ++blobIndex)
      {
        // Find the closest corresponding blob of the current blob point
        currentPoint = this->CurrentBlobsPoints->points[blobIndex];
        this->ComputeBlobsDistanceParameters(kdtreeBlobs, R, T, currentPoint, MatchingMode::Mapping);
        usedBlobs = this->Xvalues.size() - usedPlanes - usedEdges;
      }
    }

    // Skip this frame if there are too few geometric keypoints matched
    if ((usedPlanes + usedEdges + usedBlobs) < 20)
    {
      std::cout << "Too few geometric features, loop breaked" << std::endl;
      std::cout << "planes: " << usedPlanes << " edges: " << usedEdges << " Blobs: " << usedBlobs << std::endl;
      break;
    }

    // robust-loss scale linearly interpolated from the initial to the final
    // value over the ICP iterations
    double lossScale = this->MappingInitLossScale + static_cast<double>(icpCount) * (this->MappingFinalLossScale - this->MappingInitLossScale) / (1.0 * this->MappingICPMaxIter);

    // We want to estimate our 6-DOF parameters using a non
    // linear least square minimization. The non linear part
    // comes from the Euler Angle parametrization of the rotation
    // endomorphism SO(3). To minimize it we use CERES to perform
    // the Levenberg-Marquardt algorithm.
    ceres::Problem problem;
    for (unsigned int k = 0; k < Xvalues.size(); ++k)
    {
      if (this->Undistortion)
      {
        // 12-parameter residual: the pose is interpolated within the frame
        // using the keypoint time, to compensate the motion distortion
        ceres::CostFunction* cost_function = new ceres::AutoDiffCostFunction<CostFunctions::MahalanobisDistanceInterpolatedMotionResidual, 1, 12>(
                                                     new CostFunctions::MahalanobisDistanceInterpolatedMotionResidual(
                                                     this->Avalues[k], this->Pvalues[k], this->Xvalues[k],
                                                     this->TimeValues[k], this->residualCoefficient[k]));
        problem.AddResidualBlock(cost_function, new ceres::ScaledLoss(new ceres::ArctanLoss(lossScale), this->residualCoefficient[k],
                                                                      ceres::TAKE_OWNERSHIP), this->MotionParametersMapping.data());
      }
      else
      {
        // 6-parameter residual: a single rigid transform for the whole frame
        ceres::CostFunction* cost_function = new ceres::AutoDiffCostFunction<CostFunctions::MahalanobisDistanceAffineIsometryResidual, 1, 6>(
                                             new CostFunctions::MahalanobisDistanceAffineIsometryResidual(this->Avalues[k], this->Pvalues[k],
                                                                                                          this->Xvalues[k], this->residualCoefficient[k]));
        problem.AddResidualBlock(cost_function, new ceres::ScaledLoss(new ceres::ArctanLoss(lossScale), this->residualCoefficient[k], ceres::TAKE_OWNERSHIP), this->Tworld.data());
      }
    }

    ceres::Solver::Options options;
    options.max_num_iterations = this->MappingLMMaxIter;
    options.linear_solver_type = ceres::DENSE_QR;
    options.minimizer_progress_to_stdout = false;

    ceres::Solver::Summary summary;
    ceres::Solve(options, &problem, &summary);
    std::cout << summary.BriefReport() << std::endl;

    // If no L-M iteration has been made since the
    // last ICP matching it means we reached a local
    // minimum for the ICP-LM algorithm. The covariance is only estimated
    // in the non-undistorted case, on the final iteration.
    if (((summary.num_successful_steps == 1) ||
        (icpCount == (this->MappingICPMaxIter - 1))) &&
        !this->Undistortion)
    {
      // Now evaluate the quality of the estimated parameters using an
      // approximate computation of the variance covariance matrix.
      // Covariance computation options
      ceres::Covariance::Options covOptions;
      covOptions.apply_loss_function = true;
      covOptions.algorithm_type = ceres::CovarianceAlgorithmType::DENSE_SVD;

      // Computation of the variance-covariance matrix
      ceres::Covariance covariance(covOptions);
      std::vector<std::pair<const double*, const double* > > covariance_blocks;
      covariance_blocks.push_back(std::make_pair(this->Tworld.data(), this->Tworld.data()));
      covariance.Compute(covariance_blocks, &problem);
      double covarianceMat[6 * 6];
      covariance.GetCovarianceBlock(this->Tworld.data(), this->Tworld.data(), covarianceMat);
      // copy the raw row-major buffer into the 6x6 member matrix
      for (int i = 0; i < 6; ++i)
        for (int j = 0; j < 6; ++j)
          this->TworldCovariance(i, j) = covarianceMat[i + 6 * j];
      break;
    }
  }

  // the largest covariance eigenvalue is used as the mapping variance error
  Eigen::SelfAdjointEigenSolver<Eigen::MatrixXd> eig(this->TworldCovariance);
  Eigen::MatrixXd D = eig.eigenvalues();

  this->MappingVarianceError = D(5);
  this->MappingEdgesPointsUsed = usedEdges;
  this->MappingPlanesPointsUsed = usedPlanes;
  this->MappingBlobsPointsUsed = usedBlobs;

  std::cout << "Matches used: Total: " << this->Xvalues.size()
            << " edges: " << usedEdges << " planes: " << usedPlanes << " blobs: " << usedBlobs << std::endl;

  std::cout << "Covariance Eigen values: " << D.transpose() << std::endl;
  std::cout << "Maximum variance eigen vector: " << eig.eigenvectors().col(5).transpose() << std::endl;
  std::cout << "Maximum variance: " << D(5) << std::endl;

  // with undistortion, Tworld is the end-of-frame pose stored in
  // motion-parameter slots 6-11
  if (this->Undistortion)
  {
    for (int i = 0; i < 6; ++i)
    {
      this->Tworld(i) = this->MotionParametersMapping(i + 6);
    }
  }

  // Add the current computed transform to the list
  this->TworldList.push_back(this->Tworld);

  // Update maps
  this->UpdateMapsUsingTworld();

  // Transform the current keypoints
  // in the sensor reference frame
  // corresponding to the end of the
  // frame
  if (this->Undistortion)
  {
    this->UpdateCurrentKeypointsUsingTworld();
  }
  // Compute the undistortion interpolator before replacing previousTworld;
  // this interpolator will be used to output the mapped current frame
  this->CreateWithinFrameTrajectory(this->WithinFrameTrajectory, WithinFrameTrajMode::MappingTraj);

  // Update the PreviousTworld data
  this->PreviousTworld = this->Tworld;
}

//-----------------------------------------------------------------------------
void Slam::UpdateCurrentKeypointsUsingTworld()
{
  // Create the undistortion interpolator

  // Now transform the keypoints
  this->ExpressPointCloudInOtherReferencial(this->CurrentEdgesPoints);
  this->ExpressPointCloudInOtherReferencial(this->CurrentPlanarsPoints);
  this->ExpressPointCloudInOtherReferencial(this->CurrentBlobsPoints);
}

//-----------------------------------------------------------------------------
void Slam::UpdateMapsUsingTworld()
{
  // Init the mapping interpolator
  if (this->Undistortion)
  {
    this->CreateWithinFrameTrajectory(this->WithinFrameTrajectory, WithinFrameTrajMode::MappingTraj);
  }

  // it would nice to add the point frome the frame directly to the map
  auto updateMap = [this] (std::shared_ptr<RollingGrid> map, pcl::PointCloud<Slam::Point>::Ptr frame) {
    pcl::PointCloud<Slam::Point>::Ptr temporaryMap(new pcl::PointCloud<Slam::Point>());
    for (size_t i = 0; i < frame->size(); ++i)
    {
      temporaryMap->push_back(frame->at(i));
      this->TransformToWorld(temporaryMap->at(i));
    }
    map->Roll(this->Tworld);
    map->Add(temporaryMap);
  };

  updateMap(this->EdgesPointsLocalMap, this->CurrentEdgesPoints);
  updateMap(this->PlanarPointsLocalMap, this->CurrentPlanarsPoints);
  if (!this->FastSlam)
  {
    updateMap(this->BlobsPointsLocalMap, this->CurrentBlobsPoints);
  }

}

//-----------------------------------------------------------------------------
void Slam::CreateWithinFrameTrajectory(SampledSensorPath& path, WithinFrameTrajMode mode)
{
  // Fill path with two sensor pose samples: the pose at the beginning of the
  // frame (time 0.0) and at the end of the frame (time 1.0), taken from the
  // ego-motion or mapping motion parameters depending on mode. In
  // UndistortionTraj mode both samples are expressed relatively to the
  // end-of-frame pose.

  // Select the 12-DoF motion parameters to sample (no copy needed)
  const Eigen::VectorXd& motionParameters = (mode == WithinFrameTrajMode::EgoMotionTraj)
                                              ? this->MotionParametersEgoMotion
                                              : this->MotionParametersMapping;

  // Position and orientation of the sensor at time t0
  Eigen::Vector3d angles0(motionParameters(0), motionParameters(1), motionParameters(2));
  Eigen::Matrix3d R0 = RollPitchYawToMatrix(angles0);
  Eigen::Vector3d T0(motionParameters(3), motionParameters(4), motionParameters(5));
  // Position and orientation of the sensor at time t1
  Eigen::Vector3d angles1(motionParameters(6), motionParameters(7), motionParameters(8));
  Eigen::Matrix3d R1 = RollPitchYawToMatrix(angles1);
  Eigen::Vector3d T1(motionParameters(9), motionParameters(10), motionParameters(11));

  if (mode == WithinFrameTrajMode::UndistortionTraj)
  {
    // Relative motion between t0 and t1
    Eigen::Matrix3d dR = R1.transpose() * R0;
    Eigen::Vector3d dT = R1.transpose() * (T0 - T1);

    R0 = dR;
    T0 = dT;
    R1 = Eigen::Matrix3d::Identity();
    T1 = Eigen::Vector3d::Zero();
  }

  // Bug fix: write the samples into the provided path argument. Previously
  // the member WithinFrameTrajectory was always written, silently ignoring
  // `path`; behavior is unchanged for the existing call sites, which all
  // pass that member.
  path.Samples.resize(2);
  // orientation / position of the sensor at the beginning of the frame
  path.Samples[0].R = R0;
  path.Samples[0].T = T0;
  path.Samples[0].time = 0.0;
  // orientation / position of the sensor at the end of the frame
  path.Samples[1].R = R1;
  path.Samples[1].T = T1;
  path.Samples[1].time = 1.0;
}

//-----------------------------------------------------------------------------
void Slam::ExpressPointInOtherReferencial(Point& p)
{
  // Interpolate the within-frame transform at the point's relative time
  // (stored in the intensity field) and apply it to the point coordinates.
  const AffineIsometry iso = this->WithinFrameTrajectory(p.intensity);
  const Eigen::Vector3d transformed = iso.R * Eigen::Vector3d(p.x, p.y, p.z) + iso.T;
  p.x = transformed(0);
  p.y = transformed(1);
  p.z = transformed(2);
}

//-----------------------------------------------------------------------------
void Slam::ExpressPointCloudInOtherReferencial(pcl::PointCloud<Point>::Ptr pointcloud)
{
  // Apply the within-frame transform to every point of the cloud, in place.
  for (Point& point : pointcloud->points)
  {
    ExpressPointInOtherReferencial(point);
  }
}

//-----------------------------------------------------------------------------
void Slam::ResetDistanceParameters()
{
  // Drop all keypoint matching data accumulated during the previous ICP
  // iteration; the vectors keep their capacity, so refilling them does not
  // reallocate.
  this->Xvalues.clear();
  this->Avalues.clear();
  this->Pvalues.clear();
  this->TimeValues.clear();
  this->residualCoefficient.clear();
  // Reset the match rejection histograms: one zeroed bin per rejection cause.
  this->MatchRejectionHistogramLine.assign(this->NrejectionCauses, 0);
  this->MatchRejectionHistogramPlane.assign(this->NrejectionCauses, 0);
  this->MatchRejectionHistogramBlob.assign(this->NrejectionCauses, 0);
}

//-----------------------------------------------------------------------------
void Slam::UpdateTworldUsingTrelative()
{
  if (this->Undistortion)
  {
    // Position and orientation of the sensor at time t0
    // according to the reference frame attached to the
    // sensor during the previous frame
    Eigen::Vector3d angles0(this->MotionParametersEgoMotion(0), this->MotionParametersEgoMotion(1), this->MotionParametersEgoMotion(2));
    Eigen::Matrix3d dR0 = RollPitchYawToMatrix(angles0);
    Eigen::Vector3d dT0(this->MotionParametersEgoMotion(3), this->MotionParametersEgoMotion(4), this->MotionParametersEgoMotion(5));
    // Position and orientation of the sensor at time t1
    // according to the reference frame attached to the
    // sensor during the previous frame
    Eigen::Vector3d angles1(this->MotionParametersEgoMotion(6), this->MotionParametersEgoMotion(7), this->MotionParametersEgoMotion(8));
    Eigen::Matrix3d dR1 = RollPitchYawToMatrix(angles1);
    Eigen::Vector3d dT1(this->MotionParametersEgoMotion(9), this->MotionParametersEgoMotion(10), this->MotionParametersEgoMotion(11));
    // Position and orientation of the sensor during previous
    // frame according to the world reference frame
    Eigen::Vector3d angles2(this->MotionParametersMapping(6), this->MotionParametersMapping(7), this->MotionParametersMapping(8));
    Eigen::Matrix3d Rw = RollPitchYawToMatrix(angles2);
    Eigen::Vector3d Tw(this->MotionParametersMapping(9), this->MotionParametersMapping(10), this->MotionParametersMapping(11));

    // estimation of the sensor position and orientation
    // at the time t0 according to the world rederence frame
    Eigen::Matrix3d R0 = Rw * dR0;
    Eigen::Vector3d T0 = Rw * dT0 + Tw;
    // estimation of the sensor position and orientation
    // at the time t1 according to the world rederence frame
    Eigen::Matrix3d R1 = Rw * dR1;
    Eigen::Vector3d T1 = Rw * dT1 + Tw;

    // Next estimation of Tworld using
    // the odometry result. This estimation
    // will be used to undistorded the frame
    // if required and to initialize the
    this->MotionParametersMapping(0) = std::atan2(R0(2, 1), R0(2, 2));
    this->MotionParametersMapping(1) = -std::asin(R0(2, 0));
    this->MotionParametersMapping(2) = std::atan2(R0(1, 0), R0(0, 0));
    this->MotionParametersMapping(3) = T0(0);
    this->MotionParametersMapping(4) = T0(1);
    this->MotionParametersMapping(5) = T0(2);

    this->MotionParametersMapping(6) = std::atan2(R1(2, 1), R1(2, 2));
    this->MotionParametersMapping(7) = -std::asin(R1(2, 0));
    this->MotionParametersMapping(8) = std::atan2(R1(1, 0), R1(0, 0));
    this->MotionParametersMapping(9) = T1(0);
    this->MotionParametersMapping(10) = T1(1);
    this->MotionParametersMapping(11) = T1(2);
  }
  else
  {
    // Relative orientation and position estimated
    // according to the last sensor pose reference frame
    Eigen::Matrix3d Rr, Rw;
    Rr = GetRotationMatrix(this->Trelative);
    Eigen::Vector3d Tr(this->Trelative(3), this->Trelative(4), this->Trelative(5));
    // Orientation and position of the sensor at its last pose
    Rw = GetRotationMatrix(this->Tworld);
    Eigen::Vector3d Tw(this->Tworld(3), this->Tworld(4), this->Tworld(5));

    // The new pos of the sensor in the world
    // referential is the previous one composed
    // with the relative motion estimated at the
    // odometry step
    Eigen::Matrix3d newRw = Rw * Rr;
    Eigen::Vector3d newTw = Rw * Tr + Tw;

    // Next estimation of Tworld using
    // the odometry result. This estimation
    // will be used to undistorded the frame
    // if required and to initialize the
    this->Tworld(0) = std::atan2(newRw(2, 1), newRw(2, 2));;
    this->Tworld(1) = -std::asin(newRw(2, 0));
    this->Tworld(2) = std::atan2(newRw(1, 0), newRw(0, 0));
    this->Tworld(3) = newTw(0);
    this->Tworld(4) = newTw(1);
    this->Tworld(5) = newTw(2);
  }
}

//-----------------------------------------------------------------------------
// Set the voxel-grid downsampling leaf size (in the map's length unit) used
// by the local map holding the edge keypoints.
void Slam::SetVoxelGridLeafSizeEdges(double size)
{
  this->EdgesPointsLocalMap->SetLeafSize(size);
}

//-----------------------------------------------------------------------------
// Set the voxel-grid downsampling leaf size (in the map's length unit) used
// by the local map holding the planar keypoints.
void Slam::SetVoxelGridLeafSizePlanes(double size)
{
  this->PlanarPointsLocalMap->SetLeafSize(size);
}

//-----------------------------------------------------------------------------
// Set the voxel-grid downsampling leaf size (in the map's length unit) used
// by the local map holding the blob keypoints.
void Slam::SetVoxelGridLeafSizeBlobs(double size)
{
  this->BlobsPointsLocalMap->SetLeafSize(size);
}

//-----------------------------------------------------------------------------
// Forward the grid size to the three local keypoint maps (edges, planes,
// blobs) so they all use the same extent.
// NOTE(review): the exact meaning of "size" (number of voxels per axis vs
// total extent) is defined by the rolling-grid class — confirm there.
void Slam::SetVoxelGridSize(unsigned int size)
{
  this->EdgesPointsLocalMap->SetSize(size);
  this->PlanarPointsLocalMap->SetSize(size);
  this->BlobsPointsLocalMap->SetSize(size);
}

//-----------------------------------------------------------------------------
// Forward the voxel resolution to the three local keypoint maps (edges,
// planes, blobs) so they all use the same voxel dimension.
void Slam::SetVoxelGridResolution(double resolution)
{
  this->EdgesPointsLocalMap->SetResolution(resolution);
  this->PlanarPointsLocalMap->SetResolution(resolution);
  this->BlobsPointsLocalMap->SetResolution(resolution);
}

//-----------------------------------------------------------------------------
// Forward the lidar maximum range to the three local keypoint maps, which
// presumably use it to bound the region of the map kept around the sensor —
// verify against the rolling-grid implementation.
// NOTE(review): "Maximun" / "SetPointCoudMaxRange" are misspellings, kept
// as-is because both names are part of public APIs with external callers.
void Slam::SetLidarMaximunRange(const double maxRange)
{
  this->EdgesPointsLocalMap->SetPointCoudMaxRange(maxRange);
  this->PlanarPointsLocalMap->SetPointCoudMaxRange(maxRange);
  this->BlobsPointsLocalMap->SetPointCoudMaxRange(maxRange);
}
