/*!
  @file visod.cpp
  @copyright 2013 Kubota Lab. All rights reserved.
*/

#include "openlab2013/visod.h"

#include <algorithm>
#include <vector>
#include <iterator>
#include <iostream>
#include <limits>

#include <tf/transform_listener.h>
#include <geometry_msgs/PoseStamped.h>
#include <sensor_msgs/image_encodings.h>

#include <boost/random/mersenne_twister.hpp>
#include <boost/random/uniform_int.hpp>

#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/video/video.hpp>
#include <opencv2/nonfree/features2d.hpp>

#include <pcl/common/norms.h>
#include <pcl/common/transforms.h>
#include <pcl/ros/conversions.h>
#include <pcl/filters/passthrough.h>

#include "KneipP3P/P3p.h"

// Lightweight profiling macro pair. _TIMER_BEGIN_ declares function-local
// static accumulators (each function using the pair keeps its own stats) and
// records a start timestamp; _TIMER_END_ prints the elapsed time of the
// current call and, on every 5th call, the 5-call average, then resets.
// NOTE(review): because of the static declarations the pair can only be used
// ONCE per function body, and "time_lasped" (sic) is an existing identifier
// typo kept as-is since both definition and uses live inside these macros.
#define _TIMER_BEGIN_ \
  static long time_lasped = 0;\
  static int time_cnt = 0;\
  boost::posix_time::ptime start(boost::posix_time::microsec_clock::local_time());
#define _TIMER_END_ \
  boost::posix_time::ptime end(boost::posix_time::microsec_clock::local_time());\
  boost::posix_time::time_duration cur_time(end - start);\
  time_lasped += cur_time.total_microseconds();\
  fprintf(stdout, "Processing time: %.5f msec\n", 0.001*cur_time.total_microseconds());\
  if (++time_cnt == 5)\
  {\
    fprintf(stdout, "Processing time (ave.) : %.5f msec\n", time_lasped * 0.001 / 5.0);\
    time_cnt = 0;\
    time_lasped = 0;\
  }

namespace mrover 
{

/*!
  Stereo-pair callback: runs one visual-odometry step per synchronized frame.
  Converts both images to mono8, lazily caches the camera projection matrices
  and the ground-plane homography, estimates the frame-to-frame motion via
  process(), and accumulates it into the map-to-camera pose mTc_.
  @param imgL/imgR   synchronized left/right images
  @param infoL/infoR camera infos; only the 3x4 P matrices are read
*/
void VisOd::stereoCameraCB(const sensor_msgs::ImageConstPtr &imgL, const sensor_msgs::ImageConstPtr &imgR, const sensor_msgs::CameraInfoConstPtr &infoL, const sensor_msgs::CameraInfoConstPtr &infoR)
{
  frame_cnt_ = imgL->header.seq;
  ROS_INFO("frame %lu", frame_cnt_);

  /* converting images into opencv format (mono) */
  cv_bridge::CvImage::Ptr cv_ptrL;
  cv_bridge::CvImage::Ptr cv_ptrR;
  try {
    cv_ptrL = cv_bridge::toCvCopy(imgL, "mono8");
    cv_ptrR = cv_bridge::toCvCopy(imgR, "mono8");
  }
  catch(cv_bridge::Exception &e) {
    ROS_ERROR("cv_bridge: conversion error");
    return;
  }
  // Image queues hold the previous/current frame consumed by process().
  queueL_.push(cv_ptrL->image);
  queueR_.push(cv_ptrR->image);


  /* converting projection matrices into opencv format */
  // Lazy one-time init: P matrices are assumed constant for the session.
  if (!PL_.data || !PR_.data)
  {
    PL_.create(3, 4, CV_32F);
    PR_.create(3, 4, CV_32F);
    for (int i = 0; i < 3; ++i)
      for (int j = 0; j < 4; ++j)
      {
        PL_.at<float>(i, j) = infoL->P[4*i+j];
        PR_.at<float>(i, j) = infoR->P[4*i+j];
      }
    sw_.setK(PL_);
    sw_.setWorkingDir(work_dir_);
  }

  /* obtain homography matrix */
  // TODO split this into other callback
  {
    if (!H_.data) H_.create(3, 3, CV_32F);
    static tf::TransformListener listener;
    tf::StampedTransform transform;
    try
    {
      // NOTE(review): waits on ros::Time::now() but looks up Time(0)
      // (latest available) — presumably intentional, verify if tf drifts.
      listener.waitForTransform("/rover/left_camera", "/rover/base_link", ros::Time::now(), ros::Duration(3.0));
      listener.lookupTransform("/rover/left_camera", "/rover/base_link", ros::Time(0), transform);
    }
    catch (tf::TransformException &e)
    {
      ROS_WARN("tf not published yet. retrying...");
      publishInitialTF();
      return;
    }
    tf::Vector3    position = transform.getOrigin();
    tf::Quaternion quaternion = transform.getRotation();
    tf::Matrix3x3  orientation(quaternion);

    // Camera extrinsics [R|t] w.r.t. base_link, in float for OpenCV math.
    cv::Mat RT(3, 4, CV_32F);
    for (int i = 0; i < 3; ++i)
      for (int j = 0; j < 3; ++j)
        RT.at<float>(i, j) = orientation[i][j];
    RT.at<float>(0, 3) = position.x();
    RT.at<float>(1, 3) = position.y();
    RT.at<float>(2, 3) = position.z();

    // For points on the ground plane z=0, the image-to-ground homography is
    // built from columns 0, 1 and 3 of the full projection P = K[R|t]
    // (the z column drops out) — hence column 3 below, flagged "!!".
    cv::Mat K = PL_(cv::Rect(0, 0, 3, 3));
    cv::Mat P = K * RT;
    cv::Mat H0 = H_.col(0);
    cv::Mat H1 = H_.col(1);
    cv::Mat H2 = H_.col(2);
    P.col(0).copyTo(H0);
    P.col(1).copyTo(H1);
    P.col(3).copyTo(H2);  // !!
    H_ = H_.inv();
    H_ /= H_.at<float>(2, 2);

    /* transform matrix in eigen */
    // Same extrinsics kept as an Eigen affine for PCL cloud transforms.
    transform_.translation() = Eigen::Vector3f(position.x(), position.y(), position.z());
    Eigen::Matrix3f R;
    for (int i = 0; i < 3; ++i)
      for (int j = 0; j < 3; ++j)
        R(i, j) = orientation[i][j];
    transform_.linear() = R;

    /* initial pose of left camera */
    // One-time init: cTb_ = camera-from-base, mTc_ = map-from-camera.
    if (!mTc_.data || !cTb_.data)
    {
      cTb_ = cv::Mat::eye(4, 4, CV_32F);
      for (int i = 0; i < 3; ++i)
        for (int j = 0; j < 3; ++j)
          cTb_.at<float>(i, j) = R(i, j);
      for (int i = 0; i < 3; ++i)
        cTb_.at<float>(i, 3) = transform_.translation()[i];
      //mTc_.create(4, 4, CV_32F);
      mTc_ = cTb_.inv();
      rect_mTc_ = cTb_.inv();
      //ROS_INFO_STREAM("Initial pose is set to \n" << mTc_);
    }
  }

  /* estimate pose */
  // pTc = previous-camera-from-current-camera motion; accumulate into mTc_.
  cv::Mat pTc = cv::Mat::eye(4, 4, CV_32F);
  process(pTc);
  mTc_ = mTc_ * pTc;
  applyPlanarApproximation(mTc_); // tmp


  /* publish pose and path */
  //publishCurrentTF(true);

  /* save to file */
#if 0
  static bool saved = false;
  if (!saved && frame_cnt_ > 400)
  {
    saved = true;
    if (traj_ofs_.is_open())
    {
      tf::Pose pose_p(tf::Quaternion(0, 0, 0, 1));
      for (size_t i = 0; i < path_.poses.size(); ++i) // tmp
      {
        tf::Stamped<tf::Pose> pose_c;
        tf::poseStampedMsgToTF(path_.poses[i], pose_c);
        tf::Pose motion = pose_p.inverse() * pose_c;

        tf::Quaternion quat = motion.getRotation();
        tf::Vector3 pos = motion.getOrigin();
        traj_ofs_ << counters_->at(i) << " "
          << quat.w() << " " << quat.x() << " " << quat.y() << " " << quat.z() << " "
          << pos.x() << " " << pos.y() << " " << pos.z() << " "
          << reasons_->at(i) << "\n"; 
        pose_p = pose_c;
      }
      traj_ofs_.close();
    }
    exit(0);
  }
#endif
#if 0
  if (traj_ofs_.is_open())
  {
    static cv::Mat mTpb = cv::Mat::eye(4, 4, CV_32F);
    cv::Mat mTb = mTc_ * cTb_;
    cv::Mat pbTcb = mTpb.inv() * mTb;
    tf::Vector3 pos(pbTcb.at<float>(0, 3), 
        pbTcb.at<float>(1, 3), 
        pbTcb.at<float>(2, 3));
    tf::Matrix3x3 rot;
    tf::Quaternion quat;
    for (int ii = 0; ii < 3; ii++)
      for (int jj = 0; jj < 3; jj++)
        rot[ii][jj] = pbTcb.at<float>(ii, jj);
    rot.getRotation(quat);
    traj_ofs_ << frame_cnt_ << " "
      << quat.w() << " " << quat.x() << " " << quat.y() << " " << quat.z() << " "
      << pos.x() << " " << pos.y() << " " << pos.z() << " 333\n"; 
    mTpb = mTb;
  }
  /*
  if (traj_ofs_.is_open())
  {
    cv::Mat mTb = mTc_ * cTb_;
    traj_ofs_ << mTb.at<float>(0, 3) << "\t"
        << mTb.at<float>(1, 3) << "\t"
        << mTb.at<float>(2, 3) << "\n";
  }
  */
#endif
}

/*!
  One visual-odometry iteration over the two most recent left images.
  Tracks features between the previous and current frame, projects them onto
  the ground plane, estimates the inter-frame motion by the configured method,
  and — when the feature distribution degrades or the accumulated motion
  exceeds a threshold — runs a sliding-window SBA update and re-seeds the
  feature set.
  @param[out] pTc  4x4 motion (previous-camera-from-current-camera);
                   identity on failure.
  Heavy use of function-local statics: the whole tracker state lives here
  and persists between calls.
*/
void VisOd::process(cv::Mat &pTc)
{
  static bool renew = true;       // NOTE(review): written below but never read in this chunk
  static bool large_motion = false;
  cv::Mat img0, img1, img0_R;
  static KeyPointsPtr kp0(new KeyPoints);  // features in previous frame
  static KeyPointsPtr kp1(new KeyPoints);  // features tracked into current frame
  static IndicesPtr inliers(new Indices);
  CloudPtr cloud0(new Cloud);
  CloudPtr cloud1(new Cloud);

  // Deep copy of last frame's tracked points, used as measurements for SBA.
  KeyPointsPtr kp1_prev(new KeyPoints(*kp1));
  static cv::Mat accum = cv::Mat::eye(4, 4, CV_32F);  // motion since last keyframe
  static CloudPtr cloud_st(new Cloud);   // keyframe ("start") 3D points
  static KeyPointsPtr kp_st;             // keyframe keypoints
  static IndicesPtr inliers_st;          // surviving keyframe inlier indices

  /* fetch images */
  try
  {
    img0 = queueL_.getPrevImg();
    img1 = queueL_.getCurrImg();
    img0_R = queueR_.getPrevImg();
  }
  catch (int e)
  {
    ROS_ERROR("no prev image");
    pTc = cv::Mat::eye(4, 4, CV_32F);
    return;
  }

  //TODO consider case of large motion
  
  /* extract features */
  // Hand last frame's tracked set over as this iteration's reference set;
  // kp1 is re-seated so kp0 and kp1 no longer alias the same vector.
  kp0 = kp1;
  kp1.reset(new KeyPoints);
  if (!kp0->size()) 
  {
    extractFeatures(img0, *kp0, "HARRIS", false);
    if (kp0->size() < 5)
    {
      ROS_ERROR("too few points extracted");
      pTc = cv::Mat::eye(4, 4, CV_32F);
      // TODO remove the last element of the queue
      return; 
    }
  }

  /* tracking */
  // When a keyframe exists, the inliers_st index list is pruned in lockstep
  // with the tracked points via the 5-argument overload.
  if (!inliers_st)
    trackLK(img0, img1, *kp0, *kp1);
  else
    trackLK(img0, img1, *kp0, *kp1, *inliers_st);
  ROS_INFO("Num KeyPoints-->%lu", kp0->size());

  if (kp1->size() > 0)
  {
    /* locate points */
    //triangulate(img0, img0_R, *kp0, *cloud);
    projectPointsOntoGround(*kp0, *cloud0);
    projectPointsOntoGround(*kp1, *cloud1);

    /* estimate motion */
    inliers.reset(new Indices);
    switch(method_) {
      case 0: estimateMotion(*cloud0, *kp1, pTc, *inliers); break;
      case 1: estimateMotionP3P(*cloud0, *kp1, pTc, *inliers); break;
      case 2: estimateMotion2PT(*cloud0, *cloud1, *kp0, *kp1, pTc, *inliers); break;
      case 3: estimateMotion3PT(*cloud0, *cloud1, pTc, *inliers); break;
      default: pTc = cv::Mat::eye(4, 4, CV_32F); break;
    }
    ROS_INFO("Num inliers %lu (%.1f %%)", inliers->size(), 100.0*inliers->size()/cloud0->points.size());
  }
  else
  {
    inliers.reset(new Indices);
    pTc = cv::Mat::eye(4, 4, CV_32F);
  }


  /* check distribution of key points */
  // no_distrib becomes true when too few inliers remain, or when ALL inliers
  // sit in the upper image half (no near-field ground support).
  size_t inlier_thresh = 100;
  bool no_distrib = (inliers->size() < inlier_thresh);
  if (!no_distrib)
  {
    for (int i = 0; i < inliers->size(); ++i)
    {
      if (kp0->at(inliers->at(i)).pt.y > img0.rows / 2) break;
      if (i == inliers->size() - 1) no_distrib = true;
    }
  }
  ROS_INFO("Tracking status: %s%s",
      large_motion? "large motion, ": "",
      no_distrib? "poor distribution": "");

  /* sliding window sba */
  // Keyframe boundary: refine the window, publish the path, then re-seed.
  if((no_distrib || large_motion) && kp_st)
  {
    /* perform SBA */
    counters_->push_back(frame_cnt_);
    reasons_->push_back(large_motion? 0: 1);
    CloudPtr points3D(new Cloud);
    static pcl::PassThrough<PointType> filter;
    filter.setInputCloud(cloud_st);
    filter.setIndices(inliers_st);
    filter.filter(*points3D);
    filterByIndices(*kp_st, *inliers_st);
    ROS_INFO("points3D %lu", points3D->points.size());

  // debug
  {
    cv_bridge::CvImagePtr cv_ptr(new cv_bridge::CvImage);
    cv_ptr->image.create(img1.size(), CV_8UC3);
    cv::cvtColor(img1, cv_ptr->image, CV_GRAY2BGR);
    // NOTE(review): loop bound is kp0->size() but indexes kp_st/kp1_prev;
    // at() will throw if the three containers ever differ in size — confirm
    // they are kept parallel by the filtering above.
    for (int i = 0; i < kp0->size(); ++i)
    {
      cv::line(cv_ptr->image, kp_st->at(i).pt, kp1_prev->at(i).pt, cv::Scalar(0, 200, 100), 2);
    }
    cv_ptr->header.stamp = ros::Time::now();
    cv_ptr->header.frame_id = "/rover/left_camera";
    cv_ptr->encoding = sensor_msgs::image_encodings::BGR8;
    inlier_img_pub_.publish(cv_ptr->toImageMsg());
  }
    


    sw_.update(accum, points3D, kp_st, kp1_prev);

    /* publish as path */
    // Chain the refined window poses into map frame and republish the path.
    ros::Time now = ros::Time::now();
    std::vector<cv::Mat> &poses = sw_.getPoses();
    path_.poses.clear();
    cv::Mat mTc = cTb_.inv();
    for (size_t i = 1; i < poses.size(); ++i)
    {
      mTc = mTc * poses[i-1] * poses[i].inv();
      //applyPlanarApproximation(mTc);
      tf::Transform pose_tf = cv2tfM(mTc * cTb_);
      tf_pub_.sendTransform(tf::StampedTransform(pose_tf, now, "/map", "/rover/base_link"));
      tf::Stamped<tf::Pose> pose_tf_stamped(pose_tf, now, "/rover/base_link");
      geometry_msgs::PoseStamped pose_msg;
      tf::poseStampedTFToMsg(pose_tf_stamped, pose_msg);
      path_.poses.push_back(pose_msg);
    }
    path_.header.frame_id = "/map";
    path_.header.stamp = now;
    path_pub_.publish(path_);

    /* call dense stereo when exceeds threshold */
    // Triggers roughly every distance_thresh meters of keyframe travel.
    static size_t motion_inc = 0;
    double distance_thresh = 1.0;
    ++motion_inc;
    if (motion_inc >= distance_thresh / mot_thresh_)
    {
      sendStereoTrigger();
      motion_inc = 0;
    }



#if 0
    cv::Mat pose1 = sw_.update(accum, points3D, kp_st, kp1_prev);
    cv::Mat pose0 = sw_.getPose(-1);

    /* publish as path */

    ros::Time now = ros::Time::now();
    cv::Mat mTb = rect_mTc_ * pose0 * pose1.inv() * cTb_;
    // planar approx
    {
      mTb.at<float>(2, 3) = 0;  // z
      mTb.at<float>(2, 0) = mTb.at<float>(2, 1) = 0;
      mTb.at<float>(0, 2) = mTb.at<float>(1, 2) = 0;
      mTb.at<float>(2, 2) = 1;
      float f0 = cv::norm(mTb.col(0));
      float f1 = cv::norm(mTb.col(1));
      mTb.at<float>(0, 0) /= f0;
      mTb.at<float>(1, 0) /= f0;
      mTb.at<float>(0, 1) /= f1;
      mTb.at<float>(1, 1) /= f1;
    }

    tf::Vector3 position(mTb.at<float>(0, 3), mTb.at<float>(1, 3), mTb.at<float>(2, 3));
    tf::Matrix3x3 rotation;
    tf::Quaternion quaternion;
    for (int ii = 0; ii < 3; ii++)
      for (int jj = 0; jj < 3; jj++)
        rotation[ii][jj] = mTb.at<float>(ii, jj);
    rotation.getRotation(quaternion);

    tf::Transform pose_tf(quaternion, position);
    //tf_pub_.sendTransform(tf::StampedTransform(pose_tf, now, "/map", "/rover/base_link"));
    tf::Stamped<tf::Pose> pose_tf_stamped(pose_tf, now, "/rover/base_link");
    geometry_msgs::PoseStamped pose_msg;
    tf::poseStampedTFToMsg(pose_tf_stamped, pose_msg);
    path_.poses.push_back(pose_msg);
    path_.header.frame_id = "/map";
    path_.header.stamp = now;
    path_pub_.publish(path_);

    rect_mTc_ = mTb * cTb_.inv();
#endif



#if 0
    /* save to file */
    {
      if (traj_ofs_.is_open())
      {
        static cv::Mat mTpb = cv::Mat::eye(4, 4, CV_32F);
        cv::Mat pbTcb = mTpb.inv() * mTb;
        tf::Vector3 pos(pbTcb.at<float>(0, 3), 
            pbTcb.at<float>(1, 3), 
            pbTcb.at<float>(2, 3));
        tf::Matrix3x3 rot;
        tf::Quaternion quat;
        for (int ii = 0; ii < 3; ii++)
          for (int jj = 0; jj < 3; jj++)
            rot[ii][jj] = pbTcb.at<float>(ii, jj);
        rot.getRotation(quat);
        ROS_INFO("%d", counters_->back());
        traj_ofs_ << counters_->back() << " "
          << quat.w() << " " << quat.x() << " " << quat.y() << " " << quat.z() << " "
          << pos.x() << " " << pos.y() << " " << pos.z() << "\n"; 
        mTpb = mTb;
      }
    }
#endif

    /* reset */
    // Start a new keyframe: fresh features on img0, re-track and re-estimate
    // so that accum/kp_st/cloud_st/inliers_st describe the new window start.
    renew = true;
    large_motion = false;
    no_distrib = false;
    extractFeatures(img0, *kp0, "HARRIS", false);
    trackLK(img0, img1, *kp0, *kp1);
    inliers.reset(new Indices);
    if (kp0->size() > 0)
    {
      projectPointsOntoGround(*kp0, *cloud0);
      projectPointsOntoGround(*kp1, *cloud1);
      switch(method_) {
        case 0: estimateMotion(*cloud0, *kp1, pTc, *inliers); break;
        case 1: estimateMotionP3P(*cloud0, *kp1, pTc, *inliers); break;
        case 2: estimateMotion2PT(*cloud0, *cloud1, *kp0, *kp1, pTc, *inliers); break;
        case 3: estimateMotion3PT(*cloud0, *cloud1, pTc, *inliers); break;
        default: pTc = cv::Mat::eye(4, 4, CV_32F); break;
      }
      ROS_INFO("Initial feature size: %lu", inliers->size());
      accum = pTc.inv();
      kp_st = kp0;
      cloud_st = cloud0;
      inliers_st = inliers;
    }
    if (inliers->size() < 4)
    {
      ROS_WARN("murideshita...");  // "couldn't do it" — tracking lost, full reset
      accum = cv::Mat::eye(4, 4, CV_32F);
      kp0.reset(new KeyPoints);
      kp1.reset(new KeyPoints);
      inliers.reset(new Indices);
      kp_st.reset();
      inliers_st.reset();
      cloud_st.reset();
    }
  }
  else
  {
    // Mid-window frame: keep accumulating motion and prune keyframe inliers.
    if (!kp_st)
    {
      kp_st = kp0;
      cloud_st = cloud0;
      inliers_st = inliers;
    }
    else
    {
      filterByIndices(*inliers_st, *inliers);
    }

    // Compare squared translation against squared threshold (avoids sqrt).
    accum = pTc.inv() * accum;
    double mot_thresh = pow(mot_thresh_, 2);  // m
    double mot = pow(accum.at<float>(0, 3), 2) 
                 + pow(accum.at<float>(1, 3), 2) 
                 + pow(accum.at<float>(2, 3), 2);
    large_motion = (mot > mot_thresh);

  }


  // debug
  if (0)
  {
    cv_bridge::CvImagePtr cv_ptr(new cv_bridge::CvImage);
    cv_ptr->image.create(img1.size(), CV_8UC3);
    cv::cvtColor(img1, cv_ptr->image, CV_GRAY2BGR);
    for (int i = 0; i < kp0->size(); ++i)
    {
      cv::line(cv_ptr->image, kp0->at(i).pt, kp1->at(i).pt, cv::Scalar(0, 200, 100), 2);
    }
    for (int i = 0; i < inliers->size(); ++i)
    {
      cv::line(cv_ptr->image, kp0->at(inliers->at(i)).pt, kp1->at(inliers->at(i)).pt, cv::Scalar(0, 0, 255), 2);
      cv::circle(cv_ptr->image, kp1->at(inliers->at(i)).pt, 3, cv::Scalar(255, 0, 0), 1);
    }
    cv_ptr->header.stamp = ros::Time::now();
    cv_ptr->header.frame_id = "/rover/left_camera";
    cv_ptr->encoding = sensor_msgs::image_encodings::BGR8;
    inlier_img_pub_.publish(cv_ptr->toImageMsg());
  }

  // Carry only the inlier tracks into the next iteration.
  if (inliers->size() > 0) filterByIndices(*kp1, *inliers);
}

/*!
  Detects features in the lower ~90% of the image (the sky band is masked
  out) using a grid-adapted detector of the requested type, optionally
  refining the locations to sub-pixel accuracy.
  @param img            input mono image
  @param kp             [out] detected keypoints
  @param detector_type  OpenCV FeatureDetector name (e.g. "HARRIS")
  @param use_subpixel   refine corners with cornerSubPix when true
*/
void VisOd::extractFeatures(const cv::Mat &img, std::vector<cv::KeyPoint> &kp, const std::string &detector_type, bool use_subpixel)
{
  /* error handling */
  //TODO 
  
  /* detect features */
  // BUGFIX: the detector used to be a function-local static constructed from
  // the FIRST call's detector_type; any later call with a different type
  // silently reused the first detector. Cache it keyed on the type instead.
  static std::string cached_type;
  static cv::Ptr<cv::FeatureDetector> _detector;
  static cv::Ptr<cv::GridAdaptedFeatureDetector> detector;
  if (detector.empty() || cached_type != detector_type)
  {
    cached_type = detector_type;
    _detector = cv::FeatureDetector::create(detector_type);
    detector = new cv::GridAdaptedFeatureDetector(_detector, 512, 16, 16);
  }
  // Mask off the top 10% of the image (mostly sky / far background).
  cv::Mat mask = cv::Mat::zeros(img.size(), CV_8U);
  //cv::rectangle(mask, cv::Point(0, 0.2*img.rows), cv::Point(img.cols, img.rows), 255, -1);
  cv::rectangle(mask, cv::Point(0, 0.1*img.rows), cv::Point(img.cols, img.rows), 255, -1);
  detector->detect(img, kp, mask);

  /* subpixel */
  // Guard against an empty detection set: cornerSubPix rejects empty input.
  if (use_subpixel && !kp.empty())
  {
    static cv::Size winSize(5, 5);
    static cv::Size zeroZone(-1, -1);
    static cv::TermCriteria criteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 40, 0.001 );
    std::vector<cv::Point2f> kp_subpix;
    cv::KeyPoint::convert(kp, kp_subpix);
    cv::cornerSubPix(img, kp_subpix, winSize, zeroZone, criteria);
    cv::KeyPoint::convert(kp_subpix, kp);
  }
}

/*!
  Matches SIFT descriptors between left/right keypoints with a radius- and
  epipolar-constrained mask, prunes zero-distance matches, and enforces
  mutual (left-right) consistency. Also renders two debug windows.
  @param imgL/imgR            input images
  @param kpL/kpR              [in,out] keypoints (descriptor step may drop some)
  @param match                [out] surviving matches (queryIdx->kpL, trainIdx->kpR)
  @param descriptor_type      descriptor extractor name (currently unused: SIFT hard-coded)
  @param matcher_type         DescriptorMatcher name
*/
void VisOd::matchFeatures(const cv::Mat &imgL, const cv::Mat &imgR, std::vector<cv::KeyPoint> &kpL, std::vector<cv::KeyPoint> &kpR, std::vector<cv::DMatch> &match, const std::string &descriptor_type, const std::string &matcher_type)
{
  static cv::Ptr<cv::DescriptorExtractor> extractor(cv::DescriptorExtractor::create(descriptor_type));
  static cv::Ptr<cv::DescriptorMatcher> matcher(cv::DescriptorMatcher::create(matcher_type));
  static cv::SIFT sift;

  /* error handling */
  // TODO

  /* descriptor extraction */
  cv::Mat descL, descR;
  sift(imgL, cv::Mat(), kpL, descL, true);
  sift(imgR, cv::Mat(), kpR, descR, true);
  //extractor->compute(imgL, kpL, descL);
  //extractor->compute(imgR, kpR, descR);

  /* radius match */
  // Candidate pairs must be close in the image, have positive disparity
  // (left x > right x) and near-equal rows (rectified epipolar constraint).
  const float MAX_RADIUS = 0.3*imgL.cols;
  cv::Mat mask = cv::Mat::zeros(kpL.size(), kpR.size(), CV_8U);
  for (size_t i = 0; i < kpL.size(); ++i)
    for (size_t j = 0; j < kpR.size(); ++j)
      mask.at<uchar>(i, j) = 
          (cv::norm(kpL[i].pt - kpR[j].pt) < MAX_RADIUS 
            && kpL[i].pt.x > kpR[j].pt.x
            && fabsf(kpL[i].pt.y - kpR[j].pt.y) < 10
           )? 1: 0;
  match.clear();
  matcher->match(descL, descR, match, mask);

  /* match-distance statistics (informational only) */
  float dist_mean = 0.;
  float dist_var = 0.;
  float dist_max = -1;
  float dist_min = 10000;
  for (size_t i = 0; i < match.size(); ++i)
  {
    dist_max = std::max(dist_max, match[i].distance);
    if (match[i].distance > 0) dist_min = std::min(dist_min, match[i].distance);
    dist_mean += match[i].distance / match.size();
  }
  for (size_t i = 0; i < match.size(); ++i)
  {
    dist_var += pow(match[i].distance - dist_mean, 2) / match.size();
  }
  ROS_INFO("%.2f, %.2f, %.2f -- %.2f", dist_mean, sqrt(dist_var), dist_min, dist_max);

  /* drop degenerate (zero-distance) matches */
  std::vector<cv::DMatch>::iterator match_it = match.begin();
  while (match_it != match.end()) 
  {
    if (match_it->distance <= 0)// || match_it->distance > 3 * dist_min)
    {
      match_it = match.erase(match_it);
    }
    else
    {
      ++match_it;
    }
  }


#if 1
  /* mutual consistency check */
  // Keep a match only if left point i picks right point j AND right point j
  // picks left point i back (best-cost in both row and column of the matrix).
  int input_sz  = static_cast<int>(kpL.size());
  int target_sz = static_cast<int>(kpR.size());
  int match_sz = static_cast<int>(match.size());
  float fmax = std::numeric_limits<float>::max();

  cv::Mat cost(input_sz, target_sz, CV_32F, fmax);
  std::vector<int> map_match(input_sz, -1);

  for (int i = 0; i < match_sz; ++i) {
    cv::DMatch &m = match[i];
    if (m.distance < cost.at<float>(m.queryIdx, m.trainIdx)) {
      cost.at<float>(m.queryIdx, m.trainIdx) = m.distance;
      map_match[m.queryIdx] = i;
    }
  }

  std::vector<int> input_idx (input_sz, -1);
  std::vector<int> target_idx(target_sz, -1);

  double minVal = 0.0;
  cv::Point minLoc(-1, -1);
  for (int i = 0; i < input_sz; ++i) {
    cv::minMaxLoc(cost.row(i), &minVal, NULL, &minLoc, NULL);
    if (minVal < fmax) input_idx[i] = minLoc.x;
  }
  for (int i = 0; i < target_sz; ++i) {
    cv::minMaxLoc(cost.col(i), &minVal, NULL, &minLoc, NULL);
    if (minVal < fmax) target_idx[i] = minLoc.y;
  }

  std::vector<cv::DMatch> tmp_match;
  tmp_match.swap(match);
  match.clear();
  for (int i = 0; i < input_sz; ++i) {
    // BUGFIX: input_idx[i] stays -1 when row i had no candidate; indexing
    // target_idx with -1 was out-of-bounds UB. Skip such rows explicitly.
    if (input_idx[i] < 0) continue;
    if (i == target_idx[input_idx[i]] && map_match.at(i) >= 0
        && map_match.at(i) < static_cast<int>(tmp_match.size())) 
      match.push_back(tmp_match.at(map_match.at(i)));
  }
#endif

  // debug
  {
    cv::Mat disp(imgL.size(), CV_8UC3);
    cv::cvtColor(imgL, disp, CV_GRAY2BGR);
    ROS_INFO_STREAM(kpL.size() << ", " << kpR.size() << ", " << match.size());
    cv::drawMatches(imgL, kpL, imgR, kpR, match, disp);
    cv::Mat _disp(cv::Size(disp.cols*2, disp.rows*2), CV_8UC3);
    cv::resize(disp, _disp, _disp.size());
    cv::imshow("matches", _disp);
    cv::waitKey(1);

    int w = imgL.cols;
    int h = imgL.rows;
    cv::Mat disp2(cv::Size(w, h), CV_8UC3);
    cv::cvtColor(imgL, disp2, CV_GRAY2BGR);
    for (size_t i = 0; i < match.size(); ++i)
    {
      cv::Point2f ptL = kpL.at(match.at(i).queryIdx).pt;
      cv::Point2f ptR = kpR.at(match.at(i).trainIdx).pt;
      //ptR.y = ptL.y;

      cv::circle(disp2, ptL, 4, cv::Scalar(0, 0, 255), 4);
      cv::line(disp2, ptL, ptR, cv::Scalar(255, 0, 0), 2);
    }
    cv::Mat _disp2(cv::Size(disp2.cols/1, disp2.rows/1), CV_8UC3);
    cv::resize(disp2, _disp2, _disp2.size());
    cv::imshow("matches2", _disp2);
    cv::waitKey(1);
  }
}

/*!
  Triangulates sparse stereo correspondences. Features are detected and
  LK-tracked on quarter-resolution images, depths recovered from disparity
  (active branch) or Linear-LS (disabled branches), and results accumulated
  into a point cloud; matches with invalid depth are dropped.
  @param imgL/imgR full-resolution stereo pair
  @param kpL       [in,out] left keypoints (quarter-res coordinates)
  @param cloud     [out] triangulated 3D points
*/
void VisOd::triangulate(const cv::Mat &imgL, const cv::Mat &imgR, std::vector<cv::KeyPoint> &kpL, Cloud &cloud)
{
  /* feature extraction and matching */
  std::vector<cv::KeyPoint> kpR;
  cv::Mat _imgL(imgL.rows/4, imgL.cols/4, imgL.type());
  cv::Mat _imgR(imgR.rows/4, imgR.cols/4, imgR.type());
  cv::resize(imgL, _imgL, _imgL.size());
  cv::resize(imgR, _imgR, _imgR.size());
  extractFeatures(_imgL, kpL, "HARRIS", false);
  trackLK(_imgL, _imgR, kpL, kpR);
  //extractFeatures(_imgR, kpR, "GridHARRIS", false);
  std::vector<cv::DMatch> match;
  for (size_t i = 0; i < kpL.size(); ++i)
  {
    // BUGFIX: a default-constructed DMatch (indices -1) used to be pushed
    // even when the disparity test failed, producing kpL[-1]/kpR[-1]
    // accesses below. Only keep pairs with sufficient positive disparity.
    if (kpL[i].pt.x - kpR[i].pt.x > 5)
    {
      cv::DMatch m;
      m.queryIdx = m.trainIdx = static_cast<int>(i);
      match.push_back(m);
    }
  }
  //matchFeatures(_imgL, _imgR, kpL, kpR, match, "SIFT", "BruteForce");

  /* 
   * triangulation using Linear-LS algorithm 
   * See [Hartley and Sturm, "Triangulation," 1997]
   */
  cv::SVD svd;
  cv::Mat A(4, 4, CV_32F);
  cv::Mat X(4, 1, CV_32F);

  cloud.width  = 0;
  cloud.height = 1;
  cloud.is_dense = false;
  //cloud.points.resize(cloud.width * cloud.height);
  std::vector<cv::DMatch>::iterator match_it = match.begin();
  while (match_it != match.end())
  {
    cv::Point2f &mL = kpL[match_it->queryIdx].pt;
    cv::Point2f &mR = kpR[match_it->trainIdx].pt;
    // Average rows (rectified assumption), then scale the quarter-res
    // coordinates back to full resolution.
    mR.y = mL.y = 0.5 * (mL.y + mR.y);
    mL.x *= 4;
    mL.y *= 4;
    mR.x *= 4;
    mR.y *= 4;  // BUGFIX: was `mL.y *= 4;` (copy-paste) — mL.y got scaled
                // twice and mR.y never, corrupting the y coordinates.

    A.row(0) = PL_.row(2) * mL.x - PL_.row(0);
    A.row(1) = PL_.row(2) * mL.y - PL_.row(1);
    A.row(2) = PR_.row(2) * mR.x - PR_.row(0);
    A.row(3) = PR_.row(2) * mR.y - PR_.row(1);

#if 0
    //svd(A);
    //X = svd.vt(cv::Rect(0, 3, 4, 1)).t();
    cv::SVD::solveZ(A, X);
    PointType pt;
    pt.x = X.at<float>(0, 0) / X.at<float>(3, 0); 
    pt.y = X.at<float>(1, 0) / X.at<float>(3, 0);
    pt.z = X.at<float>(2, 0) / X.at<float>(3, 0);

    ROS_INFO_STREAM("AX: " << A * X);
    //pt.z = (PL_.at<float>(0, 0) + PR_.at<float>(0, 0)) * 0.5 * 0.31 / (mL.x - mR.x);
#elif 0

    cv::Mat AM = A(cv::Rect(0, 0, 3, 4));
    cv::Mat AV = A(cv::Rect(3, 0, 1, 4));
    X = (AM.t() * AM).inv() * (AM.t() * (-AV));
    ROS_INFO_STREAM("A: " << A);
    ROS_INFO_STREAM("AM: " << AM);
    ROS_INFO_STREAM("AX: " << AM * X + AV);

    PointType pt;
    pt.x = X.at<float>(0, 0);
    pt.y = X.at<float>(1, 0);
    pt.z = X.at<float>(2, 0);

#else

    // Active branch: plain disparity triangulation with baseline 0.3095 m.
    PointType pt;
    float f = 0.5 * (PL_.at<float>(0, 0) + PR_.at<float>(0, 0));
    pt.z = f * 0.3095 / (mL.x - mR.x);
    pt.x = pt.z * (mL.x - PL_.at<float>(0, 2)) / PL_.at<float>(0, 0);
    pt.y = pt.z * (mL.y - PL_.at<float>(1, 2)) / PL_.at<float>(1, 1);
    //ROS_INFO_STREAM("pt: " << pt);

#endif

    // Keep only points in front of the camera and within 100 m.
    if (pt.z > 0 && pt.z < 100)
    {
      //ROS_INFO("pz: %5.2f, fz: %5.2f", pt.z, PL_.at<float>(0, 0) * 0.31 / (mL.x-mR.x));
      //ROS_INFO_STREAM("match: " << mL.x - mR.x << " -- " << pt.z);

      ++cloud.width;
      cloud.points.push_back(pt);
      ++match_it;
    }
    else
    {
      match_it = match.erase(match_it);
    }
  }

  // debug
  {
    sensor_msgs::PointCloud2Ptr cloud_msg(new sensor_msgs::PointCloud2);
    pcl::toROSMsg(cloud, *cloud_msg);
    cloud_msg->header.stamp = ros::Time::now();
    cloud_msg->header.frame_id = "/rover/base_link";
    cloud_pub_.publish(cloud_msg);
  }
}

/*!
  Pyramidal Lucas-Kanade tracking of kp0 from img0 into img1, keeping the
  auxiliary index list in lockstep: entries whose track fails are removed
  from kp0, kp1 and indices alike.
  @param img0/img1 previous/current images
  @param kp0       [in,out] points in img0 (losers removed)
  @param kp1       [out] tracked positions in img1 (parallel to kp0)
  @param indices   [in,out] per-point payload pruned in lockstep
*/
void VisOd::trackLK(const cv::Mat &img0, const cv::Mat &img1, std::vector<cv::KeyPoint> &kp0, std::vector<cv::KeyPoint> &kp1, std::vector<int> &indices)
{

  ROS_ASSERT(kp0.size() == indices.size());
  kp1.clear();

  std::vector<cv::Point2f> _kp0, _kp1;
  cv::KeyPoint::convert(kp0, _kp0);

  std::vector<uchar> status;
  std::vector<float> err;
  static cv::Size win_size(201, 201);
  int max_level = 7;
  static cv::TermCriteria criteria(cv::TermCriteria::COUNT + cv::TermCriteria::EPS, 30, 0.01 );
  double derivLambda = 0.5;
  int flags = 0;
  std::vector<cv::Mat> img0_pyr;
  std::vector<cv::Mat> img1_pyr;
  cv::buildOpticalFlowPyramid(img0, img0_pyr, win_size, max_level);
  cv::buildOpticalFlowPyramid(img1, img1_pyr, win_size, max_level);
  calcOpticalFlowPyrLK(img0_pyr, img1_pyr, _kp0, _kp1, status, err, win_size, max_level, criteria, derivLambda, flags);
  cv::KeyPoint::convert(_kp1, kp1);

  /* eliminate unmatched features */
  // BUGFIX: the previous erase-while-iterating loop indexed `status` with the
  // CURRENT iterator position; after the first erase that position drifted
  // from the original index, so surviving points were tested against the
  // wrong status entries (and the loop was O(n^2)). Compact in place using
  // the original index instead.
  ROS_ASSERT(status.size() == kp0.size());
  size_t keep = 0;
  for (size_t n = 0; n < status.size(); ++n)
  {
    if (status[n])
    {
      kp0[keep] = kp0[n];
      kp1[keep] = kp1[n];
      indices[keep] = indices[n];
      ++keep;
    }
  }
  kp0.resize(keep);
  kp1.resize(keep);
  indices.resize(keep);

  // debug
  if (0)
  {
    cv::Mat disp(img1.size(), CV_8UC3);
    cv::cvtColor(img1, disp, CV_GRAY2BGR);
    for (size_t i = 0; i < kp0.size(); ++i)
    {
      //kp0[i].pt.y = kp1[i].pt.y;
      cv::circle(disp, kp0[i].pt, 2, cv::Scalar(0, 255, 0), 1);
      cv::circle(disp, kp1[i].pt, 2, cv::Scalar(255, 0, 0), 1);
      cv::line(disp, kp0[i].pt, kp1[i].pt, cv::Scalar(0, 255, 0), 2);
    }
    cv::imshow("keypoints", disp);
    cv::waitKey(1);
  }
}

/*!
  Maps 2D keypoints to 3D by intersecting their view rays with the ground
  plane: pixels are pushed through the ground homography H_, lifted to z = 0,
  and finally expressed in the left-camera frame via transform_.
  @param kp    input keypoints (must be non-empty)
  @param cloud [out] resulting 3D points, one per keypoint
*/
void VisOd::projectPointsOntoGround(const std::vector<cv::KeyPoint> &kp, Cloud &cloud)
{

  ROS_ASSERT(kp.size() > 0);

  // Pixel coordinates -> ground-plane coordinates through the homography.
  std::vector<cv::Point2f> pixels;
  cv::KeyPoint::convert(kp, pixels);
  std::vector<cv::Point2f> ground;
  cv::perspectiveTransform(pixels, ground, H_);

  // Build an unorganized cloud of the same cardinality.
  cloud.width  = ground.size();
  cloud.height = 1;
  cloud.is_dense = false;
  cloud.points.resize(cloud.width * cloud.height);
  size_t idx = 0;
  for (std::vector<cv::Point2f>::const_iterator it = ground.begin(); it != ground.end(); ++it, ++idx)
  {
    PointType &pt = cloud.points[idx];
    pt.x = it->x;
    pt.y = it->y;
    pt.z = -0.0; // for better visibility
  }
  pcl::transformPointCloud(cloud, cloud, transform_); // in Lcamera coord
  
  // debug
  if (0)
  {
    publishCurrentTF();
    sensor_msgs::PointCloud2Ptr cloud_msg(new sensor_msgs::PointCloud2);
    pcl::toROSMsg(cloud, *cloud_msg);
    cloud_msg->header.stamp = ros::Time::now();
    cloud_msg->header.frame_id = "/rover/left_camera";
    cloud_pub_.publish(cloud_msg);
  }
}

/*!
  Refines an existing motion estimate pTc by re-solving PnP (iterative,
  RANSAC) between the transformed 3D points of the previous frame and the
  current keypoints, seeded with the current estimate.
  @param cloud0  3D points of the previous frame (ground/base coordinates)
  @param kp1     corresponding 2D observations in the current frame
  @param pTc     [in,out] 4x4 motion estimate, used as the initial guess
  @param inliers [out] RANSAC inlier indices
*/
void VisOd::adjustPath(const Cloud &cloud0, const std::vector<cv::KeyPoint> &kp1, cv::Mat &pTc, std::vector<int> &inliers)
{
  /* transform cloud */
  Cloud cloud_transformed;
  pcl::transformPointCloud(cloud0, cloud_transformed, transform_);

  /* convert data type */
  std::vector<cv::Point3f> obj_pts(cloud_transformed.points.size());
  for (size_t i = 0; i < cloud_transformed.points.size(); ++i)
  {
    obj_pts[i].x = cloud_transformed.points[i].x;
    obj_pts[i].y = cloud_transformed.points[i].y;
    obj_pts[i].z = cloud_transformed.points[i].z;
  }
  std::vector<cv::Point2f> img_pts;
  cv::KeyPoint::convert(kp1, img_pts);

  /* estimate motion */
  // cRp/ctp start as submatrix views into cTp so the current estimate seeds
  // solvePnPRansac (useExtrinsicGuess=true).
  // NOTE(review): cv::Rodrigues(_cRp, cRp) and the solver may reallocate the
  // output as CV_64F, detaching the views from cTp — the code relies on the
  // explicit rebuild of cTp below (reading .at<double>), not on the views.
  cv::Mat cTp = pTc.inv();
  cv::Mat K = PL_(cv::Rect(0, 0, 3, 3));
  cv::Mat D;
  cv::Mat _cRp;
  cv::Mat cRp = cTp(cv::Rect(0, 0, 3, 3));
  cv::Mat ctp = cTp(cv::Rect(3, 0, 1, 3));
  _TIMER_BEGIN_;
  cv::solvePnPRansac(obj_pts, img_pts, K, D, _cRp, ctp, true, RANSAC_ITERATIONS, 8.0, 1000, inliers, CV_ITERATIVE);
  cv::Rodrigues(_cRp, cRp);
  _TIMER_END_;

  //ROS_INFO_STREAM("R: " << cRp);
  //ROS_INFO_STREAM("t: " << ctp);
  // Rebuild cTp (float) from the double-precision solver output.
  cTp = cv::Mat::eye(4, 4, CV_32F);
  for (int i = 0; i < 3; ++i)
    for (int j = 0; j < 3; ++j)
      cTp.at<float>(i, j) = cRp.at<double>(i, j);
  for (int i = 0; i < 3; ++i)
    cTp.at<float>(i, 3) = ctp.at<double>(i, 0);

  pTc = cTp.inv();
}


/*!
  Estimates inter-frame camera motion by solving PnP (P3P kernel, RANSAC)
  from 3D points of the previous frame against their 2D observations in the
  current frame.
  @param cloud0  3D points of the previous frame (camera coordinates)
  @param kp1     corresponding 2D keypoints in the current frame
  @param pTc     [out] 4x4 motion (previous-from-current), pTc = cTp^-1
  @param inliers [out] RANSAC inlier indices
*/
void VisOd::estimateMotion(const Cloud &cloud0, const std::vector<cv::KeyPoint> &kp1, cv::Mat &pTc, std::vector<int> &inliers)
{
  /* convert data type */
  std::vector<cv::Point3f> obj_pts(cloud0.points.size());
  for (size_t i = 0; i < cloud0.points.size(); ++i)
  {
    obj_pts[i].x = cloud0.points[i].x;
    obj_pts[i].y = cloud0.points[i].y;
    obj_pts[i].z = cloud0.points[i].z;
  }
  std::vector<cv::Point2f> img_pts;
  cv::KeyPoint::convert(kp1, img_pts);

  /* estimate motion */
  // D left empty: images are assumed already undistorted/rectified.
  cv::Mat K = PL_(cv::Rect(0, 0, 3, 3));
  cv::Mat D;
  cv::Mat _cRp;
  cv::Mat ctp;
  cv::Mat cRp;
  _TIMER_BEGIN_;
  cv::solvePnPRansac(obj_pts, img_pts, K, D, _cRp, ctp, false, RANSAC_ITERATIONS, 8.0, 1000, inliers, CV_P3P);
  cv::Rodrigues(_cRp, cRp);
  _TIMER_END_;

  //ROS_INFO_STREAM("R: " << cRp);
  //ROS_INFO_STREAM("t: " << ctp);
  // Solver outputs are double; assemble the float 4x4 and invert at the end.
  cv::Mat cTp = cv::Mat::eye(4, 4, CV_32F);
  for (int i = 0; i < 3; ++i)
    for (int j = 0; j < 3; ++j)
      cTp.at<float>(i, j) = cRp.at<double>(i, j);
  for (int i = 0; i < 3; ++i)
    cTp.at<float>(i, 3) = ctp.at<double>(i, 0);

  pTc = cTp.inv();
}

/*!
  RANSAC motion estimation using Kneip's P3P solver: repeatedly samples 3
  2D-3D correspondences, solves for up to 4 camera poses, scores each by
  reprojection error over all points, and keeps the hypothesis with the most
  inliers.
  @param cloud0  3D points of the previous frame (assumed parallel to kp1 —
                 one point per keypoint; verify at call sites)
  @param kp1     corresponding 2D keypoints in the current frame
  @param pTc     [out] 4x4 motion; identity on failure
  @param inliers [out] indices passing the reprojection threshold
*/
void VisOd::estimateMotionP3P(const Cloud &cloud0, const std::vector<cv::KeyPoint> &kp1, cv::Mat &pTc, std::vector<int> &inliers)
{
  inliers.clear();
  std::vector<int> num_inliers;
  std::vector<Eigen::Affine3f> candidates;
  static boost::mt19937 random_idx_generator;

  if (cloud0.points.size() < 4)
  {
    pTc = cv::Mat::eye(4, 4, CV_32F);
    ROS_WARN("Motion estimation failed (few features)");
    return; 
  }

  /* normalize image vectors */
  // Unit-norm bearing vectors in pixel-homogeneous form, as P3p expects.
  Eigen::MatrixXf fvec(3, kp1.size());
  cv::Mat K = PL_(cv::Rect(0, 0, 3, 3));
  for (size_t i = 0; i < kp1.size(); ++i)
  {
    cv::Mat f(3, 1, CV_32F);
    f.at<float>(0, 0) = kp1[i].pt.x;
    f.at<float>(1, 0) = kp1[i].pt.y;
    f.at<float>(2, 0) = 1;
    double nm = 0.0;
    for (int ii = 0; ii < 3; ++ii) nm += pow(f.at<float>(ii, 0), 2);
    nm = sqrt(nm);
    for (int ii = 0; ii < 3; ++ii) fvec(ii, i) = f.at<float>(ii, 0) / nm;
  }
  
  /* start iterations */
  boost::uniform_int<> udist(0, cloud0.size() - 1);
  int npts = 3;
_TIMER_BEGIN_;
  for (int p = 0; p < RANSAC_ITERATIONS; ++p)
  {
    std::vector<int> indices;
    for (int i = 0; i < npts; ++i)
      indices.push_back(udist(random_idx_generator));

    using namespace Eigen;

    static P3p solver;
    TooN::Matrix<3,3> featureVectors;
    TooN::Matrix<3,3> worldPoints;
    TooN::Matrix<3,16> solutions;
    for (int i = 0; i < npts; ++i)
    {
      for (int j = 0; j < 3; ++j)
      {
        featureVectors(j, i) = fvec(j, indices[i]);
      }
      // BUGFIX: world points were taken from cloud0.points[i] (always the
      // first three points) instead of the randomly sampled indices[i],
      // so the 3D points never corresponded to the sampled bearings.
      worldPoints(0, i) = cloud0.points[indices[i]].x;
      worldPoints(1, i) = cloud0.points[indices[i]].y;
      worldPoints(2, i) = cloud0.points[indices[i]].z;
    }
    solver.computePoses(featureVectors, worldPoints, solutions);

    // Each P3P sample yields up to 4 pose solutions, packed as [t | R]
    // blocks of 4 columns each in `solutions`.
    for (int sol = 0; sol < 4; ++sol)
    {
      Eigen::Affine3f c;
      Eigen::Matrix3f m;
      Eigen::Vector3f v;
      for (int i = 0; i < 3; ++i)
      {
        v(i) = solutions(i, sol*4);
        for (int j = 0; j < 3; ++j)
          m(i, j) = solutions(i, sol*4+j+1);
      }
      c.linear() = m;
      c.translation() = v;
      if(!((c.linear().array() == c.linear().array())).all()) continue; // nan check
      ROS_INFO_STREAM("Candidate\n" << c.matrix());

      /* scoring candidate */
      // Count points whose reprojection lands within the (squared) threshold.
      Cloud cloud0_tr;
      pcl::transformPointCloud(cloud0, cloud0_tr, c.inverse());
      int num_good = 0;
      for (size_t i = 0; i < cloud0_tr.points.size(); ++i)
      {
        cv::Mat world_pt(3, 1, CV_32F);
        cv::Mat reproj_pt(3, 1, CV_32F);
        world_pt.at<float>(0, 0) = cloud0_tr.points[i].x;
        world_pt.at<float>(1, 0) = cloud0_tr.points[i].y;
        world_pt.at<float>(2, 0) = cloud0_tr.points[i].z;
        reproj_pt = K * world_pt;
        reproj_pt.at<float>(0, 0) /= reproj_pt.at<float>(2, 0);
        reproj_pt.at<float>(1, 0) /= reproj_pt.at<float>(2, 0);
        reproj_pt.at<float>(2, 0) /= reproj_pt.at<float>(2, 0);

        double l2_norm = std::pow(reproj_pt.at<float>(0, 0) - kp1[i].pt.x, 2)
            + std::pow(reproj_pt.at<float>(1, 0) - kp1[i].pt.y, 2);
        num_good += (l2_norm < 100 * RANSAC_THRESH)? 1: 0;
      }
      if (num_good < RANSAC_INLIER_THRESH * cloud0_tr.points.size()) continue;

      candidates.push_back(c.inverse());
      num_inliers.push_back(num_good);
    }
  }
_TIMER_END_;

  if (!num_inliers.size())
  {
    pTc = cv::Mat::eye(4, 4, CV_32F);
    ROS_WARN("Motion estimation failed (no good hypothesis)");
    return; 
  }

  size_t good_idx = std::distance(num_inliers.begin(), std::max_element(num_inliers.begin(), num_inliers.end()));
  //ROS_INFO("Good hypothesis = %lu of %lu", good_idx, num_inliers.size());
  
  /* find inliers */
  // Re-run the reprojection test under the winning pose to emit indices.
  Cloud cloud0_tr;
  pcl::transformPointCloud(cloud0, cloud0_tr, candidates[good_idx]);
  for (size_t i = 0; i < cloud0_tr.points.size(); ++i)
  {
    cv::Mat world_pt(3, 1, CV_32F);
    cv::Mat reproj_pt(3, 1, CV_32F);
    world_pt.at<float>(0, 0) = cloud0_tr.points[i].x;
    world_pt.at<float>(1, 0) = cloud0_tr.points[i].y;
    world_pt.at<float>(2, 0) = cloud0_tr.points[i].z;
    reproj_pt = K * world_pt;
    reproj_pt.at<float>(0, 0) /= reproj_pt.at<float>(2, 0);
    reproj_pt.at<float>(1, 0) /= reproj_pt.at<float>(2, 0);
    reproj_pt.at<float>(2, 0) /= reproj_pt.at<float>(2, 0);

    double l2_norm = std::pow(reproj_pt.at<float>(0, 0) - kp1[i].pt.x, 2)
      + std::pow(reproj_pt.at<float>(1, 0) - kp1[i].pt.y, 2);
    if (l2_norm < 100 * RANSAC_THRESH) inliers.push_back(i);
  }

  for (int i = 0; i < 3; ++i)
    for (int j = 0; j < 3; ++j)
      pTc.at<float>(i, j) = candidates[good_idx].linear()(i, j);
  for (int i = 0; i < 3; ++i)
    pTc.at<float>(i, 3) = candidates[good_idx].translation()[i];
  //ROS_INFO_STREAM("pTc\n" << pTc);
}

/*!
  Estimates the inter-frame camera motion from 3D-3D point correspondences
  (cloud0 in the previous frame, cloud1 in the current frame) using a RANSAC
  loop around Arun's SVD-based rigid alignment on 4-point samples.

  @param cloud0  3D points observed in the previous frame.
  @param cloud1  Corresponding 3D points in the current frame (same indices).
  @param pTc     [out] 4x4 CV_32F transform of the current camera expressed in
                 the previous camera frame; identity on failure.
  @param inliers [out] indices of correspondences consistent with the winner.
*/
void VisOd::estimateMotion3PT(const Cloud &cloud0, const Cloud &cloud1, cv::Mat &pTc, std::vector<int> &inliers)
{
  inliers.clear();
  std::vector<int> num_inliers;
  std::vector<Eigen::Affine3f> candidates;
  static boost::mt19937 random_idx_generator;
  // FIX: a default-constructed Eigen::Affine3f is UNINITIALIZED; it was read
  // below (c = prev_c) before ever being assigned, so early frames could pick
  // up a garbage pose. Start from the identity motion instead.
  static Eigen::Affine3f prev_c = Eigen::Affine3f::Identity();

  /* need at least 4 correspondences to draw a sample */
  if (cloud0.points.size() < 4)
  {
    pTc = cv::Mat::eye(4, 4, CV_32F);
    ROS_WARN("Motion estimation failed (few features)");
    return; 
  }
  
  /* start iterations */
  boost::uniform_int<> udist(0, cloud0.size() - 1);
  const int npts = 4;  // points per RANSAC hypothesis (minimal is 3; 4 adds redundancy)
_TIMER_BEGIN_;
  for (int p = 0; p < RANSAC_ITERATIONS; ++p)
  {
    /* draw a random sample (duplicate indices are possible but harmless/rare) */
    std::vector<int> indices;
    for (int i = 0; i < npts; ++i)
      indices.push_back(udist(random_idx_generator));

    using namespace Eigen;

    /* stack the sampled correspondences column-wise */
    MatrixXf X(3, npts), Y(3, npts);
    for (int i = 0; i < npts; ++i)
    {
      X.col(i) << cloud0.points[indices[i]].x, 
                  cloud0.points[indices[i]].y, 
                  cloud0.points[indices[i]].z;
      Y.col(i) << cloud1.points[indices[i]].x, 
                  cloud1.points[indices[i]].y, 
                  cloud1.points[indices[i]].z;
    }

    /* Arun's method: center both clouds, then R = V * U^T from the SVD of
       the cross-covariance, t = uY - R * uX */
    Vector3f uX = X.rowwise().mean();
    Vector3f uY = Y.rowwise().mean();
    MatrixXf dX = X - uX.rowwise().replicate(X.cols());
    MatrixXf dY = Y - uY.rowwise().replicate(Y.cols());

    MatrixXf H = dX * dY.transpose();
    JacobiSVD<MatrixXf> svd(H, ComputeThinU | ComputeThinV);
    Eigen::Affine3f c;
    c.linear() = svd.matrixV() * svd.matrixU().transpose();
    if (c.linear().determinant() < 0) {
      // negative determinant = reflection, not a rotation (degenerate sample)
      continue;
    }
    c.translation() = uY - c.linear() * uX;
    /* plausibility gate on the translation (camera-frame bounds, presumably
       tuned for the rover's speed -- TODO confirm limits); implausible motion
       falls back to the previous accepted estimate */
    if (c.translation()(2) > 0 || c.translation()(2) < -0.5
        || std::fabs(c.translation()(0)) > 0.3) 
    { c = prev_c; } // continue previous motion

    /* score the candidate: count correspondences within the RANSAC threshold */
    Cloud cloud0_tr;
    pcl::transformPointCloud(cloud0, cloud0_tr, c);
    int num_good = 0;
    for (size_t i = 0; i < cloud0.points.size(); ++i)
    {
      float l2_norm = pcl::squaredEuclideanDistance(cloud0_tr.points[i], cloud1.points[i]);
      num_good += (l2_norm < RANSAC_THRESH)? 1: 0;
    }
    if (num_good < RANSAC_INLIER_THRESH * cloud0.points.size()) continue;

    candidates.push_back(c);
    num_inliers.push_back(num_good);
  }
_TIMER_END_;

  if (num_inliers.empty())
  {
    pTc = cv::Mat::eye(4, 4, CV_32F);
    ROS_WARN("Motion estimation failed (no good hypothesis)");
    return; 
  }

  /* keep the hypothesis with the most inliers */
  size_t good_idx = std::distance(num_inliers.begin(), std::max_element(num_inliers.begin(), num_inliers.end()));

  /* re-evaluate the winner to collect its inlier indices */
  Cloud cloud0_tr;
  pcl::transformPointCloud(cloud0, cloud0_tr, candidates[good_idx]);
  for (size_t i = 0; i < cloud0_tr.points.size(); ++i)
  {
    float l2_norm = pcl::squaredEuclideanDistance(cloud0_tr.points[i], cloud1.points[i]);
    if (l2_norm < RANSAC_THRESH) inliers.push_back(static_cast<int>(i));
  }
  
  /* cTp maps cloud0 into the current frame; the caller wants the inverse */
  cv::Mat cTp = cv::Mat::eye(4, 4, CV_32F);
  for (int i = 0; i < 3; ++i)
    for (int j = 0; j < 3; ++j)
      cTp.at<float>(i, j) = candidates[good_idx].linear()(i, j);
  for (int i = 0; i < 3; ++i)
    cTp.at<float>(i, 3) = candidates[good_idx].translation()[i];
  pTc = cTp.inv();
  prev_c = candidates[good_idx];
}

/*!
  Estimates inter-frame camera motion with a 2-point RANSAC scheme: one
  distant point (z > 10) constrains orientation, one near point constrains
  translation; the remaining yaw about the distant-point axis is solved in
  closed form from a quadratic in cos(psi).

  @param cloud0  3D points in the previous frame.
  @param cloud1  Corresponding 3D points in the current frame (same indices).
  @param kp0     Previous-frame keypoints (currently unused here; kept for
                 interface parity with the other estimators).
  @param kp1     Current-frame keypoints (currently unused here).
  @param pTc     [out] 4x4 CV_32F current-in-previous transform; identity on failure.
  @param inliers [out] indices of correspondences consistent with the winner.
*/
void VisOd::estimateMotion2PT(const Cloud &cloud0, const Cloud &cloud1, const std::vector<cv::KeyPoint> &kp0, const std::vector<cv::KeyPoint> &kp1, cv::Mat &pTc, std::vector<int> &inliers)
{
  inliers.clear();
  std::vector<int> num_inliers;
  std::vector<Eigen::Affine3f> candidates;
  static boost::mt19937 random_idx_generator_n;
  static boost::mt19937 random_idx_generator_d;
  // FIX: default-constructed Eigen::Affine3f is UNINITIALIZED; it was read
  // below (c = prev_c) before first assignment. Start from identity.
  static Eigen::Affine3f prev_c = Eigen::Affine3f::Identity();

  /* split points by range: distant points (z > 10) fix orientation, near
     points fix translation */
  std::vector<int> near_idx, dist_idx;
  for (size_t i = 0; i < cloud0.points.size(); ++i)
  {
    if (cloud0.points[i].z > 10) dist_idx.push_back(i);
    else near_idx.push_back(i);
  }
  if (near_idx.size() == 0 || dist_idx.size() == 0) 
  {
    pTc = cv::Mat::eye(4, 4, CV_32F);
    ROS_WARN("Motion estimation failed (few features)");
    return; 
  }
  
  /* start iterations */
  boost::uniform_int<> d_near(0, near_idx.size() - 1);
  boost::uniform_int<> d_dist(0, dist_idx.size() - 1);
  const int n_npts = 1;  // near points per sample
  const int n_dpts = 1;  // distant points per sample
_TIMER_BEGIN_;
  for (int p = 0; p < RANSAC_ITERATIONS; ++p)
  {
    std::vector<int> ni;
    std::vector<int> di;
    for (int i = 0; i < n_npts; ++i)
      ni.push_back(d_near(random_idx_generator_n));
    for (int i = 0; i < n_dpts; ++i)
      di.push_back(d_dist(random_idx_generator_d));

    using namespace Eigen;

    /* Orientation: bearing of the sampled distant point in each frame */
    Vector3f d0, d1;
    d0 << cloud0.points[di[0]].x,
          cloud0.points[di[0]].y,
          cloud0.points[di[0]].z;
    d1 << cloud1.points[di[0]].x,
          cloud1.points[di[0]].y,
          cloud1.points[di[0]].z;
    d0.normalize();
    d1.normalize();
    Vector3f z;
    z << 0, 0, 1;

    /* rotation axis/angle that aligns each bearing with the optical axis */
    Vector3f n0, n1;
    n0 = z.cross(d0);
    n1 = z.cross(d1);
    n0.normalize();
    n1.normalize();

    // FIX: clamp the dot product into acos's domain -- float rounding can
    // push |z.d| infinitesimally above 1 and produce NaN angles.
    double theta0, theta1;
    theta0 = std::acos(std::max(-1.0f, std::min(1.0f, z.dot(d0))));
    theta1 = std::acos(std::max(-1.0f, std::min(1.0f, z.dot(d1))));

    Matrix3f zR0, zR1;
    rodrigues(n0, theta0, zR0);
    rodrigues(n1, theta1, zR1);

    /* Translation: sample-mean of the near point(s) in both frames */
    MatrixXf X(3, n_npts), Y(3, n_npts);
    for (int i = 0; i < n_npts; ++i)
    {
      X.col(i) << cloud0.points[ni[i]].x, 
                  cloud0.points[ni[i]].y, 
                  cloud0.points[ni[i]].z;
      Y.col(i) << cloud1.points[ni[i]].x, 
                  cloud1.points[ni[i]].y, 
                  cloud1.points[ni[i]].z;
    }
    Vector3f uX = X.rowwise().mean();
    Vector3f uY = Y.rowwise().mean();
    Vector3f uX_z = zR0 * uX;
    Vector3f uY_z = zR1 * uY;

    // TODO should be modified: only the first column of m0/m1 is consumed
    // below even though n_npts columns are built.
    MatrixXf m0(3, n_npts), m1(3, n_npts);
    for (int i = 0; i < n_npts; ++i)
    {
      m0.col(i) << cloud0.points[ni[i]].x / cloud0.points[ni[i]].z,
                   cloud0.points[ni[i]].y / cloud0.points[ni[i]].z,
                   1;
      m1.col(i) << cloud1.points[ni[i]].x / cloud1.points[ni[i]].z,
                   cloud1.points[ni[i]].y / cloud1.points[ni[i]].z,
                   1;
    }
    MatrixXf mz0(3, n_npts), mz1(3, n_npts);
    mz0 = zR0 * m0;
    mz1 = zR1 * m1;

    double x0 = uX_z(0);  double y0 = uX_z(1);  double z0 = uX_z(2);
    double x1 = uY_z(0);  double y1 = uY_z(1);  double z1 = uY_z(2);
    double u0 = mz0(0) / mz0(2);  double v0 = mz0(1) / mz0(2); 
    double u1 = mz1(0) / mz1(2);  double v1 = mz1(1) / mz1(2);

    /* coefficients of the quadratic b2*c^2 + b1*c + b0 = 0 in c = cos(psi) */
    double a1 = -v1*x0-v0*x1+u1*y0+u0*y1+(v0*u1-u0*v1)*(z1-z0);
    double a2 = -u1*x0+u0*x1-v1*y0+v0*y1-(u0*u1+v0*v1)*(z1-z0);
    double a3 =  v1*x1-u1*y1+v0*x0-u0*y0;
    double b2 = pow(a1, 2) + pow(a2, 2);
    double b1 = 2 * a1 * a3;
    double b0 = pow(a3, 2) - pow(a2, 2);

    // FIX: guard the divisions below -- a degenerate sample can make the
    // quadratic collapse (b2 == 0) or zero the sine denominator (a2 == 0).
    if (a2 == 0.0 || b2 == 0.0)
    {
      continue;
    }
    if (pow(b1, 2) - 4 * b2 * b0 < 0)
    {
      // no real yaw solution for this sample
      continue;
    }
    double c0 = (-b1 + sqrt(pow(b1, 2) - 4 * b2 * b0)) / (2 * b2);
    double c1 = (-b1 - sqrt(pow(b1, 2) - 4 * b2 * b0)) / (2 * b2);
    double s0 = - (a1 * c0 + a3) / a2;
    double s1 = - (a1 * c1 + a3) / a2;

    /* both quadratic roots are candidate yaws; only the first is evaluated
       (see the disabled two-solution loop below) */
    std::vector<double> psi;
    psi.push_back(atan2(s0, c0));
    psi.push_back(atan2(s1, c1));

    //for (int s = 0; s < ((psi[0] == psi[1])? 1: 2); ++s)
    for (int s = 0; s < 1; ++s)
    {
      Matrix3f Rpsi;
      rodrigues(z, psi[s], Rpsi);

      /* compose: undo frame-1 alignment, apply yaw, apply frame-0 alignment */
      Eigen::Affine3f c;
      c.linear() = zR1.transpose() * Rpsi * zR0;
      c.translation() = zR1.transpose() * (uY_z - Rpsi * uX_z);
      /* plausibility gate on forward motion; fall back to previous motion */
      if (c.translation()(2) > 0 || c.translation()(2) < -1.0) { c = prev_c; } // cont prev motion

      /* score the candidate over all correspondences */
      Cloud cloud0_tr;
      pcl::transformPointCloud(cloud0, cloud0_tr, c);
      int num_good = 0;
      for (size_t i = 0; i < cloud0.points.size(); ++i)
      {
        float l2_norm = pcl::squaredEuclideanDistance(cloud0_tr.points[i], cloud1.points[i]);
        num_good += (l2_norm < RANSAC_THRESH)? 1: 0;
      }
      if (num_good < RANSAC_INLIER_THRESH * cloud0.points.size()) continue;

      candidates.push_back(c);
      num_inliers.push_back(num_good);
    }
  }
_TIMER_END_;

  if (num_inliers.empty())
  {
    pTc = cv::Mat::eye(4, 4, CV_32F);
    ROS_WARN("Motion estimation failed (no good hypothesis)");
    return; 
  }

  /* keep the hypothesis with the most inliers */
  size_t good_idx = std::distance(num_inliers.begin(), std::max_element(num_inliers.begin(), num_inliers.end()));
  ROS_INFO("Good hypothesis = %lu of %lu", good_idx, num_inliers.size());

  /* re-evaluate the winner to collect its inlier indices */
  Cloud cloud0_tr;
  pcl::transformPointCloud(cloud0, cloud0_tr, candidates[good_idx]);
  for (size_t i = 0; i < cloud0.points.size(); ++i)
  {
    float l2_norm = pcl::squaredEuclideanDistance(cloud0_tr.points[i], cloud1.points[i]);
    if (l2_norm < RANSAC_THRESH) inliers.push_back(static_cast<int>(i));
  }

  /* convert to cv::Mat and invert: caller wants current-in-previous */
  cv::Mat cTp = cv::Mat::eye(4, 4, CV_32F);
  for (int i = 0; i < 3; ++i)
    for (int j = 0; j < 3; ++j)
      cTp.at<float>(i, j) = candidates[good_idx].linear()(i, j);
  for (int i = 0; i < 3; ++i)
    cTp.at<float>(i, 3) = candidates[good_idx].translation()[i];
  pTc = cTp.inv();
  prev_c = candidates[good_idx];
}

void VisOd::publishCurrentTF(const bool publish_path)
{
  ros::Time now = ros::Time::now();

  cv::Mat mTb = mTc_ * cTb_;
  tf::Vector3 position(mTb.at<float>(0, 3), mTb.at<float>(1, 3), mTb.at<float>(2, 3));
  tf::Matrix3x3 rotation;
  tf::Quaternion quaternion;
  for (int i = 0; i < 3; i++)
    for (int j = 0; j < 3; j++)
      rotation[i][j] = mTb.at<float>(i, j);
  rotation.getRotation(quaternion);
  
  tf::Transform pose_tf(quaternion, position);
  tf_pub_.sendTransform(tf::StampedTransform(pose_tf, now, "/map", "/rover/base_link"));

#if 0
  if (publish_path)
  {
    tf::Stamped<tf::Pose> pose_tf_stamped(pose_tf, now, "/rover/base_link");
    geometry_msgs::PoseStamped pose_msg;
    tf::poseStampedTFToMsg(pose_tf_stamped, pose_msg);
    path_.poses.push_back(pose_msg);
    path_.header.frame_id = "/map";
    path_.header.stamp = now;
    path_pub_.publish(path_);
  }
#endif
}

/*!
  Projects a camera-frame motion onto the ground plane: the transform is moved
  into the base frame, its out-of-plane components are zeroed, the in-plane
  rotation columns are re-normalized, and the result is mapped back to the
  camera frame.

  @param mTc  [in,out] 4x4 CV_32F camera pose, flattened to planar motion.
*/
void VisOd::applyPlanarApproximation(cv::Mat &mTc)
{
  /* work in the base frame */
  cv::Mat planar = mTc * cTb_;

  /* kill the out-of-plane translation and rotation coupling terms */
  planar.at<float>(2, 3) = 0;  // z
  planar.at<float>(2, 0) = planar.at<float>(2, 1) = 0;
  planar.at<float>(0, 2) = planar.at<float>(1, 2) = 0;
  planar.at<float>(2, 2) = 1;

  /* re-normalize the two in-plane rotation columns (NOTE(review): columns are
     scaled independently, not re-orthogonalized -- presumably adequate for
     small per-frame rotations; confirm) */
  for (int col = 0; col < 2; ++col)
  {
    const float scale = cv::norm(planar.col(col));
    planar.at<float>(0, col) /= scale;
    planar.at<float>(1, col) /= scale;
  }

  /* back to the camera frame */
  mTc = planar * cTb_.inv();
}

}  // namespace mrover

/*!
  Node entry point: initializes ROS, constructs the visual-odometry handler
  (all work happens inside its subscriber callbacks), and spins until shutdown.
*/
int main(int argc, char **argv)
{
  ros::init(argc, argv, "VisOd");
  ros::NodeHandle node;
  mrover::VisOd odometry_node(node);
  ros::spin();
  return 0;
}

