
#include <caros/hand_detection/camera_processing.h>
#include <caros/hand_detection/global.h>
#include <caros/hand_detection/kernels.h>

#include <vector>
#include <math.h>

/// Constructs an uninitialized processor; call init() before processImg().
CameraProcessing::CameraProcessing() : is_init_(false), lost_track_(true)
{
  // Bug fix: the destructor unconditionally deletes m_tracker_, which is
  // only allocated in initTracker() (called from init()). Null it here so
  // destroying a never-initialized instance is well-defined.
  m_tracker_ = nullptr;
}

void CameraProcessing::init(unsigned int CAM_HEIGHT_, unsigned int CAM_WIDTH_)
{
  is_init_ = true;
  CAM_HEIGHT_ = CAM_HEIGHT_;
  CAM_WIDTH_ = CAM_WIDTH_;
  // MATs pre-allocation (save time later)
  m_blended_.create(CAM_HEIGHT_, CAM_WIDTH_, CV_8U);
  m_temp1_8U_.create(CAM_HEIGHT_, CAM_WIDTH_, CV_8U);
  m_temp0_32F_.create(CAM_HEIGHT_, CAM_WIDTH_, CV_32F);
  m_temp0_64F_.create(CAM_HEIGHT_, CAM_WIDTH_, CV_64F);
  m_flood_fill_8U_.create(CAM_HEIGHT_, CAM_WIDTH_, CV_8UC1);
  m_distance_8U_.create(CAM_HEIGHT_, CAM_WIDTH_, CV_8U);

  // vector pre allocation
  m_theta_vector_.resize(360, 0.0);
  m_theta_conv_.resize(360, 0.0);
  m_conv_kernel_ = std::vector<double>(GAUSSIAN_9);
  m_peaks_.resize(5, 0);

  // init kalman filter
  initTracker(0.5f);

  for (size_t i = 0; i < parametersTable.size(); i++)
    m_parameters_[parametersTable.at(i).m_id_] = parametersTable.at(i).m_default_;

  updateParameters();
}

/// Releases the Kalman tracker allocated in initTracker().
/// NOTE(review): assumes m_tracker_ is either null or owns a valid
/// cv::KalmanFilter — confirm the member is null-initialized, since the
/// constructor visible here does not set it.
CameraProcessing::~CameraProcessing()
{
  delete m_tracker_;
}

void CameraProcessing::updateParameters()
{
  m_output_ = m_parameters_["output"];
  m_blend_ = m_parameters_["blend"] / 100.0;
  m_sigma_ = 1.f / (2.f * std::pow((double)m_parameters_["sigma"], 2));
  m_median_ = m_parameters_["median"];
  m_threshold_ = m_parameters_["threshold"];
  m_search_multiplier_ = m_parameters_["search_multiplier"];
}

/// Stores a single raw parameter value and re-derives all cached
/// working values from the updated map.
/// @param _parameterId key into the parameter map (e.g. "blend").
/// @param _value       new raw integer value for that parameter.
void CameraProcessing::setParameters(const std::string& _parameterId, const int& _value)
{
  m_parameters_[_parameterId] = _value;
  // Keep the cached members (m_blend_, m_sigma_, ...) in sync.
  updateParameters();
}

namespace {
  /// Computes the square region of interest around a detected hand,
  /// clipped to the image boundaries.
  /// @param data    detection result; x/y is the center, radius the half-size.
  /// @param imgSize dimensions of the source image.
  /// @return a cv::Rect fully contained in [0, imgSize).
  cv::Rect getROI(processResult data, cv::Size imgSize){
    // Clamp the top-left corner to the image.
    int x = data.x - data.radius;
    if (x < 0)
    {
      x = 0;
    }
    int y = data.y - data.radius;
    if (y < 0)
    {
      y = 0;
    }
    // Clamp the extents to the right/bottom edges.
    int width = data.radius * 2;
    if (x + width > imgSize.width)
    {
      width = imgSize.width - x;
    }
    int height = data.radius * 2;
    if (y + height > imgSize.height)
    {
      height = imgSize.height - y;
    }
    // Bug fix: if the center lies outside the image (x or y already past the
    // far edge), the clamped extent goes negative, producing an invalid Rect.
    if (width < 0)
    {
      width = 0;
    }
    if (height < 0)
    {
      height = 0;
    }
    return cv::Rect(x, y, width, height);
  }
}

/// Runs the full per-frame hand-detection pipeline:
/// threshold -> (optional) mask around last detection -> distance transform
/// -> Kalman-tracked hand center/radius -> polar finger-peak detection
/// -> debug-output blending.
/// @param src input camera frame (read-only).
/// @param dst output frame; receives the blended visualization.
void CameraProcessing::processImg(const cv::Mat& src, cv::Mat& dst)
{
  if (!isInit())
  {
    std::cout << "[caros_leapmotion]: ImgProcesser not initialized" << std::endl;
    return;
  }

  // Binarize: pixels above the configured threshold are hand candidates.
  cv::threshold(src, m_flood_fill_8U_, m_threshold_, 255, cv::THRESH_BINARY);

  if (!lost_track_)
  {
    // Restrict the search to a circle (3x last radius) around the previous
    // detection.
    // Bug fix: cv::Mat(size, type) leaves the buffer UNINITIALIZED, so
    // pixels outside the drawn circle held garbage and the mask was
    // nondeterministic — it must start zeroed.
    cv::Mat mask = cv::Mat::zeros(src.size(), CV_8UC1);
    cv::circle(mask, cv::Point(latest_result_.x, latest_result_.y), latest_result_.radius * 3, cv::Scalar(255), -1);
    cv::bitwise_and(m_flood_fill_8U_, mask, m_flood_fill_8U_);
  }

  // compute distance transform (distance of each foreground pixel to the
  // nearest background pixel; maximal inside the palm)
  cv::distanceTransform(m_flood_fill_8U_, m_temp0_32F_, cv::DIST_L2, cv::DIST_MASK_PRECISE);
  m_temp0_32F_.convertTo(m_temp0_64F_, CV_64F);
  m_temp0_64F_.convertTo(m_distance_8U_, CV_8U);

  // find max of distance transform (center of the hand)
  double max;
  cv::Point maxDistance = cv::Point();
  cv::minMaxLoc(m_temp0_64F_, 0, &max, 0, &maxDistance);

  // apply kalman filtering on the center and radius of the hand
  // (overwrites maxDistance/max with the filtered prediction)
  trackCenter(maxDistance, max);

  // convert to polar coordinates (origin = center of the hand) and
  // accumulate distance-transform intensities per integer angle.
  // Bug fix: use an accurate pi (was 3.14f), which skewed angles by up to
  // a fraction of a degree near +/-180.
  constexpr double kPi = 3.14159265358979323846;
  const double* index;
  for (double& e : m_theta_vector_)
    e = 0;
  int theta;
  for (int i = 0; i < m_temp0_64F_.rows; i++)
  {
    index = m_temp0_64F_.ptr<double>(i);
    for (int j = 0; j < m_temp0_64F_.cols; j++)
    {
      if (index[j] > 5)  // ignore thin regions (noise / finger tips)
      {
        theta = std::floor(std::atan2((double)j - maxDistance.x, maxDistance.y - (double)i) * 180 / kPi) + 180;
        // Only the angular window (60, 300) is considered (excludes the
        // wrist direction); this also keeps the index in bounds.
        if (theta < 300 && theta > 60)
          m_theta_vector_[theta] = m_theta_vector_.at(theta) + index[j];
      }
    }
  }

  // smooth the theta densities vector (convolution with a gaussian kernel)
  int len = m_conv_kernel_.size();
  for (int i = len - 1; i < 360; i++)
  {
    m_theta_conv_[i] = 0;
    for (int j = 0; j < len; j++)
      m_theta_conv_[i] += m_theta_vector_.at(i - j) * m_conv_kernel_.at(j);
  }

  // find local maxima of the smoothed theta vector (fingers), at most 5
  int k = 0;
  for (int& e : m_peaks_)
    e = 0;
  for (int i = 2; i < 358; i++)
  {
    if (m_theta_conv_.at(i) > m_theta_conv_.at(i - 1) && m_theta_conv_.at(i) > m_theta_conv_.at(i - 2) &&
        m_theta_conv_.at(i) > m_theta_conv_.at(i + 1) && m_theta_conv_.at(i) > m_theta_conv_.at(i + 2) &&
        m_theta_conv_.at(i) > 0.15)
      m_peaks_[k++] = i;
    if (k >= 5)
      break;
  }

  // The detection is valid when the hand is at least ~5 px "thick".
  // (Simplified from "bool valid = true; if (valid) valid = max > 5;".)
  const bool valid = max > 5;

  // output selector (debug visualizations)
  if (m_output_ == 0)
    m_temp1_8U_ = m_flood_fill_8U_;
  else if (m_output_ == 1)
    m_temp1_8U_ = m_distance_8U_ * 2;
  else if (m_output_ == 2 && valid)
  {
    m_temp1_8U_ = m_distance_8U_ * 2;
    cv::circle(m_temp1_8U_, maxDistance, max * 3.f, cv::Scalar(255), 4);
  }
  else
    m_temp1_8U_ = src;

  // blend processed frame with original
  if (m_output_ < 5)
    cv::addWeighted(src, 1 - m_blend_, m_temp1_8U_, m_blend_, 0.0, m_blended_);
  else
    m_blended_ = m_temp1_8U_;

  m_blended_.copyTo(dst);
  // Remember this detection so the next frame can mask around it.
  latest_result_ = {maxDistance.x, maxDistance.y, max, valid};
  lost_track_ = !valid;

  // emit sendCenterPosition(maxDistance.x, maxDistance.y, max);
  // emit sendFingers(m_peaks_);
}

/// Runs one predict/correct cycle of the Kalman tracker on the measured
/// hand center and radius, then overwrites both inputs with the PREDICTED
/// values (not the corrected estimate).
/// @param _point  in: measured center; out: Kalman-predicted center.
/// @param _radius in: measured radius; out: Kalman-predicted radius.
void CameraProcessing::trackCenter(cv::Point& _point, double& _radius)
{
  // Pack the measurement vector [x, y, radius] (see initTracker()'s
  // measurementMatrix, which selects state indices 0, 1 and 4).
  m_tracker_measurement_.at<float>(0, 0) = (float)_point.x;
  m_tracker_measurement_.at<float>(1, 0) = (float)_point.y;
  m_tracker_measurement_.at<float>(2, 0) = (float)_radius;

  // Wall-clock time since the previous call, in seconds; used as the dT of
  // the constant-velocity motion model.
  double precTick = m_tracker_ticks_;
  m_tracker_ticks_ = (double)cv::getTickCount();
  double dT = (m_tracker_ticks_ - precTick) / cv::getTickFrequency();  // seconds

  // x += vx*dT, y += vy*dT (state indices 2 and 3 appear to be the
  // velocities — consistent with initTracker()'s matrix layout).
  m_tracker_->transitionMatrix.at<float>(0, 2) = dT;
  m_tracker_->transitionMatrix.at<float>(1, 3) = dT;

  m_tracker_prediction_ = m_tracker_->predict();
  cv::Mat estimate = m_tracker_->correct(m_tracker_measurement_);
  // NOTE(review): the corrected estimate is written back into the
  // measurement buffer, but the outputs below come from the *prediction*,
  // so these three writes are effectively unused — confirm intended.
  m_tracker_measurement_.at<float>(0, 0) = estimate.at<float>(0, 0);
  m_tracker_measurement_.at<float>(1, 0) = estimate.at<float>(1, 0);
  m_tracker_measurement_.at<float>(2, 0) = estimate.at<float>(4, 0);

  // Return the predicted center and radius to the caller.
  _point.x = (int)m_tracker_prediction_.at<float>(0, 0);
  _point.y = (int)m_tracker_prediction_.at<float>(1, 0);
  _radius = (double)m_tracker_prediction_.at<float>(4, 0);
}

/// Allocates and configures the Kalman filter used to track the hand.
/// State vector (5): [x, y, vx, vy, radius]; measurement vector (3):
/// [x, y, radius].
/// @param _noiseCov measurement noise covariance applied to the whole
///                  measurement-noise diagonal.
/// NOTE(review): calling init() twice would leak the previous tracker —
/// confirm init() is only ever called once, or delete m_tracker_ first.
void CameraProcessing::initTracker(double _noiseCov)
{
  m_tracker_ticks_ = 0;
  m_tracker_measurement_.create(3, 1, CV_32F);
  m_tracker_prediction_.create(5, 1, CV_32F);
  m_tracker_ = new cv::KalmanFilter(5, 3, 0);

  // Constant-velocity model; the dT-dependent entries (0,2) and (1,3) are
  // filled in per frame by trackCenter().
  cv::setIdentity(m_tracker_->transitionMatrix);

  // Initial state: image center, zero velocity, 100 px radius.
  m_tracker_->statePre.at<float>(0) = (float)CAM_WIDTH_ / 2.f;
  m_tracker_->statePre.at<float>(1) = (float)CAM_HEIGHT_ / 2.f;
  m_tracker_->statePre.at<float>(2) = 0.f;
  m_tracker_->statePre.at<float>(3) = 0.f;
  m_tracker_->statePre.at<float>(4) = 100.f;

  // Measurement matrix maps state -> measurement: picks x (0), y (1) and
  // radius (4). (The redundant setIdentity() that was immediately
  // overwritten by this zeros() assignment has been removed.)
  m_tracker_->measurementMatrix = cv::Mat::zeros(3, 5, CV_32F);
  m_tracker_->measurementMatrix.at<float>(0, 0) = 1.0f;
  m_tracker_->measurementMatrix.at<float>(1, 1) = 1.0f;
  m_tracker_->measurementMatrix.at<float>(2, 4) = 1.0f;

  // Process noise: low on positions/radius, higher on velocities.
  m_tracker_->processNoiseCov.at<float>(0, 0) = 0.1f;
  m_tracker_->processNoiseCov.at<float>(1, 1) = 0.1f;
  m_tracker_->processNoiseCov.at<float>(2, 2) = 2.f;
  m_tracker_->processNoiseCov.at<float>(3, 3) = 2.f;
  m_tracker_->processNoiseCov.at<float>(4, 4) = 0.1f;

  cv::setIdentity(m_tracker_->measurementNoiseCov, cv::Scalar::all(_noiseCov));
}
