#include <memory>
#include <random>
#include <set>
#include "gl_render.h"
#include "body.h"
#include "camera.h"
#include "region_model.h"

// Number of contour pixels taken on each side of a sampled center point when
// approximating the local contour normal from a short segment.
static constexpr int kContourNormalApproxRadius = 3;
// Contours with fewer pixels than this are discarded as noise.
static constexpr int kMinContourLength = 15;
// Maximum consecutive failed sampling attempts before a view is given up on.
static constexpr int kMaxPointSamplingTries = 100;
// Used with pixel_to_meter to derive the max depth difference tolerated
// between a contour point and neighboring pixels of another body.
static constexpr float kMaxSurfaceGradient = 10.0f;
// Side length (pixels) of the square render used for silhouette generation.
static constexpr int kImageSize = 2000;

// Silhouette-image pixel labels.
static constexpr uchar kBackgroundID = 0;
static constexpr uchar kMainBodyID = 255;
static constexpr uchar kDifferentBodyID = 120;

struct CompareSmallerVector3f {
  bool operator()(const Eigen::Vector3f& v1,
    const Eigen::Vector3f& v2) const {
    return v1[0] < v2[0] || (v1[0] == v2[0] && v1[1] < v2[1]) ||
      (v1[0] == v2[0] && v1[1] == v2[1] && v1[2] < v2[2]);
  }
};

// Recursively subdivides the spherical triangle (v1, v2, v3) n_divides times,
// inserting the resulting vertices (projected onto the unit sphere) into
// geodesic_points. The set deduplicates shared edge/corner vertices.
void SubdivideTriangle(
  const Eigen::Vector3f& v1, const Eigen::Vector3f& v2,
  const Eigen::Vector3f& v3, int n_divides,
  std::set<Eigen::Vector3f, CompareSmallerVector3f>* geodesic_points)
{
  // Base case: record the three corner vertices.
  if (n_divides == 0) {
    geodesic_points->insert(v1);
    geodesic_points->insert(v2);
    geodesic_points->insert(v3);
    return;
  }
  // Edge midpoints, re-projected onto the unit sphere.
  const Eigen::Vector3f m12{(v1 + v2).normalized()};
  const Eigen::Vector3f m13{(v1 + v3).normalized()};
  const Eigen::Vector3f m23{(v2 + v3).normalized()};
  // Recurse into the four sub-triangles.
  SubdivideTriangle(v1, m12, m13, n_divides - 1, geodesic_points);
  SubdivideTriangle(v2, m12, m23, n_divides - 1, geodesic_points);
  SubdivideTriangle(v3, m13, m23, n_divides - 1, geodesic_points);
  SubdivideTriangle(m12, m13, m23, n_divides - 1, geodesic_points);
}

void GenerateViewPoints(int n_divides, std::vector<Eigen::Vector3f>* view_points) {
  constexpr float x = 0.525731112119133606f;
  constexpr float z = 0.850650808352039932f;
  std::vector<Eigen::Vector3f> icosahedron_points{
      {-x, 0.0f, z}, {x, 0.0f, z},  {-x, 0.0f, -z}, {x, 0.0f, -z},
      {0.0f, z, x},  {0.0f, z, -x}, {0.0f, -z, x},  {0.0f, -z, -x},
      {z, x, 0.0f},  {-z, x, 0.0f}, {z, -x, 0.0f},  {-z, -x, 0.0f} };
  std::vector<std::array<int, 3>> icosahedron_ids{
      {0, 4, 1},  {0, 9, 4},  {9, 5, 4},  {4, 5, 8},  {4, 8, 1},
      {8, 10, 1}, {8, 3, 10}, {5, 3, 8},  {5, 2, 3},  {2, 7, 3},
      {7, 10, 3}, {7, 6, 10}, {7, 11, 6}, {11, 0, 6}, {0, 1, 6},
      {6, 1, 10}, {9, 0, 11}, {9, 11, 2}, {9, 2, 5},  {7, 2, 11} };

  std::set<Eigen::Vector3f, CompareSmallerVector3f> points_set;
  for (const auto& icosahedron_id : icosahedron_ids) {
    SubdivideTriangle(icosahedron_points[icosahedron_id[0]],
      icosahedron_points[icosahedron_id[1]],
      icosahedron_points[icosahedron_id[2]], n_divides,
      &points_set);
  }

  *view_points = std::vector<Eigen::Vector3f>(points_set.begin(), points_set.end());
}

// Builds one camera-to-body pose per view point: the camera sits at
// view_point * sphere_radius and its z-axis points at the body origin.
void GenerateViewPoses(
  const std::vector<Eigen::Vector3f>& view_points,
  float sphere_radius,
  std::vector<Pose>* camera2body_poses)
{
  const Eigen::Vector3f down{ 0.f, 0.f, -1.f };
  camera2body_poses->clear();
  for (const auto& view_point : view_points) {
    // Camera z-axis looks towards the body center.
    Eigen::Matrix3f rotation;
    rotation.col(2) = -view_point;
    if (view_point[0] == 0.f && view_point[1] == 0.f) {
      // Degenerate case: view direction parallel to `down`; pick x-axis freely.
      rotation.col(0) = Eigen::Vector3f(1, 0, 0);
    }
    else {
      rotation.col(0) = down.cross(-view_point).normalized();
    }
    // Complete the right-handed orthonormal basis.
    rotation.col(1) = rotation.col(2).cross(rotation.col(0));

    Pose pose;
    pose = Eigen::Translation<float, 3>{ view_point * sphere_radius };
    pose.rotate(rotation);
    camera2body_poses->push_back(std::move(pose));
  }
}

// Creates a renderer whose focal length is chosen so that a body of diameter
// `max_body_diameter` seen from `sphere_radius` away fills the image minus a
// safety boundary.
// NOTE(review): previously max_body_diameter was hard-coded to 0.2f; it is
// now a defaulted parameter so other body sizes can be handled. Ownership of
// the returned Render* passes to the caller.
Render* SetupRender(Body* body_ptr, float sphere_radius,
  float max_body_diameter = 0.2f) {
  const int safe_boundary = 20;
  // asinf gives the half-angle under which the body radius appears from the
  // camera; the focal length maps that angle onto half the usable image.
  float focal_length =
    0.5f * float(kImageSize - safe_boundary) /
    tanf(asinf(0.5f * max_body_diameter / sphere_radius));
  float principal_point = float(kImageSize) / 2.0f;
  Intrinsics intrinsics = {
    focal_length, focal_length,
    principal_point, principal_point,
    kImageSize, kImageSize };

  auto render = GLRender::Build(intrinsics);
  render->AddBody(body_ptr);
  return render;
}

// Extracts all silhouette contours, discards those shorter than
// kMinContourLength, verifies they are closed, and reports the total pixel
// length. Returns false only if a contour is not closed; a zero total length
// is reported through *pixel_contour_length with a warning.
bool GenerateValidContours(
  const cv::Mat& silhouette_image,
  std::vector<std::vector<cv::Point2i>>* contours,
  int* pixel_contour_length)
{
#if 0
  // Set everything except for main body to black
  int kMainBodyID = 1;
  cv::Mat main_body_silhouette_image;
  cv::Mat lookUpTable(1, 256, CV_8U, cv::Scalar(0));
  lookUpTable.data[kMainBodyID] = kMainBodyID;
  cv::LUT(silhouette_image, lookUpTable, main_body_silhouette_image);
#endif
  auto retr_mode = cv::RetrievalModes::RETR_LIST;
  //auto retr_mode = cv::RetrievalModes::RETR_EXTERNAL;
  cv::findContours(silhouette_image, *contours,
    retr_mode, cv::CHAIN_APPROX_NONE);

  // Filter contours that are too short
  contours->erase(std::remove_if(begin(*contours), end(*contours),
    [](const std::vector<cv::Point2i>& contour) {
      // size_t cast avoids a signed/unsigned comparison warning.
      return contour.size() < size_t(kMinContourLength);
    }),
    end(*contours));

  // Test if contours are closed (start and end pixel must be adjacent)
  for (auto& contour : *contours) {
    if (abs(contour.front().x - contour.back().x) > 1 ||
      abs(contour.front().y - contour.back().y) > 1) {
      std::cerr << "Contours are not closed." << std::endl;
      return false;
    }
  }

  // Calculate total pixel length of contour
  *pixel_contour_length = 0;
  for (auto& contour : *contours) {
    *pixel_contour_length += int(contour.size());
  }

  // Check if pixel length is greater zero
  if (*pixel_contour_length == 0) {
    std::cerr << "No valid contour in image." << std::endl;
  }
#if 0
  // Debug visualization only. Was enabled (`#if 1`), which blocked every call
  // on cv::waitKey() — keep disabled outside of manual debugging.
  cv::Mat buf(silhouette_image.size(), CV_8U);
  buf = 0;
  cv::drawContours(buf, *contours, -1, cv::Scalar::all(255));
  cv::Mat small_buff;
  cv::resize(buf, small_buff, buf.size() / 2);
  imshow("contour", small_buff);
  cv::waitKey();
#endif
  return true;
}

// Rejects a contour point if the neighboring pixels belonging to another body
// are, on average, significantly closer to the camera than the point itself
// (i.e. the contour is an occlusion boundary, not the body's own edge).
bool IsContourPointValid(
  float max_depth_difference,
  const cv::Point2i& image_coordinates,
  const Render* render)
{
  // 4-neighborhood of the candidate point.
  const std::vector<cv::Point2i> neighboring_points = {
      cv::Point2i(image_coordinates.x, image_coordinates.y + 1),
      cv::Point2i(image_coordinates.x, image_coordinates.y - 1),
      cv::Point2i(image_coordinates.x + 1, image_coordinates.y),
      cv::Point2i(image_coordinates.x - 1, image_coordinates.y),
  };

  const cv::Mat& silhouette_image = render->silhouette_image();

  // Average depth of neighboring pixels that belong to a different body.
  float sum_depth_neighboring_fixed_body_pixels = 0;
  int number_neighboring_fixed_body_pixels = 0;
  for (const auto& point : neighboring_points) {
    // Bug fix: guard against out-of-bounds access when the contour touches
    // the image border (cv::Mat::at performs no range check in release mode).
    if (point.x < 0 || point.y < 0 ||
      point.x >= silhouette_image.cols || point.y >= silhouette_image.rows)
      continue;
    if (silhouette_image.at<uchar>(point) == kDifferentBodyID) {
      sum_depth_neighboring_fixed_body_pixels += render->Depth(point);
      number_neighboring_fixed_body_pixels++;
    }
  }
  if (number_neighboring_fixed_body_pixels > 0) {
    // Invalid if the other body is in front of this point by more than the
    // allowed depth difference.
    if (sum_depth_neighboring_fixed_body_pixels /
      float(number_neighboring_fixed_body_pixels) <
      render->Depth(image_coordinates) - max_depth_difference)
      return false;
  }
  return true;
}

// Returns a uniformly random point from valid_contour_points.
// Bug fix: `generator() % size` is biased towards small indices whenever the
// generator range is not a multiple of size; std::uniform_int_distribution
// samples without modulo bias (<random> is already included).
cv::Point2i SampleContourPointCoordinate(
  const std::vector<cv::Point2i>& valid_contour_points,
  std::mt19937& generator)
{
  std::uniform_int_distribution<size_t> index_distribution{
      0, valid_contour_points.size() - 1 };
  return valid_contour_points[index_distribution(generator)];
}

// Collects the 2*kContourNormalApproxRadius+1 contour pixels centered on
// `center`, wrapping around the closed contour where necessary. Returns true
// only if the segment's endpoints are far enough apart for a stable normal
// estimate (rejects sharp corners / degenerate segments).
// Fix: `center` is read-only and is now passed by const reference.
bool CalculateContourSegment(
  const std::vector<std::vector<cv::Point2i>>& contours,
  const cv::Point2i& center,
  std::vector<cv::Point2i>* contour_segment)
{
  for (auto& contour : contours) {
    for (int idx = 0; idx < int(contour.size()); ++idx) {
      if (contour.at(idx) == center) {
        int start_idx = idx - kContourNormalApproxRadius;
        int end_idx = idx + kContourNormalApproxRadius;
        // Segment starts before the contour's first element: prepend the
        // wrapped-around tail, then continue from index 0.
        if (start_idx < 0) {
          contour_segment->insert(end(*contour_segment),
            end(contour) + start_idx, end(contour));
          start_idx = 0;
        }
        // Segment runs past the contour's last element: append up to the end,
        // then wrap to the beginning.
        if (end_idx >= int(contour.size())) {
          contour_segment->insert(end(*contour_segment),
            begin(contour) + start_idx, end(contour));
          start_idx = 0;
          end_idx = end_idx - int(contour.size());
        }
        contour_segment->insert(end(*contour_segment),
          begin(contour) + start_idx,
          begin(contour) + end_idx + 1);

        // Check quality of contour segment: endpoints must span at least the
        // approximation radius, otherwise the normal would be unreliable.
        float segment_distance = std::hypotf(
          float(contour_segment->back().x - contour_segment->front().x),
          float(contour_segment->back().y - contour_segment->front().y));
        return segment_distance > float(kContourNormalApproxRadius);
      }
    }
  }
  std::cerr << "Could not find point on contour" << std::endl;
  return false;
}

// Writes the contour pixel closest to (u, v) into (*u_contour, *v_contour).
// Outputs are untouched if `contours` is empty.
void FindClosestContourPoint(
  const std::vector<std::vector<cv::Point2i>>& contours, float u, float v,
  int* u_contour, int* v_contour) {
  float best_distance = std::numeric_limits<float>::max();
  for (const auto& contour : contours) {
    for (const auto& contour_point : contour) {
      const float distance =
        std::hypotf(float(contour_point.x) - u, float(contour_point.y) - v);
      if (distance >= best_distance) continue;
      best_distance = distance;
      *u_contour = contour_point.x;
      *v_contour = contour_point.y;
    }
  }
}

// Marches along the contour normal from `center` in both directions and
// measures, in meters, how far the silhouette extends inwards (foreground)
// and how far the background extends outwards before the silhouette is hit
// again.
void CalculateLineDistances(
  const Render* render,
  const std::vector<std::vector<cv::Point2i>>& contours,
  const cv::Point2i& center, const Eigen::Vector2f& normal,
  float pixel_to_meter, float* foreground_distance,
  float* background_distance)
{
  // Define which images are used to calculate line distances.
  // NOTE(review): both currently alias the same silhouette image; presumably
  // a placeholder for separate foreground/background renders — confirm.
  const auto& foreground_silhouette_image = render->silhouette_image();
  const auto& background_silhouette_image = render->silhouette_image();

  // Calculate starting positions and steps for both sides of the line.
  // The dominant axis advances one pixel per step so no pixel is skipped.
  float u_out = float(center.x) + 0.5f;
  float v_out = float(center.y) + 0.5f;
  float u_in = float(center.x) + 0.5f;
  float v_in = float(center.y) + 0.5f;
  float u_step, v_step;
  if (std::fabs(normal.y()) < std::fabs(normal.x())) {
    u_step = float(sgn(normal.x()));
    v_step = normal.y() / std::fabs(normal.x());
  }
  else {
    u_step = normal.x() / std::fabs(normal.y());
    v_step = float(sgn(normal.y()));
  }

  // Search for first inwards intersection with contour
  int u_in_endpoint, v_in_endpoint;
  while (true) {
    u_in -= u_step;
    v_in -= v_step;
    // Bug fix: this loop previously had no bounds check (the outwards loop
    // below does) and could read outside the image if the silhouette reaches
    // the border.
    if (int(u_in) < 0 || int(u_in) >= kImageSize || int(v_in) < 0 ||
      int(v_in) >= kImageSize) {
      *foreground_distance = std::numeric_limits<float>::max();
      break;
    }
    if (foreground_silhouette_image.at<uchar>(int(v_in), int(u_in)) !=
      kMainBodyID) {
      FindClosestContourPoint(contours, u_in + u_step - 0.5f,
        v_in + v_step - 0.5f, &u_in_endpoint,
        &v_in_endpoint);
      *foreground_distance =
        pixel_to_meter * hypotf(float(u_in_endpoint - center.x),
          float(v_in_endpoint - center.y));
      break;
    }
  }

  // Search for first outwards intersection with contour
  int u_out_endpoint, v_out_endpoint;
  while (true) {
    u_out += u_step;
    v_out += v_step;
    if (int(u_out) < 0 || int(u_out) >= kImageSize || int(v_out) < 0 ||
      int(v_out) >= kImageSize) {
      *background_distance = std::numeric_limits<float>::max();
      break;
    }
    if (background_silhouette_image.at<uchar>(int(v_out), int(u_out)) ==
      kMainBodyID) {
      FindClosestContourPoint(contours, u_out - 0.5f, v_out - 0.5f,
        &u_out_endpoint, &v_out_endpoint);
      *background_distance =
        pixel_to_meter * hypotf(float(u_out_endpoint - center.x),
          float(v_out_endpoint - center.y));
      break;
    }
  }
}

// Approximates the contour normal as the 90-degree rotation of the segment's
// end-to-end direction, returned as a unit vector.
Eigen::Vector2f ApproximateNormalVector(
  const std::vector<cv::Point2i>& contour_segment) {
  const float dx = float(contour_segment.back().x - contour_segment.front().x);
  const float dy = float(contour_segment.back().y - contour_segment.front().y);
  Eigen::Vector2f normal{ -dy, dx };
  return normal.normalized();
}

// Fills `data_points` by sampling valid contour points from the rendered
// silhouette and computing, for each sample, its 3D center, approximate
// normal, and foreground/background line distances (all in the body frame).
// Returns false if contour extraction fails; a degenerate view is signaled by
// *contour_length == 0 with a true return.
// Bug fix: `sphere_radius` was declared `int` while every caller and sibling
// function uses `float` — the implicit conversion truncated radii < 1 m to 0,
// zeroing pixel_to_meter and contour_length. Now declared `float`.
bool GeneratePointData(
  const Render* render,
  const Pose& camera2body_pose,
  float sphere_radius,
  bool use_random_seed,
  std::vector<RegionModel::DataPoint>* data_points,
  float* contour_length)
{
  // Compute silhouette
  const cv::Mat& silhouette_image{ render->silhouette_image() };

  // Generate contours
  int pixel_contour_length;
  std::vector<std::vector<cv::Point2i>> contours;
  if (!GenerateValidContours(silhouette_image, &contours,
    &pixel_contour_length))
    return false;
  if (pixel_contour_length == 0) {
    *contour_length = 0.0f;
    return true;
  }

  // Validate contours: keep only points that are not occlusion boundaries.
  std::vector<cv::Point2i> valid_contour_points;
  float pixel_to_meter = sphere_radius / render->intrinsics().fx;
  float max_depth_difference = pixel_to_meter * kMaxSurfaceGradient;
  for (auto& contour : contours) {
    for (auto& point : contour) {
      if (IsContourPointValid(max_depth_difference, point, render))
        valid_contour_points.push_back(point);
    }
  }
  *contour_length = float(valid_contour_points.size()) * pixel_to_meter;
  if (*contour_length == 0.0f) return true;

  // Set up generator (fixed seed by default for reproducible models)
  std::mt19937 generator{ 7 };
  if (use_random_seed)
    generator.seed(
      unsigned(std::chrono::system_clock::now().time_since_epoch().count()));

  // Generate DataPoints from valid contour points
  int n_tries = 0;
  for (auto data_point{ begin(*data_points) }; data_point != end(*data_points);) {
    // Give up on the whole view after too many consecutive failed samples.
    if (n_tries++ > kMaxPointSamplingTries) {
      *contour_length = 0.0f;
      return true;
    }

    // Randomly sample point on contour and calculate 3D center
    cv::Point2i center =
      SampleContourPointCoordinate(valid_contour_points, generator);
    Eigen::Vector3f center_f_camera{ render->PointVector(center) };
    data_point->center_f_body = camera2body_pose * center_f_camera;

    // Calculate contour segment and approximate normal vector
    std::vector<cv::Point2i> contour_segment;
    if (!CalculateContourSegment(contours, center, &contour_segment)) continue;
    Eigen::Vector2f normal = ApproximateNormalVector(contour_segment);
    Eigen::Vector3f normal_f_camera{ normal.x(), normal.y(), 0.0f };
    data_point->normal_f_body = camera2body_pose.rotation() * normal_f_camera;

    // Calculate foreground and background distance
    CalculateLineDistances(render, contours,
      center, normal, pixel_to_meter,
      &data_point->foreground_distance,
      &data_point->background_distance);
    data_point++;
    n_tries = 0;  // reset retry budget after every successful sample
  }

  return true;
}

// Builds a RegionModel by rendering the body's silhouette from evenly spaced
// viewpoints on a sphere and extracting contour data for each view.
// Returns the new model (ownership passes to the caller) or nullptr if point
// generation failed for any view.
// Bug fix: previously this always returned nullptr and leaked `model`; the
// `cancel` flag was computed but never used.
// NOTE(review): `model_path` is currently unused — presumably intended for
// saving/loading the model; confirm before removing.
RegionModel*
RegionModelBuilder::Build(Body* body, const std::string& model_path) {
  std::vector<Eigen::Vector3f> view_points;
  GenerateViewPoints(n_divides_, &view_points);

  std::vector<Pose> camera2body_poses;
  GenerateViewPoses(view_points, sphere_radius_, &camera2body_poses);

  std::unique_ptr<Render> render(SetupRender(body, sphere_radius_));

  // unique_ptr prevents a leak on early exit / cancel.
  auto model = std::make_unique<RegionModel>();
  model->views_.resize(view_points.size());

  bool cancel = false;

  for (size_t i = 0; i < camera2body_poses.size(); ++i) {
    // View direction = camera z-axis expressed in the body frame.
    model->views_[i].orientation =
      camera2body_poses[i].matrix().col(2).segment(0, 3);

    render->set_camera2world_pose(camera2body_poses[i]);
    render->RenderSilhouette();
    render->FetchSilhouetteImage();

    model->views_[i].data_points.resize(n_contour_points_);

    if (!GeneratePointData(
      render.get(),
      camera2body_poses[i],
      sphere_radius_,
      use_random_seed_,
      &model->views_[i].data_points,
      &model->views_[i].contour_length))
      cancel = true;
  }

  if (cancel) return nullptr;
  return model.release();
}