#include "feature_task.h"




// Return the number of point correspondences that are consistent with the given homography.
size_t CountInliersHomo(const std::vector<cv::Point2f>& imgpts_1,
                        const std::vector<cv::Point2f>& imgpts_2,
                        float thres_squared,
                        const cv::Matx33f& homo) {
  CHECK_EQ(imgpts_1.size(), imgpts_2.size()) << "points size is not equal";
  // Project the points of image 1 into image 2 through the homography, then
  // count how many land within the (squared) pixel threshold of their match.
  std::vector<cv::Point2f> projected;
  cv::perspectiveTransform(imgpts_1, projected, homo);
  size_t inlier_cnt = 0;
  for (size_t i = 0; i < imgpts_1.size(); ++i) {
    const float dx = projected[i].x - imgpts_2[i].x;
    const float dy = projected[i].y - imgpts_2[i].y;
    const float dist_squared = dx * dx + dy * dy;
    if (dist_squared < thres_squared) {
      ++inlier_cnt;
    }
  }
  VLOG(1) << "count inlier homo: " << inlier_cnt;
  return inlier_cnt;
}

// Masked variant: only correspondences with a non-zero mask entry are scored.
size_t CountInliersHomo(const std::vector<cv::Point2f>& imgpts_1,
                        const std::vector<cv::Point2f>& imgpts_2,
                        const std::vector<char>& mask,
                        float thres_squared,
                        const cv::Matx33f& homo) {
  CHECK_EQ(imgpts_1.size(), imgpts_2.size()) << "image point size not equal";
  CHECK_EQ(imgpts_1.size(), mask.size()) << "image point size and mask size not equal";
  // Keep the masked-in correspondences, then delegate to the unmasked overload.
  std::vector<cv::Point2f> masked_pts_1, masked_pts_2;
  masked_pts_1.reserve(imgpts_1.size());
  masked_pts_2.reserve(imgpts_2.size());
  for (size_t i = 0; i < mask.size(); ++i) {
    if (!mask[i]) {
      continue;
    }
    masked_pts_1.push_back(imgpts_1[i]);
    masked_pts_2.push_back(imgpts_2[i]);
  }
  return CountInliersHomo(masked_pts_1, masked_pts_2, thres_squared, homo);
}

// Count homography-consistent inliers among the masked-in keypoint matches.
// mask[i] selects matches[i]; queryIdx addresses keypts_1, trainIdx keypts_2.
size_t CountInliersHomo(const std::vector<cv::KeyPoint>& keypts_1,
                        const std::vector<cv::KeyPoint>& keypts_2,
                        const std::vector<cv::DMatch>& matches,
                        const std::vector<char>& mask,
                        float thres_squared,
                        const cv::Matx33f& homo) {
  // Guard against mismatched inputs, like the other overloads do; without
  // this, mask[i] below could read out of bounds.
  CHECK_EQ(matches.size(), mask.size()) << "matches size and mask size not equal";

  std::vector<cv::Point2f> inlier_imgpts_1, inlier_imgpts_2;
  inlier_imgpts_1.reserve(matches.size());
  inlier_imgpts_2.reserve(matches.size());
  for (size_t i = 0; i < matches.size(); ++i) {
    if (mask[i]) {
      inlier_imgpts_1.push_back(keypts_1[matches[i].queryIdx].pt);
      inlier_imgpts_2.push_back(keypts_2[matches[i].trainIdx].pt);
    }
  }

  return CountInliersHomo(inlier_imgpts_1, inlier_imgpts_2, thres_squared, homo);
}



// member functions for class FeatPair
// Default-construct with no features loaded and the default thresholds.
FeatPair::FeatPair()
    : feat_1_(nullptr),
      feat_2_(nullptr),
      ratio_(0.75),                      // ratio-test threshold for matching
      homo_thres_pixels_squared_(4.0) {  // squared pixel tolerance (2 px)
}

// Delegates cleanup of the owned feature pointers to clear().
FeatPair::~FeatPair() {
  clear();
}

// Release both owned Feature objects and reset the pointers.
// Bug fix: the null checks were inverted (`if (!feat_X_)`), so delete was
// only ever invoked on a null pointer and loaded features were leaked.
void FeatPair::clear() {
  if (feat_1_) {
    delete feat_1_;
    feat_1_ = nullptr;
  }
  if (feat_2_) {
    delete feat_2_;
    feat_2_ = nullptr;
  }
}

// A pair counts as empty unless both features have been loaded.
bool FeatPair::empty() {
  return feat_1_ == nullptr || feat_2_ == nullptr;
}

// Store the two feature objects in this pair; rejects null inputs.
// Any previously-held features are dropped via clear() first.
bool FeatPair::LoadFeat(Feature* feat_1, Feature* feat_2) {
  if (feat_1 == nullptr || feat_2 == nullptr) {
    LOG(ERROR) << "load feature pair error, feat_1: " << feat_1 << " feat_2: " << feat_2;
    return false;
  }
  if (feat_1->ptrFeature2D() == feat_2->ptrFeature2D()) {
    VLOG(1) << "load same features";
  }
  clear();  // release whatever was loaded before
  feat_1_ = feat_1;
  feat_2_ = feat_2;
  return true;
}

// Run feature extraction on both images; true only if both succeed.
// with_desc controls whether descriptors are computed alongside keypoints.
bool FeatPair::ExtractFeature(bool with_desc) {
  const bool ok = !empty()
      && feat_1_->ExtractFeature(with_desc)
      && feat_2_->ExtractFeature(with_desc);
  if (ok) {
    VLOG(1) << "extract feature success with desc: " << with_desc;
  } else {
    VLOG(1) << "extract feature fail with desc: " << with_desc;
  }
  return ok;
}

// Match descriptors of the two features (knn, k = 2), keep matches passing
// the ratio test, then mark inliers with a RANSAC fundamental-matrix fit.
// Fills matches_ and inliers_.
//   save_fund: store the estimated fundamental matrix in fundamental_.
//   show_img:  display the matched pair.
// Returns false for mixed feature types or when fewer than 10 matches
// survive the ratio test.
bool FeatPair::MatchFeat(bool save_fund, bool show_img) {
  if (feat_1_->ptrFeature2D() != feat_2_->ptrFeature2D()) {
    LOG(ERROR) << "different feature point, can not match";
    return false;
  }

  std::vector<std::vector<cv::DMatch>> vec_matches;
  auto matcher = feat_1_->matcher();
  matcher->knnMatch(feat_1_->desc(), feat_2_->desc(), vec_matches, 2);

  matches_.clear();
  matches_.reserve(vec_matches.size());
  // ratio test
  for (auto& match_pair : vec_matches) {
    // Bug fix: knnMatch may return fewer than two neighbors for a query
    // descriptor; indexing match_pair[1] unconditionally was out of bounds.
    if (match_pair.size() < 2) {
      continue;
    }
    if (match_pair[0].distance < ratio_ * match_pair[1].distance) {
      matches_.push_back(std::move(match_pair[0]));
    }
  }

  VLOG(1) << "matches size : " << matches_.size();
  if (matches_.size() < 10) {
    LOG(WARNING) << "too few matches : " << matches_.size();
    return false;
  }

  std::vector<cv::Point2f> imgpts_1, imgpts_2;
  imgpts_1.reserve(matches_.size());
  imgpts_2.reserve(matches_.size());
  const auto& keypts_1 = feat_1_->keypoint();
  const auto& keypts_2 = feat_2_->keypoint();
  for (const auto& match : matches_) {
    imgpts_1.push_back(keypts_1[match.queryIdx].pt);
    imgpts_2.push_back(keypts_2[match.trainIdx].pt);
  }

  // filter with fundamental matrix (RANSAC, 3 px threshold, 0.99 confidence)
  std::vector<unsigned char> cur_inliers;
  cv::Matx33f F = findFundamentalMat(imgpts_1, imgpts_2, cv::FM_RANSAC, 3.0, 0.99, cur_inliers);
  inliers_.clear();
  inliers_.reserve(cur_inliers.size());
  for (const auto i : cur_inliers) {
    inliers_.push_back(static_cast<char>(i));
  }
  VLOG(1) << "filter with fundamental matrix";

  if (save_fund) { fundamental_ = F; }
  if (show_img) { ShowImgPair(true); }

  return true;
}

// Drop the current match set and its inlier mask.
void FeatPair::ClearMatches() {
  matches_.clear();
  inliers_.clear();
}

// Sum the inlier mask (entries are expected to be 0/1) to get the count.
size_t FeatPair::CountInliers() const {
  CHECK_EQ(matches_.size(), inliers_.size()) << "matches and inliers size are not same!";
  size_t total = 0;
  for (const char flag : inliers_) {
    total += flag;
  }
  VLOG(1) << "total matches number: " << matches_.size() << " inliers: " << total;
  return total;
}

// Count matched inliers that are also consistent with the given homography,
// using the squared pixel threshold homo_thres_pixels_squared_.
size_t FeatPair::CountInliersHomography(const cv::Matx33f& homo) {
  return CountInliersHomo(feat_1_->keypoint(), feat_2_->keypoint(), matches_, inliers_, homo_thres_pixels_squared_, homo);
}

// Build a FLANN kd-tree over the keypoints of feature 2 and look up, for
// every keypoint of feature 1, its k nearest neighbors. Results
// (index, distance) are stored per feature-1 keypoint in feat_1_feat_2_.
// NOTE(review): distances are whatever FLANN_DIST_L2 yields here (commonly
// squared L2) — thresholds passed to GetNeighbor must use that convention;
// confirm against the OpenCV build in use.
bool FeatPair::FindNeighbor(int k) {
  const auto& key_1 = feat_1_->keypoint();
  const auto& key_2 = feat_2_->keypoint();
  // pack keypoint coordinates into Nx2 float matrices for cv::flann
  cv::Mat pt_key_1(key_1.size(), 2, CV_32FC1);
  cv::Mat pt_key_2(key_2.size(), 2, CV_32FC1);

  for (int i = 0; i < key_1.size(); ++i) {
    pt_key_1.ptr<float>(i)[0] = key_1[i].pt.x;
    pt_key_1.ptr<float>(i)[1] = key_1[i].pt.y;
  }

  for (int i = 0; i < key_2.size(); ++i) {
    pt_key_2.ptr<float>(i)[0] = key_2[i].pt.x;
    pt_key_2.ptr<float>(i)[1] = key_2[i].pt.y;
  }

  cv::flann::Index knn;
  knn.build(pt_key_2, cv::flann::KDTreeIndexParams(5), cvflann::FLANN_DIST_L2);
  cv::Mat indices, dists;
  knn.knnSearch(pt_key_1, indices, dists, k, cv::flann::SearchParams(64));

  feat_1_feat_2_.clear();
  feat_1_feat_2_.reserve(key_1.size());
  size_t cnt = 0;  // total neighbor entries, for the average logged below
  for (int i = 0; i < key_1.size(); ++i) {
    feat_1_feat_2_.push_back(std::vector<std::pair<int, float>>());
    auto& cur_vec = feat_1_feat_2_.back();
    for (int j = 0; j < dists.cols; ++j) {
      // push all keypoint2 around a keypoint1 into the vector
      cur_vec.emplace_back(indices.at<int>(i, j), dists.at<float>(i, j));
      ++cnt;
    }
  }
  VLOG(2) << "k: " << k << " average number of neighbors: " << float(cnt) / key_1.size();
  VLOG(1) << "find neighbors okay";
  return true;
}

// Copy the full neighbor table (feature-1 index -> [(feature-2 index, distance)]).
void FeatPair::GetFullNeighbor(std::vector<std::vector<std::pair<int, float>>>* feat_1_feat_2) const {
  *feat_1_feat_2 = feat_1_feat_2_;
}

// Restore a previously saved neighbor table (counterpart of GetFullNeighbor).
void FeatPair::LoadFullNeighbor(const std::vector<std::vector<std::pair<int, float>>>& feat_1_feat_2) {
  feat_1_feat_2_ = feat_1_feat_2;
}

// Collect, for every feature in image 1, the neighbors in image 2 whose
// distance falls below `thres`. Features with no neighbor are omitted.
void FeatPair::GetNeighbor(float thres, std::map<int, std::vector<int>> *map_feat1_feat2) const {
  map_feat1_feat2->clear();
  for (size_t idx = 0; idx < feat_1_feat_2_.size(); ++idx) {
    std::vector<int> close_ids;
    for (const auto& cand : feat_1_feat_2_[idx]) {
      if (cand.second >= thres) {
        break; // neighbors are stored nearest-first, so the rest are farther
      }
      close_ids.push_back(cand.first);
    }
    if (!close_ids.empty()) {
      (*map_feat1_feat2)[idx] = close_ids;
    }
  }
  VLOG(1) << "threshold: " << thres << " neighbor map size: " << map_feat1_feat2->size();
}

// Collect, for every feature in image 1, up to its k nearest neighbors in
// image 2. Features with no neighbor at all are omitted.
void FeatPair::GetNeighbor(int k, std::map<int, std::vector<int>> *map_feat1_feat2) const {
  map_feat1_feat2->clear();
  for (size_t idx = 0; idx < feat_1_feat_2_.size(); ++idx) {
    const auto& candidates = feat_1_feat_2_[idx];
    std::vector<int> nearest;
    for (int j = 0; j < static_cast<int>(candidates.size()) && j < k; ++j) {
      nearest.push_back(candidates[j].first);
    }
    if (!nearest.empty()) {
      (*map_feat1_feat2)[idx] = nearest;
    }
  }
  VLOG(1) << "neighbor map size: " << map_feat1_feat2->size();
}

// Display the image pair: with_match draws the current matches with the
// inlier mask applied; otherwise both keypoint sets are drawn over one image.
// Falls back to the keypoint view when the two features differ in type.
// Blocks until a key is pressed.
void FeatPair::ShowImgPair(bool with_match) {
  if (with_match) {
    if (feat_1_->ptrFeature2D() != feat_2_->ptrFeature2D()) {
      LOG(ERROR) << "different feature, can not show matches";
      with_match = false; // degrade to keypoint-only display
    }
  }

  cv::Mat img_out;
  if (with_match) {
    cv::drawMatches(feat_1_->img(),
                    feat_1_->keypoint(),
                    feat_2_->img(),
                    feat_2_->keypoint(),
                    matches_,
                    img_out,
                    cv::Scalar::all(-1),
                    cv::Scalar::all(-1),
                    inliers_); // mask: only inlier matches are drawn
  } else {
    // feature-1 keypoints in red (BGR), feature-2 keypoints in blue drawn
    // over the same output image
    cv::drawKeypoints(feat_1_->img(), feat_1_->keypoint(), img_out, cv::Scalar(0, 0, 255));
    cv::drawKeypoints(feat_2_->img(), feat_2_->keypoint(), img_out, cv::Scalar(255, 0, 0), cv::DrawMatchesFlags::DRAW_OVER_OUTIMG);
  }

  cv::imshow("matched image", img_out);
  cv::waitKey(0);
}

// Exchange this pair's second feature with feat_pair_2's first feature,
// e.g. turning <sift_1, brisk_1>/<sift_2, brisk_2> into
// <sift_1, sift_2>/<brisk_1, brisk_2>.
void FeatPair::Transform(FeatPair& feat_pair_2) {
  VLOG(2) << std::endl << "before transform:" << std::endl
    << "pair1 feat1: " << feat_1_ << " pair1 feat2: " << feat_2_ << std::endl
    << "pair2 feat1: " << feat_pair_2.feat_1() << " pair2 feat2: " << feat_pair_2.feat_2() << std::endl;

  Feature* const other_first = feat_pair_2.feat_1();
  feat_pair_2.feat_1() = feat_2_;
  feat_2_ = other_first;

  VLOG(2) << std::endl << "after transform:" << std::endl
    << "pair1 feat1: " << feat_1_ << " pair1 feat2: " << feat_2_ << std::endl
    << "pair2 feat1: " << feat_pair_2.feat_1() << " pair2 feat2: " << feat_pair_2.feat_2() << std::endl;
}

// Compute epipolar lines in image 2 for all keypoints of image 1 under the
// fundamental matrix `fund`; stores them in epilines_.
bool FeatPair::CalculateEpilines(const cv::Matx33f& fund) {
  if (feat_1_->ptrFeature2D() != feat_2_->ptrFeature2D()) {
    LOG(ERROR) << "different feature, calculate epilines fail";
    return false;
  }
  epilines_.clear();
  std::vector<cv::Point2f> pts_1;
  cv::KeyPoint::convert(feat_1_->keypoint(), pts_1);
  cv::computeCorrespondEpilines(pts_1, 1, fund, epilines_);
  return true;
}




// Construct with the default ratio used by guided matching.
ImgCollectionTask::ImgCollectionTask() : guided_ratio_(0.75) {}

// Empty: no manually-managed resources are released here.
ImgCollectionTask::~ImgCollectionTask() {}

// load image file names from folder or vectors
// Scan `folder` for files of each whitespace-separated extension in
// `img_types` (e.g. "png jpg") and collect their paths into img_files_.
// Returns false when no extension matched any file.
// NOTE(review): uses the MSVC-specific _findfirst/_findnext API, so this
// compiles on Windows only.
bool ImgCollectionTask::LoadImgFileNames(const std::string& folder, const std::string& img_types) {
  std::stringstream sstr(img_types);
  std::string type;
  img_files_.clear();
  // one directory scan per extension
  while (sstr >> type) {
    const std::string path{folder + "/*." + type};
    _finddata_t file;
    intptr_t lf;
    if ((lf = _findfirst(path.c_str(), &file)) == -1) {
      LOG(WARNING) << "no images found in " + path;
      continue; // no handle was opened, so nothing to close
    } else {
      do {
        const std::string img_file{folder + "/" + file.name};
        VLOG(2) << "find image file : " + img_file;
        img_files_.push_back(img_file);
      } while (_findnext(lf, &file) == 0);
    }
    _findclose(lf);
  }
  VLOG(1) << "find images in total: " << img_files_.size();
  return !img_files_.empty();
}

// Replace the image list with an explicit set of file paths.
bool ImgCollectionTask::LoadImgFileNames(const std::vector<std::string>& img_files) {
  img_files_.assign(img_files.begin(), img_files.end());
  return !img_files_.empty();
}

// Allocate a feature object, retrying until the allocation succeeds.
// NOTE(review): if memory stays exhausted this loop never terminates —
// consider a bounded retry count or letting bad_alloc propagate.
template<class FeatType>
FeatType* ImgCollectionTask::AllocateFeature() {
  FeatType* feat = nullptr;
  while (!feat) {
    try {
      feat = new FeatType;
    }
    catch (std::bad_alloc& exc) {
      LOG(WARNING) << "bad_alloc: " << exc.what();  // log and retry
    }
  }
  return feat;
}


// for repeatability task
// Store the distance thresholds used by the repeatability and guided tasks.
void ImgCollectionTask::LoadThresVec(const std::vector<float>& thres_vec) {
  thres_vec_ = thres_vec;
  VLOG(2) << "threshold number: " << thres_vec_.size();
}

// Repeatability experiment for one image: detect SIFT and BRISK keypoints on
// the same image and, for each threshold in thres_vec_, measure how often the
// two detectors fire near the same locations.
// feat1_sift selects which detector plays the role of "feature 1".
// Per threshold, the row repeat_results_[idx][i] receives:
//   [0] feature-1 keypoint count, [1] feature-2 keypoint count,
//   [2] fraction of feature-1 keypoints with at least one neighbor,
//   [3] average neighbor count among those keypoints.
// repeat_results_[idx] must already be sized (done in ImgCollectionRepeatTask).
void ImgCollectionTask::ImgRepeatTask(size_t idx, bool feat1_sift, bool show_img) {
  if (idx >= img_files_.size()) {
    LOG(ERROR) << "idx is over range: " << idx << "/" << img_files_.size();
    return;
  }

  // feat_pair stores both allocations via LoadFeat below
  FeatureSift* sift = AllocateFeature<FeatureSift>();
  FeatureBrisk* brisk = AllocateFeature<FeatureBrisk>();
  sift->LoadImg(img_files_[idx]);
  brisk->LoadImg(*sift); // same image, so neighbors measure detector overlap

  FeatPair feat_pair;
  if (feat1_sift) {
    feat_pair.LoadFeat(sift, brisk);
  } else {
    feat_pair.LoadFeat(brisk, sift);
  }
  feat_pair.ExtractFeature(false); // keypoints only, no descriptors
  feat_pair.FindNeighbor(10); // find at most ten neighbors

  auto& results = repeat_results_[idx];
  
  // thresholds are independent; each iteration writes only its own row
#pragma omp parallel for schedule(dynamic)
  for (int i = 0; i < thres_vec_.size(); ++i) {
    std::map<int, std::vector<int>> map_feat1_feat2;
    feat_pair.GetNeighbor(thres_vec_[i], &map_feat1_feat2);
    // do statistic
    float cnt = 0; // total neighbors over all feature-1 keypoints
    for (const auto& feat : map_feat1_feat2) {
      cnt += feat.second.size();
    }
    float total_with_neig(map_feat1_feat2.size());
    results[i][0] = feat_pair.feat_1_size();
    results[i][1] = feat_pair.feat_2_size();
    results[i][2] = total_with_neig / results[i][0];
    results[i][3] = cnt / total_with_neig;
  }

  VLOG(1) << "finish repeatability task for image: " << img_files_[idx];

  // show image
  if (show_img) {
    feat_pair.ShowImgPair(false);
  }
}

// Run the repeatability task over every image and stream, per threshold, the
// statistics averaged across images:
//   <threshold>  <feat1 count>  <feat2 count>  <coverage>  <avg neighbors>
void ImgCollectionTask::ImgCollectionRepeatTask(std::ostream& scout, bool feat1_sift, bool show_img) {
  LOG(INFO) << "feat1 is sift: " << feat1_sift;
  repeat_results_.resize(img_files_.size());
  for (int i = 0; i < img_files_.size(); ++i) {
    // 4 statistics per threshold; filled by ImgRepeatTask
    repeat_results_[i].resize(thres_vec_.size(), std::vector<float>(4));
    ImgRepeatTask(i, feat1_sift, show_img);
  }

  std::vector<std::vector<float>> results(thres_vec_.size(), std::vector<float>(4, 0.0));
  // statistic results: incremental mean over images,
  // new_mean = (x_i + mean * i) / (i + 1)
  for (int i = 0; i < repeat_results_.size(); ++i) {
    const auto& re_res = repeat_results_[i];
    for (int m = 0; m < re_res.size(); ++m) {
      for (int n = 0; n < re_res[m].size(); ++n) {
        results[m][n] = (re_res[m][n] + results[m][n] * i) / (i + 1.0);
      }
    }
  }

  // save result: one row per threshold
  for (int i = 0; i < results.size(); ++i) {
    const auto& res = results[i];
    scout << thres_vec_[i] << "  ";
    for (auto f : res) {
      scout << f << "  ";
    }
    scout << std::endl;
  }
  LOG(INFO) << "finish image collection repeatability task";
}

// for guided matching task
// Load the five ground-truth homography files (H1to2p .. H1to6p, image 1 to
// images 2..6) from `folder` into homography_, one 3x3 matrix per file.
// Returns false if any file is missing or fails to parse.
bool ImgCollectionTask::LoadHomograhpy(const std::string& folder) {
  const std::vector<std::string> files{"H1to2p", "H1to3p", "H1to4p", "H1to5p", "H1to6p"};
  homography_.clear();
  for (const auto& file : files) {
    const std::string homo{folder + "/" + file};
    // Plain input open. Fix: std::ios::_Nocreate is an MSVC-only extension
    // that is not standard C++ and is redundant for ifstream, which never
    // creates files anyway.
    std::ifstream ifs(homo);
    if (!ifs.is_open()) {
      LOG(ERROR) << "can not find file: " << homo;
      return false;
    }
    cv::Matx33f mat;
    ifs >> mat(0, 0) >> mat(0, 1) >> mat(0, 2)
      >> mat(1, 0) >> mat(1, 1) >> mat(1, 2)
      >> mat(2, 0) >> mat(2, 1) >> mat(2, 2);
    // Detect truncated or malformed files instead of silently storing
    // partially-read matrices.
    if (!ifs) {
      LOG(ERROR) << "can not find file: " << homo;
      return false;
    }
    homography_.push_back(mat);
  }
  VLOG(1) << "get homography mat okay : " << homography_.size();
  return true;
}

// Guided-matching experiment for image pair pairs_[idx]: SIFT matches plus
// the SIFT fundamental matrix guide BRISK matching — candidate BRISK pairs
// are restricted to spatial neighbors of matched SIFT keypoints and to
// points close to the corresponding epipolar line.
// Writes one 12-column statistics row per threshold into
// guided_results_[idx][i] (pre-sized by GuidedMatchTask).
// Returns false when idx is out of range.
bool ImgCollectionTask::SiftGuidedMatch(const int idx, bool show_img) {
  if (idx >= pairs_.size()) {
    LOG(ERROR) << "idx is over range: " << idx << "/" << pairs_.size();
    return false;
  }

  // the FeatPair objects below receive these raw pointers via LoadFeat;
  // FeatPair's destructor runs clear() over them
  FeatureSift* sift_1 = AllocateFeature<FeatureSift>();
  FeatureSift* sift_2 = AllocateFeature<FeatureSift>();
  FeatureBrisk* brisk_1 = AllocateFeature<FeatureBrisk>();
  FeatureBrisk* brisk_2 = AllocateFeature<FeatureBrisk>();
  
  sift_1->LoadImg(img_files_[pairs_[idx].first]);
  sift_2->LoadImg(img_files_[pairs_[idx].second]);
  brisk_1->LoadImg(*sift_1);
  brisk_2->LoadImg(*sift_2);

  // first construct <sift_1, brisk_1> <sift_2, brisk_2> pairs to find neighbors
  // (SIFT keypoint -> nearby BRISK keypoints within the same image)
  FeatPair feat_pair_1, feat_pair_2;
  feat_pair_1.LoadFeat(sift_1, brisk_1);
  feat_pair_2.LoadFeat(sift_2, brisk_2);

  feat_pair_1.ExtractFeature(true);
  feat_pair_2.ExtractFeature(true);
  feat_pair_1.FindNeighbor(10); // find at most ten neighbors
  feat_pair_2.FindNeighbor(10);
  
  // then transform to <sift_1, sift_2> <brisk_1, brisk_2> 
  feat_pair_1.Transform(feat_pair_2);
  feat_pair_1.MatchFeat(true, show_img);  // save fundamental
  feat_pair_2.MatchFeat(false, show_img); // not save fundamental
  // baseline statistics for plain (unguided) SIFT and BRISK matching
  uint32_t feat_1_matches = feat_pair_1.matches_size();
  uint32_t feat_2_matches = feat_pair_2.matches_size();
  uint32_t feat_1_matches_inlier = feat_pair_1.CountInliers();
  uint32_t feat_2_matches_inlier = feat_pair_2.CountInliers();
  uint32_t feat_1_matches_inlier_homo = feat_pair_1.CountInliersHomography(homography_[idx]);
  uint32_t feat_2_matches_inlier_homo = feat_pair_2.CountInliersHomography(homography_[idx]);

  // epipolar lines in image 2 for the BRISK keypoints of image 1, derived
  // from the SIFT fundamental matrix
  const auto& fund_pair_1 = feat_pair_1.fundamental();
  const auto& sift_matches = feat_pair_1.matches();
  feat_pair_2.CalculateEpilines(fund_pair_1);
  const auto& epilines_pair_2 = feat_pair_2.epilines();

  const auto& pair_2_feat_1 = feat_pair_2.feat_1()->keypoint();
  const auto& pair_2_feat_2 = feat_pair_2.feat_2()->keypoint();
  const auto& pair_2_desc_1 = feat_pair_2.feat_1()->desc();
  const auto& pair_2_desc_2 = feat_pair_2.feat_2()->desc();

  // each iteration handles one neighbor-distance threshold independently and
  // writes only its own guided_results_ row
#pragma omp parallel for schedule(dynamic)
  for (int i = 0; i < thres_vec_.size(); ++i) {
    std::vector<cv::DMatch> guided_matches;
    guided_matches.reserve(feat_pair_1.matches_size());
    std::vector<cv::Point2f> imgpts_1, imgpts_2;

    //feat_pair_2.ClearMatches();
    std::set<uint32_t> found_feat_1, found_feat_2;
    // feat1 guided matching feat2
    std::map<int, std::vector<int>> map_feat_pair_1, map_feat_pair_2;
    feat_pair_1.GetNeighbor(thres_vec_[i], &map_feat_pair_1);
    feat_pair_2.GetNeighbor(thres_vec_[i], &map_feat_pair_2);
    // start guided matching: for every SIFT match, try to pair the BRISK
    // keypoints found around its two endpoints
    for (const auto& match : sift_matches) {
      int min_dist_1 = INT_MAX; // minimum
      int min_dist_2 = INT_MAX; // second minimum
      cv::DMatch min_match_1, min_match_2;
      if (map_feat_pair_1.find(match.queryIdx) == map_feat_pair_1.end() ||
          map_feat_pair_2.find(match.trainIdx) == map_feat_pair_2.end()) {
        continue; // early judge
      }
      float dist2 = 0.0;
      // for each neighboring BRISK
      for (const int feature1 : map_feat_pair_1[match.queryIdx]) {
        if (found_feat_1.find(feature1) != found_feat_1.end()) {
          continue; // this feature has been included
        }
        const auto brisk_desc1 = pair_2_desc_1.row(feature1);
        // find possible match near the epipolar line of BRISK keypoint feature1
        const float a = epilines_pair_2[feature1][0];
        const float b = epilines_pair_2[feature1][1];
        const float c = epilines_pair_2[feature1][2];
        for (const int feature2 : map_feat_pair_2[match.trainIdx]) {
          if (found_feat_2.find(feature2) != found_feat_2.end()) {
            continue; // this point has been included 
          }
          // squared point-to-epiline distance; epsilon guards a*a+b*b == 0
          float dist1 = a*pair_2_feat_2[feature2].pt.x + b*pair_2_feat_2[feature2].pt.y + c;
          dist2 = dist1*dist1 / (a*a + b*b + 0.000001);
          if (dist2 > 9.0) {
            continue; // farther than 3 px from the epipolar line
          }
          const auto brisk_desc2 = pair_2_desc_2.row(feature2);
          int dist = cv::norm(brisk_desc1, brisk_desc2, cv::NORM_HAMMING);
          if (dist < min_dist_2) {
            min_dist_2 = dist;
            min_match_2 = cv::DMatch{feature1, feature2, static_cast<float>(dist)};
          }
        }
        // keep min_dist_1 / min_dist_2 ordered as best and second best
        if (min_dist_2 < min_dist_1) {
          using std::swap;
          swap(min_dist_1, min_dist_2);
          swap(min_match_1, min_match_2);
        }
      }
      if (min_dist_1 < INT_MAX) {
        if (min_dist_2 == INT_MAX || min_dist_1 < guided_ratio_*min_dist_2) {
          // if there are two points, do ratio test
          guided_matches.push_back(min_match_1);
          found_feat_1.insert(min_match_1.queryIdx);
          found_feat_2.insert(min_match_1.trainIdx);
          imgpts_1.push_back(pair_2_feat_1[min_match_1.queryIdx].pt);
          imgpts_2.push_back(pair_2_feat_2[min_match_1.trainIdx].pt);
        }
      }
    }
    VLOG(1) << "guided match okay: " << guided_matches.size() << " for threshold: " << thres_vec_[i];

    // score the guided matches: RANSAC fundamental-matrix inliers and
    // homography-consistent inliers
    std::vector<unsigned char> uc_inliers; 
    std::vector<char> inliers;
    uint32_t guided_inlier = 0;
    uint32_t guided_homo_inlier = 0;
    if (guided_matches.size() > 10) {
      findFundamentalMat(imgpts_1, imgpts_2, cv::FM_RANSAC, 3.0, 0.99, uc_inliers);
      inliers.reserve(uc_inliers.size());
      for (const auto i : uc_inliers) {
        guided_inlier += i;
        inliers.push_back(i);
      }
      guided_homo_inlier = CountInliersHomo(imgpts_1,
                                            imgpts_2,
                                            inliers,
                                            feat_pair_2.homo_thres_squared(),
                                            homography_[idx]);
    } else {
      LOG(WARNING) << "guided matches size less than 10";
    }
   
    VLOG(2) << "save result for thres: " << thres_vec_[i] << std::endl;

    // statistic results (12 columns; consumed by GuidedMatchTask)
    auto& results = guided_results_[idx][i];
    results.clear();
    results.push_back(thres_vec_[i]);
    results.push_back((feat_pair_1.feat_1_size() + feat_pair_1.feat_2_size())/2.0);
    results.push_back(feat_1_matches);
    results.push_back(feat_1_matches_inlier);
    results.push_back(feat_1_matches_inlier_homo);
    results.push_back((feat_pair_2.feat_1_size() + feat_pair_2.feat_2_size()) / 2.0);
    results.push_back(feat_2_matches);
    results.push_back(feat_2_matches_inlier);
    results.push_back(feat_2_matches_inlier_homo);
    // guided results
    results.push_back(guided_matches.size());
    results.push_back(guided_inlier);
    results.push_back(guided_homo_inlier);
  }
  VLOG(1) << "finish guided matching for image pair: " << idx << std::endl;
  return  true;
}

// for geometry guided matching
// Geometry-only guided matching for image pair pairs_[idx]: unlike
// SiftGuidedMatch, every BRISK keypoint of image 1 is matched against every
// BRISK keypoint of image 2 that lies close to its epipolar line (derived
// from the SIFT fundamental matrix); no spatial-neighbor maps are used.
// Writes one 12-column statistics row per threshold into
// guided_results_[idx][i]. Returns false when idx is out of range.
bool ImgCollectionTask::GeometryGuidedMatch(const int idx, bool show_img) {
  if (idx >= pairs_.size()) {
    LOG(ERROR) << "idx is over range: " << idx << "/" << pairs_.size();
    return false;
  }

  // the FeatPair objects below receive these raw pointers via LoadFeat
  FeatureSift* sift_1 = AllocateFeature<FeatureSift>();
  FeatureSift* sift_2 = AllocateFeature<FeatureSift>();
  FeatureBrisk* brisk_1 = AllocateFeature<FeatureBrisk>();
  FeatureBrisk* brisk_2 = AllocateFeature<FeatureBrisk>();

  sift_1->LoadImg(img_files_[pairs_[idx].first]);
  sift_2->LoadImg(img_files_[pairs_[idx].second]);
  brisk_1->LoadImg(*sift_1);
  brisk_2->LoadImg(*sift_2);

  // cross-image pairs directly: <sift_1, sift_2> and <brisk_1, brisk_2>
  FeatPair feat_pair_1, feat_pair_2;
  feat_pair_1.LoadFeat(sift_1, sift_2);
  feat_pair_2.LoadFeat(brisk_1, brisk_2);

  feat_pair_1.ExtractFeature(true);
  feat_pair_2.ExtractFeature(true);

  feat_pair_1.MatchFeat(true, show_img);  // save fundamental
  feat_pair_2.MatchFeat(false, show_img); // not save fundamental

  // baseline statistics for plain SIFT and BRISK matching
  uint32_t feat_1_matches = feat_pair_1.matches_size();
  uint32_t feat_2_matches = feat_pair_2.matches_size();
  uint32_t feat_1_matches_inlier = feat_pair_1.CountInliers();
  uint32_t feat_2_matches_inlier = feat_pair_2.CountInliers();
  uint32_t feat_1_matches_inlier_homo = feat_pair_1.CountInliersHomography(homography_[idx]);
  uint32_t feat_2_matches_inlier_homo = feat_pair_2.CountInliersHomography(homography_[idx]);
  
  // clear feat pair 2 matches
  feat_pair_2.ClearMatches();

  // epipolar lines in image 2 for the BRISK keypoints of image 1, from the
  // SIFT fundamental matrix
  const auto& fund_pair_1 = feat_pair_1.fundamental();
  
  feat_pair_2.CalculateEpilines(fund_pair_1);
  const auto& epilines_pair_2 = feat_pair_2.epilines();

  const auto& pair_2_feat_1 = feat_pair_2.feat_1()->keypoint();
  const auto& pair_2_feat_2 = feat_pair_2.feat_2()->keypoint();
  const auto& pair_2_desc_1 = feat_pair_2.feat_1()->desc();
  const auto& pair_2_desc_2 = feat_pair_2.feat_2()->desc();

  // each iteration writes only its own guided_results_ row.
  // NOTE(review): the guided search below does not read thres_vec_[i], so
  // every threshold row repeats the same matching — confirm this is intended.
#pragma omp parallel for schedule(dynamic)
  for (int i = 0; i < thres_vec_.size(); ++i) {
    std::set<uint32_t> found_feat_2;
    std::vector<cv::DMatch> guided_matches;
    std::vector<cv::Point2f> imgpts_1, imgpts_2;
    guided_matches.reserve(2 * feat_pair_1.matches_size());

    // start guided matching: exhaustive search constrained by the epiline
    for (int feature1 = 0; feature1 < pair_2_feat_1.size(); ++feature1) {
      int min_dist_1 = INT_MAX; // minimum
      int min_dist_2 = INT_MAX; // second minimum
      cv::DMatch min_match_1, min_match_2;
      float dist2 = 0.0;

      const float a = epilines_pair_2[feature1][0];
      const float b = epilines_pair_2[feature1][1];
      const float c = epilines_pair_2[feature1][2];
      const auto brisk_desc1 = pair_2_desc_1.row(feature1);

      // for each possible BRISK
      for (int feature2 = 0; feature2 < pair_2_feat_2.size(); ++feature2) {
        if (found_feat_2.find(feature2) != found_feat_2.end()) {
          continue; // this feature has been included
        }

        // find possible match: squared point-to-epiline distance, with an
        // epsilon guarding a*a+b*b == 0
        float dist1 = a*pair_2_feat_2[feature2].pt.x + b*pair_2_feat_2[feature2].pt.y + c;
        dist2 = dist1*dist1 / (a*a + b*b + 0.000001);
        if (dist2 > 9.0) {
          continue; // farther than 3 px from the epipolar line
        }
        const auto brisk_desc2 = pair_2_desc_2.row(feature2);
        int dist = cv::norm(brisk_desc1, brisk_desc2, cv::NORM_HAMMING);
        if (dist < min_dist_2) {
          min_dist_2 = dist;
          min_match_2 = cv::DMatch{feature1, feature2, static_cast<float>(dist)};
        }
        // keep min_dist_1 / min_dist_2 ordered as best and second best
        if (min_dist_2 < min_dist_1) {
          using std::swap;
          swap(min_dist_1, min_dist_2);
          swap(min_match_1, min_match_2);
        }
      }
      if (min_dist_1 < INT_MAX) {
        if (min_dist_2 == INT_MAX || min_dist_1 < guided_ratio_*min_dist_2) {
          // if there are two points, do ratio test
          guided_matches.push_back(min_match_1);
          found_feat_2.insert(min_match_1.trainIdx);
          imgpts_1.push_back(pair_2_feat_1[min_match_1.queryIdx].pt);
          imgpts_2.push_back(pair_2_feat_2[min_match_1.trainIdx].pt);
        }
      }
    }
    VLOG(1) << "guided match okay: " << guided_matches.size() << " for threshold: " << thres_vec_[i];

    // score the guided matches: RANSAC fundamental-matrix inliers and
    // homography-consistent inliers
    std::vector<unsigned char> uc_inliers;
    std::vector<char> inliers;
    uint32_t guided_inlier = 0;
    uint32_t guided_homo_inlier = 0;
    if (guided_matches.size() > 10) {
      findFundamentalMat(imgpts_1, imgpts_2, cv::FM_RANSAC, 3.0, 0.99, uc_inliers);
      inliers.reserve(uc_inliers.size());
      for (const auto i : uc_inliers) {
        guided_inlier += i;
        inliers.push_back(i);
      }
      guided_homo_inlier = CountInliersHomo(imgpts_1,
                                            imgpts_2,
                                            inliers,
                                            feat_pair_2.homo_thres_squared(),
                                            homography_[idx]);
    } else {
      LOG(WARNING) << "guided matches size less than 10";
    }

    VLOG(2) << "save result for thres: " << thres_vec_[i] << std::endl;
    // statistic results (12 columns; consumed by GuidedMatchTask)
    auto& results = guided_results_[idx][i];
    results.clear();
    results.push_back(thres_vec_[i]);
    results.push_back((feat_pair_1.feat_1_size() + feat_pair_1.feat_2_size()) / 2.0);
    results.push_back(feat_1_matches);
    results.push_back(feat_1_matches_inlier);
    results.push_back(feat_1_matches_inlier_homo);
    results.push_back((feat_pair_2.feat_1_size() + feat_pair_2.feat_2_size()) / 2.0);
    results.push_back(feat_2_matches);
    results.push_back(feat_2_matches_inlier);
    results.push_back(feat_2_matches_inlier_homo);
    // guided results
    results.push_back(guided_matches.size());
    results.push_back(guided_inlier);
    results.push_back(guided_homo_inlier);
  }
  return true;
}

// Pair image 0 with every other image, run the supplied guided-matching
// member function on each pair, then stream the per-threshold statistics
// averaged over all pairs to `sout` (one row of 12 values per threshold).
// Requires exactly one homography per pair to be loaded already.
bool ImgCollectionTask::GuidedMatchTask(bool(ImgCollectionTask::*ptr_guided_task)(const int, bool), std::ostream& sout, bool show_img) {
  pairs_.clear();
  for (int i = 1; i < img_files_.size(); ++i) {
    pairs_.push_back(std::make_pair(0, i));
  }
  VLOG(2) << "number of image pairs: " << pairs_.size();
  CHECK_EQ(pairs_.size(), homography_.size()) << "image pairs size is not equal to homo size";

  guided_results_.resize(pairs_.size());
  for (int i = 0; i < pairs_.size(); ++i) {
    guided_results_[i].resize(thres_vec_.size(), std::vector<float>(12, 0.0));
    (this->*ptr_guided_task)(i, show_img);
  }

  VLOG(1) << "finish guided match, start statistic results";

  // statistic results: incremental mean over pairs, per threshold row
  std::vector<std::vector<float>> results(thres_vec_.size(), std::vector<float>(12, 0.0));
  for (int i = 0; i < pairs_.size(); ++i) {
    for (int j = 0; j < thres_vec_.size(); ++j) {
      // Bug fix: the inner bound used results[i].size(), but `i` indexes
      // pairs_, which can exceed thres_vec_.size() and read past the end of
      // `results`. The row being updated is results[j].
      for (int k = 0; k < results[j].size(); ++k) {
        results[j][k] = (results[j][k] * i + guided_results_[i][j][k]) / (i + 1.0);
      }
    }
  }

  // save results
  for (int i = 0; i < results.size(); ++i) {
    for (int j = 0; j < results[i].size(); ++j) {
      sout << results[i][j] << " ";
    }
    sout << std::endl;
  }

  return true;
}

