/***************************************************************************//**
 * @file camera_array_matching.cpp
 * @brief Finds matching locations among images to solve the correspondence problem.
 *
 * @page camera_matching Camera Matching
 *
 * @authors Brian Fehrman and Scott Logan
 * 
 * @date April 17 2013
 * 
 * @section file_section File Information
 *
 * @details Implementation file for the matching functions
 *       which are used to solve the correspondence problem. Multiple solutions are
 *       available and currently include:
 * 
 *       1) Brute Force Template Matching
 * 
 *       2) Feature Based Matching
 * 
 *       This expects that the images captured for
 *       each camera will be stored in the format:
@verbatim
base_dir/cam_row#_col#/img_suf
@endverbatim

 *       where row# and col# specify the camera's position in the physical
 *       camera array. img_suf is the name of the image and should be the same
 *       for each image that is used. For instance, for a 2x2 array the expected
 *       image paths would be:
 
@verbatim
base_dir/cam_0_0/img_suf
base_dir/cam_0_1/img_suf
base_dir/cam_1_0/img_suf
base_dir/cam_1_1/img_suf
@endverbatim
 * 
 * The number of cameras used is specified in the settings file. An anchor cam is
 * also specified in this layout. The anchor cam image is compared to the other
 * images in the set to find matching points among the anchor cam and the other
 * images. The match points are saved to an XML file that can be used to
 * generate the depth maps.
 * 
 * @section settings_section Required Settings
 * 
@verbatim

[ "base_dir" ] - name of the base folder where the image sets are stored (string)
[ "img_suf" ] - common image name that is expected for each image (string)
[ "output_dir" ] - name of folder where the output matches should be stored (string)
[ "undistort" ] - would allow for images to be undistorted based on camera calibration information [currently not implemented] (int)
[ "matching_scheme" ] - specifies which matching scheme to use (int)
[ "anchor_cam" ] - specifies number of cameras, their layout, and the anchor cam (mat)
[ "template_size" ] - size of template used for brute-force matching (int)
[ "main_search" ] - number of pixels to search along direction of main camera offset (int)
[ "offset_search" ] - number of pixels to search along direction of non-expected camera offset and attempts to account for camera misalignment (int)
["surf_params"]["hessian_threshold"]  - hessian threshold for the SURF matching routine (double)
["surf_params"]["octaves"] - number of octaves to create for each layer (int)
["surf_params"]["layers"] - number of layers to create (int)
["surf_params"]["extended"] - SURF parameter (int)
["surf_params"]["upright"] - specifies if image is upright (int)
["flann_params"]["threshold"] - FLANN matching threshold (double)
@endverbatim
 * 
 * @section todo_bugs_modification_section Todo, Bugs, and Modifications
 *
 * @todo Add undistort functionality
 ******************************************************************************/

#include "camera_array_matching.h"

/***************************************************************************//**
 * @author Brian Fehrman and Scott Logan
 *
 * @par Description:
 * Performs matching on a given image set using the specified matching scheme. The
 * results are written to a file that can be read in by a depth map generation
 * routine. Scheme 0 is brute-force template matching; scheme 1 is SURF/FLANN
 * feature matching.
 *
 * @param[in] fs - FileNode pointing to the settings that are needed
 *
 ******************************************************************************/
void camera_array_match( FileNode fs )
{
   string base_dir;
   string img_suf;
   string output_dir;
   int undistort;          // read but currently unused - undistortion is not implemented
   int matching_scheme;    // 0 = template matching, 1 = feature matching
   Mat anchor_cam;         // camera layout matrix; the positive entry marks the anchor cam

   // template scheme settings
   int template_size;
   int main_search;
   int offset_search;

   // feature scheme settings
   CvSURFParams surf_params;
   double flann_threshold;
   
   fs[ "base_dir" ] >> base_dir;
   fs[ "img_suf" ] >> img_suf;
   fs[ "output_dir" ] >> output_dir;
   fs[ "undistort" ] >> undistort;
   fs[ "matching_scheme" ] >> matching_scheme;
   fs[ "anchor_cam" ] >> anchor_cam;

   fs[ "template_size" ] >> template_size;
   fs[ "main_search" ] >> main_search;
   fs[ "offset_search" ] >> offset_search;

   fs["surf_params"]["hessian_threshold"] >> surf_params.hessianThreshold;
   fs["surf_params"]["octaves"] >> surf_params.nOctaves;
   fs["surf_params"]["layers"] >> surf_params.nOctaveLayers;
   fs["surf_params"]["extended"] >> surf_params.extended;
   fs["surf_params"]["upright"] >> surf_params.upright;
   fs["flann_params"]["threshold"] >> flann_threshold;

   vector<Mat> gray_imgs;          // one grayscale image per camera, row-major order
   vector<Mat> matches;            // per-camera match maps (anchor camera skipped)
   vector<Point> search_direction; // search extents used for each comparison
   string img_base = base_dir + "/cam_";
   string out_base = output_dir;
   Point anchor_pos;               // (col, row) of the anchor camera in the array
   Mat anchor_img;
   int anchor_idx;
   FileStorage match_file;
   
   mkdir( out_base.c_str(), S_IRWXU|S_IRGRP|S_IXGRP );
   out_base += "/matches.xml";
   
   match_file.open( out_base, FileStorage::WRITE );
   
   // Load every camera image, convert it to grayscale, and note which
   // position holds the anchor camera.
   for( int row = 0; row < anchor_cam.rows; row++ )
   {
      for( int col = 0; col < anchor_cam.cols; col++ )
      {
         stringstream ss;
         Mat img_in;
         
         ss << img_base << row << "_" << col << "/" << img_suf;
      
         img_in = imread( ss.str() );
         
         // Guard against a missing/unreadable file; cvtColor would otherwise
         // crash on an empty Mat with no useful diagnostic.
         if( img_in.empty() )
         {
            cerr << "ERROR: Could not read image '" << ss.str() << "'" << endl;
            return;
         }
         
         cvtColor( img_in, img_in, CV_BGR2GRAY );
         switch(matching_scheme)
         {
         case 0:
             // Template matching works on float images normalized to [0,1].
             img_in.convertTo( img_in, CV_32F, 1/255.0 );
             break;
         case 1:
             // SURF expects 8-bit grayscale input.
             img_in.convertTo( img_in, CV_8U );
             break;
         }
         
         gray_imgs.push_back( img_in );
         
         if( anchor_cam.at< int >( row, col ) > 0 )
         {
            anchor_pos.x = col;
            anchor_pos.y = row;
         }
      }
   }
   
   // BUG FIX: the row (y) coordinate selects the row stride. The original
   // computed anchor_pos.x * cols + anchor_pos.x, picking the wrong image
   // whenever the anchor's row and column differ.
   anchor_idx = anchor_pos.y * anchor_cam.cols + anchor_pos.x;
   
   gray_imgs[ anchor_idx ].copyTo( anchor_img );
   
   imshow( "anchor", anchor_img );
   
   // Compare the anchor image against every other camera image.
   for( int row = 0; row < anchor_cam.rows; row++ )
   {
      for( int col = 0; col < anchor_cam.cols; col++ )
      {
         int flip_x = 0;   // request flip around the x axis (rows reversed)
         int flip_y = 0;   // request flip around the y axis (columns reversed)
         int idx = row * anchor_cam.cols + col;
         Mat comp_img = gray_imgs[ idx ];
         Mat match_curr = Mat::zeros( anchor_img.rows, anchor_img.cols, CV_32FC3 );
         Point search_region;
         
         //Don't compare anchor to itself
         if( row == anchor_pos.y && col == anchor_pos.x )
         {
            continue;
         }
         
         //Flip one of the images if needed so that the indexing for
         //the brute force method can always look in the increasing
         //X direction and/or Y direction.
         if( col == anchor_pos.x )
         {
            // Purely vertical camera offset: main disparity is along y.
            search_region.x = offset_search;
            search_region.y = main_search;
            
            // BUG FIX: a camera below the anchor needs a flip around the
            // x axis (flip_x -> flip code 0), matching the diagonal case
            // below; the original set flip_y here.
            if( row > anchor_pos.y )
            {
               flip_x = 1;
            }
         }
         else if( row == anchor_pos.y )
         {
            // Purely horizontal camera offset: main disparity is along x.
            search_region.x = main_search;
            search_region.y = offset_search;
            
            // BUG FIX: a camera right of the anchor needs a flip around the
            // y axis (flip_y -> flip code 1); the original set flip_x here.
            if( col > anchor_pos.x )
            {
               flip_y = 1;
            }
         }
         else 
         {  
            // Diagonal offset: disparity can be large along both axes.
            search_region.x = main_search;
            search_region.y = main_search;
            
            if( col > anchor_pos.x )
            {
               flip_y = 1;
            }
            
            if( row > anchor_pos.y )
            {
               flip_x = 1;  
            }
         }

         switch(matching_scheme)
         {
         case 0:
             template_matching( anchor_img, comp_img, template_size, search_region, match_curr, flip_x, flip_y );
             break;
         case 1:
             feature_matching( anchor_img, comp_img, surf_params, flann_threshold, match_curr );
             break;
         default:
             cerr << "ERROR: Unknown matching scheme '" << matching_scheme << '\'' << endl;
         }
         
         matches.push_back( match_curr );
         
         search_direction.push_back( search_region );
      }
   }
   
   match_file << "matches" << matches;
   match_file << "search_direction" << search_direction;
   
   match_file.release();
}



/***************************************************************************//**
 * @author Brian Fehrman
 *
 * @par Description:
 * Performs brute-force template matching on two images. Normalized-correlation
 * is used as the matching metric or score. The algorithm sets a "hit" threshold and a "max"
 * threshold. The number of potential matches with a score higher than the hit threshold
 * are tallied. If the number of scores above the hits threshold becomes too great then
 * the spot will be marked as "unsure" in an attempt to prevent false information
 * from being presented which could be more detrimental than no information. Additionally,
 * the match chosen must have a score greater than the max threshold that is defined. It
 * was found that 0.85 and 0.90 were good hit and max thresholds, respectively. These are
 * currently hard coded. Depending on the template_size and search region, this function
 * can take a very long time. The progress of the function is displayed for the user.
 *
 * @param[in] anchor_img - anchor image for matching (CV_32F grayscale)
 * @param[in] comp_img - image to which the anchor image is compared
 * @param[in] template_size - size of the matching template to use
 * @param[in] search_region - how many pixels to search along each direction
 * @param[in,out] matches - matrix where the matching information will be stored
 *                (per pixel: match x, match y, correlation score)
 * @param[in] flip_x - specifies if the images should be flipped around the x axis
 * @param[in] flip_y - specifies if the images should be flipped around the y axis
 *
 ******************************************************************************/
void template_matching( Mat anchor_img, Mat comp_img, int template_size, Point search_region, Mat& matches, int flip_x, int flip_y )
{
   int t_size_div_2 = template_size / 2;
   int nrows = anchor_img.rows;
   int ncols = anchor_img.cols;
   matches = Mat::zeros( nrows, ncols, CV_32FC3 );
   double hit_thresh = 0.85;   // score above this counts as a potential match
   double max_thresh = 0.90;   // best score must exceed this to be recorded
   const double proc_total = ( nrows - template_size ) / 100.0;
   int proc_curr = 0;
   
   //GaussianBlur( anchor_img, anchor_img, Size( 3, 3 ), 1 , 0 );
   //GaussianBlur( img_2, img_2, Size( 3, 3 ), 1 , 0 );
   
   // BUG FIX: a cv::Mat passed by value still shares its pixel buffer with
   // the caller, so the original in-place flip( img, img, ... ) permanently
   // corrupted the caller's images (notably the anchor image that is reused
   // for every camera pair). Flip into fresh buffers and rebind the local
   // headers instead; the caller's data is never touched.
   if( flip_x )
   {
      Mat anchor_flipped, comp_flipped;
      flip( anchor_img, anchor_flipped, 0 );
      flip( comp_img, comp_flipped, 0 );
      anchor_img = anchor_flipped;
      comp_img = comp_flipped;
   }
   if( flip_y )
   {
      Mat anchor_flipped, comp_flipped;
      flip( anchor_img, anchor_flipped, 1 );
      flip( comp_img, comp_flipped, 1 );
      anchor_img = anchor_flipped;
      comp_img = comp_flipped;
   }
   
   #pragma omp parallel for
   for( int row = 0; row < nrows - template_size; row++ )
   {
      for( int col = 0; col < ncols - template_size; col++ )
      {
         Mat des_temp;
         Mat search_temp;
         double max_response = -10000;
         double response = 0.0;
         double mean_anchor;
         double std_dev_anchor;
         double mean_comp;
         double std_dev_comp;
         Point max_loc;
         int num_hits = 0, num_tries = 0;
         
         //Create the desired template and compute its mean and standard deviation.
         //BUG FIX: rowRange()/colRange() are end-exclusive, so the upper bound is
         //row/col + template_size; the original "+ template_size - 1" silently
         //dropped the last row and column of every template.
         anchor_img.rowRange( row, row + template_size ).colRange( col, col + template_size ).copyTo( des_temp );
         mean_anchor = compute_mean( des_temp );
         std_dev_anchor = compute_std_dev( des_temp, mean_anchor );
         
         for( int search_y = row; ( search_y < ( nrows - template_size ) ) && ( search_y <  ( row + search_region.y ) ); search_y++ )
         {
            for( int search_x = col; ( search_x < ( ncols - template_size ) ) && ( search_x < ( col + search_region.x ) ); search_x++ )
            {
               comp_img.rowRange( search_y, search_y + template_size ).colRange( search_x, search_x + template_size ).copyTo( search_temp );
               mean_comp = compute_mean( search_temp );
               std_dev_comp = compute_std_dev( search_temp, mean_comp );
            
               response = normalized_correlation( des_temp, search_temp, mean_anchor, mean_comp, std_dev_anchor, std_dev_comp );
               
               if( response > hit_thresh )
               {
                  num_hits++;
                  
                  if( response > max_response )
                  {
                     max_response = response;
                     // Record the template center, not its corner.
                     max_loc.x = search_x + t_size_div_2;
                     max_loc.y = search_y + t_size_div_2;
                  }
               }
               
               num_tries++;
               
               // A perfect score cannot be beaten; stop searching early.
               if( response == 1 ) break;
            }
            
            if( response == 1 ) break;
         }
         
         // Only record the match when it clears the max threshold AND fewer
         // than half of the tried positions were hits (too many hits means
         // the region is ambiguous, so stay silent rather than guess).
         if( max_response > max_thresh && ( ( (float) num_hits / num_tries ) < 0.5 ) )
         {
            matches.at< Vec3f >( row + t_size_div_2, col + t_size_div_2 )[ 0 ] = max_loc.x;
            matches.at< Vec3f >( row + t_size_div_2, col + t_size_div_2 )[ 1 ] = max_loc.y;
            matches.at< Vec3f >( row + t_size_div_2, col + t_size_div_2 )[ 2 ] = max_response;
         }
      }
      
       // Progress counter is shared between OpenMP threads; guard the update.
       #pragma omp critical( dataupdate )
       {
         proc_curr++;
         cout << '\r' << "Total processed: " << setprecision(1) << fixed << setw(5) << proc_curr / proc_total << "%" << flush;
       }
   }

   cout << endl;
   
   // The match map was built in flipped coordinates; flip it back so its
   // indexing agrees with the caller's (unmodified) images.
   if( flip_x )
   {
      flip( matches, matches, 0 );
   }
   if( flip_y )
   {
      flip( matches, matches, 1 );
   }
}

/***************************************************************************//**
 * @author Brian Fehrman
 *
 * @par Description: Computes the mean of a given image/image region. The
 * region is assumed to be single-channel CV_32F.
 *
 * @param[in] img - the image/image region to compute the mean for
 * 
 * @returns mean of the image/image region
 *
 ******************************************************************************/
double compute_mean( Mat& img )
{
   double sum = 0.0;
   
   // Accumulate every pixel, walking each row through its raw float pointer.
   for( int r = 0; r < img.rows; r++ )
   {
      const float *row_ptr = img.ptr< float >( r );
      
      for( int c = 0; c < img.cols; c++ )
      {
         sum += row_ptr[ c ];
      }
   }
   
   return sum / ( img.rows * img.cols );
}

/***************************************************************************//**
 * @author Brian Fehrman
 *
 * @par Description: Computes the (population) standard deviation of a given
 * image/image region. The region is assumed to be single-channel CV_32F.
 *
 * @param[in] img - the image/image region to compute the standard deviation for
 * @param[in] mean - the mean of the image/image region
 * 
 * @returns standard deviation of the image/image region
 *
 ******************************************************************************/
double compute_std_dev( Mat& img, double mean )
{
   double sum_sq_diff = 0.0;
   
   // Sum the squared deviations from the mean, row by row through the raw
   // float pointer for each row.
   for( int r = 0; r < img.rows; r++ )
   {
      const float *row_ptr = img.ptr< float >( r );
      
      for( int c = 0; c < img.cols; c++ )
      {
         double diff = row_ptr[ c ] - mean;
         sum_sq_diff += diff * diff;
      }
   }
   
   // Population variance (divide by N), then take the square root.
   return sqrt( sum_sq_diff / ( img.rows * img.cols ) );
}

/***************************************************************************//**
 * @author Brian Fehrman
 *
 * @par Description: Computes the normalized correlation of two images/image
 * regions. Both regions are assumed to be the same size and single-channel
 * CV_32F.
 *
 * @param[in] img_1 - first image/image region
 * @param[in] img_2 - second image/image region
 * @param[in] mean_1 - mean of the first image/image region
 * @param[in] mean_2 - mean of the second image/image region
 * @param[in] std_dev_1 - standard deviation of the first image/image region
 * @param[in] std_dev_2 - standard deviation of the second image/image region
 * 
 * @returns score of normalized correlation of the image/image regions, 0-1 with 1
 * being a perfect match. Returns 0 when either region has zero variance
 * (the original divided by zero there and produced NaN).
 *
 ******************************************************************************/
double normalized_correlation( Mat& img_1, Mat& img_2, double mean_1, double mean_2,
                                 double std_dev_1, double std_dev_2 )
{
   int nrows = img_1.rows;
   int ncols = img_1.cols;
   double num = 0;

   // Denominator of the normalized correlation:
   //   sqrt( sum(d1^2) * sum(d2^2) ) = std_dev_1 * std_dev_2 * N
   // Computed directly in double; the original squared everything and took
   // sqrt afterwards, where ( nrows * ncols ) * ( nrows * ncols ) was an int
   // product that could overflow for large regions.
   double den = std_dev_1 * std_dev_2 * (double) nrows * (double) ncols;

   // A flat (zero-variance) region can never yield a meaningful score; bail
   // out rather than dividing by zero and propagating NaN to the caller.
   if( den == 0.0 )
   {
      return 0.0;
   }
      
   for( int row = 0; row < nrows; row++ )
   {
      for( int col = 0; col < ncols; col++ )
      {
         double diff_1 = img_1.at< float  >( row, col ) - mean_1;
         double diff_2 = img_2.at< float >( row, col ) - mean_2;
         num += ( diff_1 * diff_2 );
      }
   }
   
   return num / den;
}

/***************************************************************************//**
 * @author Scott Logan
 *
 * @par Description: Performs feature based matching on two regions using OpenCV's
 * built in SURF routine to find features and FLANN to find matching features.
 *
 * @param[in] anchor_img - anchor image for matching
 * @param[in] comp_img - image to which the anchor image is compared
 * @param[in] CvSURFParams - parameters needed for the SURF routine
 * @param[in] flann_threshold - threshold for determining matches
 * @param[out] - stores the matches found
 ******************************************************************************/
void feature_matching( const Mat &anchor_img, const Mat &comp_img, const CvSURFParams &params, const double &flann_threshold, Mat &matches )
{
  static SURF surf( params.hessianThreshold, params.nOctaves, params.nOctaveLayers, params.extended, params.upright );
  static FlannBasedMatcher flann;

  static bool have_anchor = false;
  static vector<KeyPoint> anchor_keyps;
  static Mat anchor_descs;

  vector<KeyPoint> comp_keyps;
  Mat comp_descs;
  std::vector<DMatch> flann_matches;

  if( !have_anchor )
  {
    cout << "SURFing anchor..." << flush;
    surf( anchor_img, Mat(), anchor_keyps, anchor_descs );
    cout << "done." << endl << ">Got " << anchor_keyps.size( ) << " keypoints" << endl << endl;
    have_anchor = true;
  }

  cout << "SURFing..." << flush;
  surf( comp_img, Mat(), comp_keyps, comp_descs );
  cout << "done." << endl << ">Got " << comp_keyps.size( ) << " keypoints" << endl << endl;

  cout << "FLANNing..." << flush;
  flann.match( anchor_descs, comp_descs, flann_matches );
  cout << "done." << endl << endl;

  cout << "Filtering Matches..." << flush;
  unsigned int flann_good = 0;
  for( unsigned int i = 0; i < flann_matches.size( ); i++ )
  {
    if( flann_matches[i].distance <= flann_threshold )
    {
      matches.at< Vec3f >( anchor_keyps[flann_matches[i].queryIdx].pt.y,  anchor_keyps[flann_matches[i].queryIdx].pt.x )[ 0 ] = comp_keyps[flann_matches[i].trainIdx].pt.x;
      matches.at< Vec3f >( anchor_keyps[flann_matches[i].queryIdx].pt.y,  anchor_keyps[flann_matches[i].queryIdx].pt.x )[ 1 ] = comp_keyps[flann_matches[i].trainIdx].pt.y;
      matches.at< Vec3f >( anchor_keyps[flann_matches[i].queryIdx].pt.y,  anchor_keyps[flann_matches[i].queryIdx].pt.x )[ 2 ] = 1 - flann_matches[i].distance;
      flann_good++;
    }
  }
  cout << "done." << endl << ">Got " << flann_good << " good matches" << endl << endl;
}

