#include <Vision.h>
#include <cv.h>
#include <highgui.h>
#include <ctype.h>
#include <stdio.h>
#include <stdlib.h>
#include <luke.h>
#include <Generic.h>
#include <time.h>
#include <ctime>
#include <unistd.h>
#include <sys/time.h>

//Capture
#include <unistd.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <stdlib.h>

#include <linux/types.h>
#include <linux/videodev.h>

#define VIDEO_DEV "/dev/video0"
#define PI 3.14159265

#ifndef MAX
  #define MAX(a, b) (((a) > (b)) ? (a) : (b))
#endif


#ifdef CUDASURF
  #include "../SURFGPU-1.0.1/ipoint.h"
  #include "SURFGPU-1.0.1/surflib.h"
  #include "SURFGPU-1.0.1/kmeans.h"
  #include "SURFGPU-1.0.1/utils.h"
  #include <iostream>
#endif



// Construct a vision object.  Nothing is opened here; the capture device
// and image buffer are set up later by InitCam() / Initialize().
//
// BytesPerPixel  bytes per pixel of the raw capture frames (used to size
//                the frame buffer).
CVision::CVision( unsigned BytesPerPixel )
: mFrameBpp( BytesPerPixel )
, mCaptureDevice( 0 )
, mpImageBuffer( NULL )
, mExtract( 300, 2, 2, true )
, mMatchThreshold( 0.3 )
, mFilterBoxSize( 5 )
{
}

// Release everything acquired in InitCam(): the capture device file
// descriptor and the malloc()'ed frame buffer.
CVision::~CVision( void ) 
{
  if( 0 < mCaptureDevice )
  {
    close( mCaptureDevice );
  }

  // Fix: mpImageBuffer is allocated with malloc() in InitCam() but was
  // never released, leaking one full frame per CVision instance.
  // free(NULL) is a no-op, so no extra guard is strictly required, but
  // we also reset the pointer defensively.
  if( NULL != mpImageBuffer )
  {
    free( mpImageBuffer );
    mpImageBuffer = NULL;
  }
}

// Open and configure a Video4Linux (v1) capture device.
//
// Device  path of the video device node (e.g. "/dev/video0").
//
// Queries the driver capabilities, window and picture settings, then
// negotiates a palette/depth: grey (8/6/4 bit) for monochrome devices,
// RGB24 / RGB565 / RGB555 for colour devices.  On success a frame
// buffer of win.width * win.height * mFrameBpp bytes is allocated and
// the frame geometry is published in mFrameWidth / mFrameHeight.
SResult
CVision::InitCam( const char* Device ) 
{
  SResult Result;
  struct video_capability cap;
  struct video_window win;
  struct video_picture vpic;

  // edit by tim for displaying
  //cv::namedWindow( "Image",  1 );

  // Open read-only; Capture() fetches frames with plain read() calls.
  if (Result) 
  {
      mCaptureDevice = open( Device, O_RDONLY);
      Result.SetResult
      (
        !( mCaptureDevice < 0 )
      ) << " Incorrect device selected: " << Device;
  }

  if (Result) 
  {
      Result.SetResult
      (
        !( ioctl( mCaptureDevice, VIDIOCGCAP, &cap ) < 0 )
      ) << " No video for Linux device";
  }

  if (Result) 
  {
      Result.SetResult
      (
        !( ioctl( mCaptureDevice, VIDIOCGWIN, &win ) < 0 )
      ) << " No valid window found";
  }

  if (Result) 
  {
      Result.SetResult
      (
        !( ioctl( mCaptureDevice, VIDIOCGPICT, &vpic ) < 0 )
      ) << " No valid picture found";
  }

  // Monochrome device: fall back through 8 -> 6 -> 4 bit grey.
  if( Result && ( cap.type & VID_TYPE_MONOCHROME ) ) 
  {
    vpic.depth = 8;
    vpic.palette = VIDEO_PALETTE_GREY; /* 8bit grey */
    if( ioctl( mCaptureDevice, VIDIOCSPICT, &vpic) < 0) 
    {
      vpic.depth = 6;
      if( ioctl( mCaptureDevice, VIDIOCSPICT, &vpic ) < 0 ) 
      {
        vpic.depth = 4;
        Result.SetResult
        (
          !( ioctl( mCaptureDevice, VIDIOCSPICT, &vpic ) < 0 )
        ) << "no cap format";
      }
    }
  }

  // Colour device: fall back through RGB24 -> RGB565 -> RGB555.
  if( Result && !( cap.type & VID_TYPE_MONOCHROME ) )
  {
    vpic.depth = 24;
    vpic.palette = VIDEO_PALETTE_RGB24;

    if( ioctl( mCaptureDevice, VIDIOCSPICT, &vpic ) < 0 ) 
    {
      vpic.palette = VIDEO_PALETTE_RGB565;
      vpic.depth = 16;

      if( ioctl( mCaptureDevice, VIDIOCSPICT, &vpic ) == -1 ) 
      {
        vpic.palette = VIDEO_PALETTE_RGB555;
        vpic.depth = 15;
        Result.SetResult
        (
          !( ioctl( mCaptureDevice, VIDIOCSPICT, &vpic) == -1 )
        ) << "Unable to find a supported capture format.";
      }
    }
  }

  if( Result ) 
  {
    // Fix: release any buffer from a previous InitCam() call before
    // allocating a new one (re-initialisation used to leak the buffer).
    free( mpImageBuffer );

    // Reserve space for one raw frame.
    mpImageBuffer = reinterpret_cast<unsigned char*>
    ( 
      malloc( win.width * win.height * mFrameBpp ) 
    );
    
    // Make sure the result is set
    Result.SetResult
    (
      0 != mpImageBuffer 
    ) << "Out of memory";
  }

  // Fix: only publish the frame geometry on success; previously `win`
  // could be read uninitialised when any earlier step had failed.
  if( Result )
  {
    mFrameHeight = win.height;
    mFrameWidth = win.width;
  }
  
  return Result;
}

// Read one raw frame from the capture device into mpImageBuffer.
//
// Fix: the return value of read() was ignored, so a failed or short
// read (device unplugged, EOF, signal interruption) was silently
// reported as success while the buffer held stale or garbage data.
SResult
CVision::Capture() 
{
  SResult Result;
  const ssize_t ExpectedBytes =
    static_cast<ssize_t>( mFrameHeight * mFrameWidth * mFrameBpp );
  const ssize_t BytesRead =
    read( mCaptureDevice, mpImageBuffer, ExpectedBytes );

  Result.SetResult
  (
    BytesRead == ExpectedBytes
  ) << "Short or failed read from capture device";

  return Result;
}

// Initialize the vision pipeline: open the camera and load the
// reference ("match") image, then precompute its SURF descriptors.
//
// Width, Height  NOTE(review): currently unused by the body -- the
//                frame size comes from InitCam(); confirm intended.
// Path           file path of the reference image for imread().
//
// NOTE(review): InitCam() is called without arguments although it is
// defined above with a `const char* Device` parameter -- this relies on
// a default argument in the class declaration (not visible here).
SResult
CVision::Initialize(unsigned Width, unsigned Height, const char* Path) 
{
  SResult Result;

  if( Result ) 
  {
    Result.SetResult
    (
      InitCam()
    ) << "Failed to open capture device" << std::endl;
  }

  if( Result ) 
  {
    // imread with flag -1 loads the image as-is (channels unchanged);
    // data == NULL afterwards means the load failed.
    mMatchFrame.data = NULL;
    mMatchFrame = cv::imread( Path, -1 );
    Result.SetResult
    (
      NULL != mMatchFrame.data
    ) << "Unable to create MatchFrame" << std::endl;
  }
#ifdef CUDASURF
    // GPU path: convert to RGBA and extract reference interest points
    // into ref_ipts with the SURFGPU library.
    if (Result) 
    {
      // NOTE(review): cols/rows are passed in swapped order to the Mat
      // constructor (it expects rows, cols) -- confirm intended.
      cv::Mat MatchFrameRGBa(mMatchFrame.cols, mMatchFrame.rows, CV_8UC4);
      if( mMatchFrame.channels() >= 3 )
      {
        cv::cvtColor(mMatchFrame, MatchFrameRGBa, CV_RGB2RGBA);
      }
      else
      {
        printf(" kut number of channels: %d\n",  mMatchFrame.channels() );
      }
      surfDetDesMat(&MatchFrameRGBa, ref_ipts, false, 3, 4, 3, 0.004f);
    }
#else
    // CPU path: extract SURF keypoints/descriptors of the reference
    // image once, into mKeypoints / mDescriptors.
    if (Result) {
    // Create scalar for the SURF mask
    cv::Scalar scaler(255, 255, 255);
    cv::Mat Mask(mMatchFrame.size(), CV_8UC1, scaler);

    // Don't use a mask: zeroing the data pointer makes the extractor
    // treat it as "no mask".  NOTE(review): this bypasses cv::Mat's
    // ownership bookkeeping -- confirm it does not leak the mask buffer.
    Mask.data = 0;
    mMask = Mask;
    try 
    {
      cv::Mat MatchGrayFrame;
      cvtColor(mMatchFrame, MatchGrayFrame, CV_BGR2GRAY);
      mExtract(MatchGrayFrame, mMask, mKeypoints, mDescriptors);
    } 
    catch (cv::Exception& e) 
    {
      Result.SetResult
      (
        false
      ) << "Extract failed with exception: " << e.what();
    }
  }
#endif
  return Result;
}

// Process one camera frame: capture, extract SURF features, match them
// against the reference image, filter the matches, and estimate the
// homography between reference and frame.
//
// rStatus      receives EStatusOk, EStatusErrorNonFatal (extraction or
//              homography threw) or EStatusObjectLost (too few matches).
// rHomography  receives the 3x3 homography, normalized by element (1,1)
//              and sign-fixed on element (2,2).
// rFile        debug log stream passed to Generic::debugf.
//
// Per-stage timings are appended to mTExtract / mTMatch / mTHomography.
// NOTE(review): only tv_usec of the time difference is stored, so any
// stage that takes longer than one second wraps -- confirm intended.
SResult
CVision::Update( EStatus& rStatus, cv::Mat& rHomography, std::ofstream& rFile) 
{
    SResult Result;
    rStatus = EStatusOk;
    timeval Start, End, Diff;
#ifdef CUDASURF
    IpPairVec matches;
    IpVec ipts;
#endif
    gettimeofday(&Start, 0);

  //create result variables
  std::vector<cv::KeyPoint> Keypoints;
  std::vector<float> Descriptors;
  cv::Mat GrayFrame;
  
  //result vector for pair matching
  std::vector<Pair> MatchedPairs;

  // Grab one raw frame from the V4L device into mpImageBuffer.
  if (Result) 
  {
    Result.SetResult
    (
      Capture()
    ) << "Unable to grab and retrieve camera frame" << std::endl;
  }
  
  // Wrap the raw buffer without copying; Frame aliases mpImageBuffer.
  cv::Mat Frame( mFrameHeight, mFrameWidth, CV_8UC3, mpImageBuffer );

#ifdef CUDASURF
	cv::Mat RGBaFrame;
    // GPU path: SURFGPU needs RGBA input.
    cvtColor(Frame, RGBaFrame, CV_RGB2RGBA);    
    surfDetDesMat(&RGBaFrame, ipts, true, 3, 4, 2, 0.0004f);

    if (Result)
    {
      mCameraFrame = Frame;
    }


#else
    // CPU path: extract SURF keypoints/descriptors of the current frame.
    if (Result) 
    {
      try 
      {
        cvtColor(Frame, GrayFrame, CV_BGR2GRAY);
        mExtract(GrayFrame, mMask, Keypoints, Descriptors);
      } 
      catch (cv::Exception& e) 
      {
        Result.SetResult
        (
          false
      ) << "Extract failed with exception: " << e.what();
        rStatus = EStatusErrorNonFatal;
      }
    }
#endif
    // Record feature-extraction time.
    gettimeofday(&End, 0);
    Generic::SubstractTime(&Diff, &End, &Start);
    mTExtract.push_back(Diff.tv_usec);

    gettimeofday(&Start, 0);

#ifdef CUDASURF

    // GPU path: match interest points against the reference set and
    // copy the matched coordinates into point matrices.
    getMatches(ipts, ref_ipts, matches);
    cv::Mat SrcPoints( matches.size( ), 2, CV_32F );
    cv::Mat DstPoints( matches.size( ), 2, CV_32F );
    for(int index = 0; index < matches.size(); index++ )
    {
      SrcPoints.at<float>( index, 0 ) = matches.at(index).first.x;
      SrcPoints.at<float>( index, 1 ) = matches.at(index).first.y;

      DstPoints.at<float>( index, 0 ) = matches.at(index).second.x;
      DstPoints.at<float>( index, 1 ) = matches.at(index).second.y;
    }

    if(matches.size() == 0)
      printf("No matches found");
#else  
  // find pairs between reference descriptors and frame descriptors
  if (Result) 
  {
    try 
    {
      Result.SetResult
      (
        FlannFindPairs
        (
          mDescriptors
        , Descriptors
        , mExtract.descriptorSize()
        , MatchedPairs
        )
      ) << "Find Flann pairs failed" << std::endl;
    } 
    catch (cv::Exception& e) 
    {
        Result.SetResult
        (
          false
        ) << "Find Flann pairs failed with exception: " << e.what();
    }
  }

  // Temporary (unfiltered) point matrices; T = "temporary".
  cv::Mat TSrcPoints(MatchedPairs.size(), 2, CV_32F);
  cv::Mat TDstPoints(MatchedPairs.size(), 2, CV_32F);

  mMatchedKeypoints.clear();

  // Sum values used to calculate the average of the destination points,
  // plus their bounding box and the dominant motion direction.
  unsigned SumX     = 0, SumY     = 0;
  unsigned AverageX = 0, AverageY = 0;
  int X_direction=0, Y_direction=0;
  // NOTE(review): 640/480 hard-codes the expected frame size -- should
  // presumably be mFrameWidth/mFrameHeight; confirm.
  unsigned MinX     = 640, MaxX   = 0, MinY = 480, MaxY = 0;
  for( unsigned i = 0; i < MatchedPairs.size() && Result; i++ ) 
  {
    // Reference-image keypoint of this match.
    TSrcPoints.at<float>(i, 0) = mKeypoints[ MatchedPairs[ i ].DistanceRowIndex ].pt.x;
    TSrcPoints.at<float>(i, 1) = mKeypoints[ MatchedPairs[ i ].DistanceRowIndex ].pt.y;

    // Source points 
    TDstPoints.at<float>(i, 0) = Keypoints [ MatchedPairs[ i ].IndexValue ].pt.x;
    SumX += Keypoints [ MatchedPairs[ i ].IndexValue ].pt.x;
    MinX = 
    ( 
      MinX < Keypoints [ MatchedPairs[ i ].IndexValue ].pt.x 
    ? MinX 
    : Keypoints [ MatchedPairs[ i ].IndexValue ].pt.x 
    );
    MaxX = 
    ( 
      MaxX > Keypoints [ MatchedPairs[ i ].IndexValue ].pt.x 
    ? MaxX 
    : Keypoints [ MatchedPairs[ i ].IndexValue ].pt.x 
    );

    TDstPoints.at<float>(i, 1) = Keypoints [ MatchedPairs[ i ].IndexValue ].pt.y;
    SumY += Keypoints [ MatchedPairs[ i ].IndexValue ].pt.y;
    MinY = 
    ( 
      MinY < Keypoints [ MatchedPairs[ i ].IndexValue ].pt.y 
    ? MinY 
    : Keypoints [ MatchedPairs[ i ].IndexValue ].pt.y 
    );
    MaxY = 
    ( 
      MaxY > Keypoints [ MatchedPairs[ i ].IndexValue ].pt.y 
    ? MaxY 
    : Keypoints [ MatchedPairs[ i ].IndexValue ].pt.y 
    );
    // sum all the differences in y values of the matches (including sign)
    // to obtain a majority-vote motion direction per axis.
    if(TDstPoints.at<float>(i, 0)-TSrcPoints.at<float>(i, 0)<0){
        X_direction--;
    }
    else{
        X_direction++;
    }
    if(TDstPoints.at<float>(i, 1)-TSrcPoints.at<float>(i, 1)<0){
        Y_direction--;
    }
    else{
        Y_direction++;
    }

    // Debug statements 
    Generic::debugf
    (
    "(%f,%f)=>(%f,%f)\n"
    , TSrcPoints.at<float>(i, 0)
    , TSrcPoints.at<float>(i, 1)
    , TDstPoints.at<float>(i, 0)
    , TDstPoints.at<float>(i, 1)
    );
  }

  // Centroid of the destination points (guard against divide-by-zero).
  if( Result && ( MatchedPairs.size( ) > 0 ) )
  {
    AverageX = SumX / MatchedPairs.size( );
    AverageY = SumY / MatchedPairs.size( );
  }
  
  // Outlier threshold: a fraction (mMatchThreshold) of the largest
  // deviation from the centroid per axis.
  unsigned ThresholdX = MAX( AverageX - MinX, MaxX - AverageX );
  unsigned ThresholdY = MAX( AverageY - MinY, MaxY - AverageY );

  ThresholdX *= mMatchThreshold;
  ThresholdY *= mMatchThreshold;
    
  // Filtered point matrices; only the first `index` rows get filled.
  cv::Mat SrcPoints( MatchedPairs.size( ), 2, CV_32F );
  cv::Mat DstPoints( MatchedPairs.size( ), 2, CV_32F );
  unsigned index = 0;
  // Keep a match only if it is close to the centroid, the spread is
  // plausible, and its motion agrees with the majority direction.
  // NOTE(review): abs() on a float expression may resolve to the
  // integer overload here, truncating the fraction -- confirm
  // std::fabs was not intended.
  for( unsigned i = 0; i < TSrcPoints.rows && Result; i++ )
  {
    if( abs( TDstPoints.at<float>( i, 0 ) - AverageX ) < ThresholdX &&
        abs( TDstPoints.at<float>( i, 1 ) - AverageY ) < ThresholdY &&
        ThresholdX < 320 &&
        ThresholdY < 240 &&
        ( ( TDstPoints.at<float>( i, 1 )-TSrcPoints.at<float>( i, 1 ) < 0 && Y_direction < 0 ) ||
          ( TDstPoints.at<float>( i, 1 )-TSrcPoints.at<float>( i, 1 ) >= 0 && Y_direction >= 0 )   ) &&
        ( ( TDstPoints.at<float>( i, 0 )-TSrcPoints.at<float>( i, 0 ) < 0 && X_direction < 0 ) ||
          ( TDstPoints.at<float>( i, 0 )-TSrcPoints.at<float>( i, 0 ) >= 0 && X_direction >= 0 ) ) )
    {
      SrcPoints.at<float>( index, 0 ) = TSrcPoints.at<float>( i, 0 );
      SrcPoints.at<float>( index, 1 ) = TSrcPoints.at<float>( i, 1 );

      DstPoints.at<float>( index, 0 ) = TDstPoints.at<float>( i, 0 );
      DstPoints.at<float>( index, 1 ) = TDstPoints.at<float>( i, 1 );
      
      mMatchedKeypoints.push_back( Keypoints[ MatchedPairs[ i ].IndexValue ] );
      
      index++;
    }
  }
  
  // At least 7 surviving matches are required to trust the frame.
  if (Result) 
  {
    Result.SetResult
    (
      index > 6
    ) << "Not enough points, skipping frame" << std::endl;
    
    if( !Result )
    {
      rStatus = EStatusObjectLost;
    }
  }

    // Shrink the matrices to the filled rows.
    // NOTE(review): writing Mat::rows directly bypasses OpenCV's header
    // bookkeeping, and `index - 1` discards the last valid match --
    // confirm this off-by-one is intended.
    if( Result )
    {
      SrcPoints.rows = index - 1;
      DstPoints.rows = index - 1;
    }
    if (Result) 
    {
      mCameraFrame = Frame;
    }
#endif    
    // Record matching time.
    gettimeofday(&End, 0);
    Generic::SubstractTime(&Diff, &End, &Start);
    mTMatch.push_back(Diff.tv_usec);
    gettimeofday(&Start, 0);

    // Estimate the homography with RANSAC (3.0 px reprojection error),
    // then log it raw, normalized and sign-fixed.
    if (Result) 
    {
      try 
      {
       rHomography = cv::findHomography(SrcPoints, DstPoints,CV_RANSAC, 3.0);
        Generic::debugf
        (
          rFile
        , "H:     [ %f ],\t [ %f ], [ %f ] \n" \
          "       [ %f ],\t [ %f ], [ %f ] \n" \
          "       [ %f ],\t [ %f ], [ %f ] \n"
        , rHomography.at<double>(0, 0)
        , rHomography.at<double>(0, 1)
        , rHomography.at<double>(0, 2)
        , rHomography.at<double>(1, 0)
        , rHomography.at<double>(1, 1)
        , rHomography.at<double>(1, 2)
        , rHomography.at<double>(2, 0)
        , rHomography.at<double>(2, 1)
        , rHomography.at<double>(2, 2)
        );

      // Normalize results
      rHomography /= rHomography.at<double>(1, 1);
      Generic::debugf
      (
        rFile
      , "H:     [ %f ],\t [ %f ], [ %f ] \n" \
        "       [ %f ],\t [ %f ], [ %f ] \n" \
        "       [ %f ],\t [ %f ], [ %f ] \n"
      , rHomography.at<double>(0, 0)
      , rHomography.at<double>(0, 1)
      , rHomography.at<double>(0, 2)
      , rHomography.at<double>(1, 0)
      , rHomography.at<double>(1, 1)
      , rHomography.at<double>(1, 2)
      , rHomography.at<double>(2, 0)
      , rHomography.at<double>(2, 1)
      , rHomography.at<double>(2, 2)
      );

      // Sign extend results
      rHomography *= (rHomography.at<double>(2, 2) >= 0 ? 1 : -1);
      Generic::debugf
      (
        rFile
      , "H:     [ %f ],\t [ %f ], [ %f ] \n" \
        "       [ %f ],\t [ %f ], [ %f ] \n" \
        "       [ %f ],\t [ %f ], [ %f ] \n"
      , rHomography.at<double>(0, 0)
      , rHomography.at<double>(0, 1)
      , rHomography.at<double>(0, 2)
      , rHomography.at<double>(1, 0)
      , rHomography.at<double>(1, 1)
      , rHomography.at<double>(1, 2)
      , rHomography.at<double>(2, 0)
      , rHomography.at<double>(2, 1)
      , rHomography.at<double>(2, 2)
      );
    } 
    catch (cv::Exception& e) 
    {
      rStatus = EStatusErrorNonFatal;
      Result.SetResult
      (
        false
      ) << "Homography failed with exception: " << e.what();
    }
  }

  // Record homography time.
  gettimeofday(&End, 0);
  Generic::SubstractTime(&Diff, &End, &Start);
  mTHomography.push_back(Diff.tv_usec);
  
  return Result;
}

// Detect a coloured obstruction in the current camera frame by HSV
// thresholding, box-filter the binary mask, draw a bounding box on the
// frame and estimate the obstruction's distance and bearing.
//
// rStatus     receives EObstructionStatusObjectDetected or
//             EObstructionStatusNoObject.
// rDistanceX  receives an estimated distance (empirical linear model).
// rDistanceY  receives an estimated angle from the image centre.
SResult
CVision::DetectObstruction( EObstructionStatus& rStatus, double& rDistanceX, double& rDistanceY )
{
  SResult Result;

  // HSV Colors are used for object detection 
  cv::Mat HsvCameraFrame;
  cv::Mat BinaryFrame(   mFrameHeight, mFrameWidth, CV_8UC1 );
  cv::Mat FilteredFrame = cv::Mat::zeros( mFrameHeight, mFrameWidth, CV_8UC1 );

  // Convert the camera frame
  cv::cvtColor( mCameraFrame, HsvCameraFrame, CV_BGR2HSV );

  // Convert HSV camera frame to binary image: a pixel is "on" when its
  // hue is in (103,118) and both saturation and value exceed 40.
  // NOTE(review): presumably this selects blue -- confirm against the
  // actual target colour.
  for( unsigned i = 0; i < mFrameHeight; i++ )
  {
    for( unsigned j = 0; j < mFrameWidth; j++ )
    {
      //original thresholds: 107, 115, 40, 40
      if( ( HsvCameraFrame.data[ ( i * mFrameWidth + j ) * 3 ] > 103 )     &&
        (     HsvCameraFrame.data[ ( i * mFrameWidth + j ) * 3 ] < 118 )     &&
        (     HsvCameraFrame.data[ ( i * mFrameWidth + j ) * 3 + 1 ] > 40 )  &&
        (     HsvCameraFrame.data[ ( i * mFrameWidth + j ) * 3 + 2 ] > 40 ) )
      {
        BinaryFrame.data[ ( i * mFrameWidth + j ) ] = 255;
      }
      else
      {
        BinaryFrame.data[ ( i * mFrameWidth + j ) ] = 0;
      }
    }
  }

  //

  // Apply box filter to the binary image 
  // Determine extremes / edges 
  unsigned MaxX = 0, MinX = mFrameWidth;
  unsigned MaxY = 0, MinY = mFrameHeight;
  for( int i = mFilterBoxSize; i < ( mFrameHeight - mFilterBoxSize ); i++ )
  {
    for( int j = mFilterBoxSize; j < ( mFrameWidth - mFilterBoxSize ); j++ )
    {
       // Count white pixels in the (2*mFilterBoxSize+1)^2 neighbourhood.
       unsigned WhitePixelsInArea = 0;
      for( int k = -mFilterBoxSize; k <= mFilterBoxSize; k++ )
      {
        for( int l = -mFilterBoxSize; l <= mFilterBoxSize; l++ )
        {
          if( 255 == BinaryFrame.data[ ( ( i + k ) * mFrameWidth ) + j + l ] )
          {
            WhitePixelsInArea++;
          }
        }
      }

      // Pixel threshold in the box area; surviving pixels also grow the
      // bounding box (MinX/MaxX/MinY/MaxY).
      if( WhitePixelsInArea > 115 )
      {
        FilteredFrame.data[ ( i * mFrameWidth ) + j ] = 255;
        MaxX = ( ( j > MaxX ) ? j : MaxX );
        MinX = ( ( j < MinX ) ? j : MinX );
        MaxY = ( ( i > MaxY ) ? i : MaxY );
        MinY = ( ( i < MinY ) ? i : MinY );
      }
      else
      {
        FilteredFrame.data[ ( i * mFrameWidth ) + j ] = 0;
      }
    }
  }
 //cv::imshow("Image", FilteredFrame );
  // Set obstruction status.  When no pixel passed, MaxX stayed 0 and
  // MinX stayed mFrameWidth, so (unsigned) MaxX - MinX wraps to exactly
  // -mFrameWidth; the comparison detects the "nothing found" case.
  rStatus = ( -mFrameWidth == MaxX - MinX ) 
    ? EObstructionStatusNoObject 
    : EObstructionStatusObjectDetected;

  // If object is detected, draw a green bounding box into mCameraFrame
  // (pixels are addressed directly as 3-byte BGR triples).
  if( EObstructionStatusObjectDetected == rStatus )
  {
    // Top and bottom edges.
    for(unsigned  i = MinX; i <= MaxX; i++ )
    {
      mCameraFrame.data[ ( mFrameWidth * MinY + i ) * 3 ]      = 0;
      mCameraFrame.data[ ( mFrameWidth * MinY + i ) * 3 + 1 ]  = 255;
      mCameraFrame.data[ ( mFrameWidth * MinY + i ) * 3 + 2 ]  = 0;
      mCameraFrame.data[ ( mFrameWidth * MaxY + i ) * 3 ]      = 0;
      mCameraFrame.data[ ( mFrameWidth * MaxY + i ) * 3 + 1 ]  = 255;
      mCameraFrame.data[ ( mFrameWidth * MaxY + i ) * 3 + 2 ]  = 0;
    }
    // Left and right edges.
    for(unsigned  i = MinY; i <= MaxY; i++ )
    {
      mCameraFrame.data[ ( mFrameWidth * i + MinX ) * 3 ]      = 0;
      mCameraFrame.data[ ( mFrameWidth * i + MinX ) * 3 + 1 ]  = 255;
      mCameraFrame.data[ ( mFrameWidth * i + MinX ) * 3 + 2 ]  = 0;
      mCameraFrame.data[ ( mFrameWidth * i + MaxX ) * 3 ]      = 0;
      mCameraFrame.data[ ( mFrameWidth * i + MaxX ) * 3 + 1 ]  = 255;
      mCameraFrame.data[ ( mFrameWidth * i + MaxX ) * 3 + 2 ]  = 0;
    }	
  }

  // If the object is in the center it's easy, assume it is.
  // "Width" is the larger of the box's two dimensions.
  int       ObstructionWidth  = MAX( MaxX - MinX, MaxY - MinY );
  unsigned  ObstructionX      = ( MaxX + MinX ) / 2;
  unsigned  ObstructionY      = ( MaxY + MinY ) / 2;
  if( EObstructionStatusObjectDetected == rStatus )
  {
    // Determine Distance and angle with this information
    Generic::debugf( "  Obstruction width:  %d\n", ObstructionWidth );
    Generic::debugf( "  Obstruction center: %d,%d\n", ObstructionX, ObstructionY );
  
    // Set the real distance: two empirically fitted linear segments,
    // switching at an apparent width of 200 px.
    if( ObstructionWidth < 200 )
    {
      rDistanceX = 60 + ( static_cast<double>( ObstructionWidth ) * -0.2 );
    }
    else
    {
      rDistanceX = 30 + ( static_cast<double>( ObstructionWidth ) * -0.045 );
    }

    // Set the angle: offset from the horizontal image centre (320 px),
    // scaled by an empirical factor of 1/33.
    rDistanceY = ( ( static_cast<double>( ObstructionX ) - 320) / 33 );
  }
  return Result;
}

// Accessor: reference ("match") image loaded by Initialize().
cv::Mat&
CVision::GetMatchFrame(void) 
{
    return mMatchFrame;
}

// Accessor: last successfully processed camera frame (set in Update()).
cv::Mat&
CVision::GetCameraFrame(void) 
{
    return mCameraFrame;
}

// Accessor kept for interface compatibility.
//
// Fix: the function is declared to return a cv::VideoCapture& but had
// no return statement at all, which is undefined behaviour the moment
// it is called.  Capture now goes through the V4L file descriptor
// directly (see Capture()), so a static placeholder object is returned
// to keep any remaining callers well-defined.
cv::VideoCapture&
CVision::GetCapture(void) 
{
    static cv::VideoCapture DummyCapture;
    return DummyCapture;
}

// Match two SURF descriptor sets with FLANN and keep the matches that
// pass the ratio test (best distance < 0.6 * second-best distance).
//
// rDescriptors       reference-image descriptors, row-major floats.
// rDescriptorsFrame  current-frame descriptors, row-major floats.
// DescriptorSize     number of floats per descriptor row.
// rMatchPairs        receives one Pair(reference row, frame row) per
//                    accepted match.
//
// Returns false when the input is unusable or the knn search throws.
bool
CVision::FlannFindPairs
(
  const std::vector<float>& rDescriptors,
  const std::vector<float>& rDescriptorsFrame,
  const int DescriptorSize,
  std::vector<Pair>& rMatchPairs
) 
{
  bool Result = true;

  // Fix: taking &v[0] of an empty vector is undefined behaviour, and
  // FLANN cannot build an index over zero rows.  Reject such input.
  if( DescriptorSize <= 0        ||
      rDescriptors.empty( )      ||
      rDescriptorsFrame.empty( ) )
  {
    return false;
  }

  int ObjectRows = rDescriptors.size() / DescriptorSize;
  int ImageRows = rDescriptorsFrame.size() / DescriptorSize;

  // Wrap the descriptor storage in cv::Mat headers (no data is copied).
  cv::Mat Object
  (
    ObjectRows,
    DescriptorSize,
    CV_32F,
    (void*) &rDescriptors[0],
    sizeof ( float) * DescriptorSize
  );

  cv::Mat Image
  (
    ImageRows,
    DescriptorSize,
    CV_32F,
    (void*) &rDescriptorsFrame[0],
    sizeof ( float) * DescriptorSize
  );

  // Declare return values for the 2-nearest-neighbour search.
  cv::Mat Indices(ObjectRows, 2, CV_32S);
  cv::Mat Distances(ObjectRows, 2, CV_32F);

  // Set the Flann index to 4 randomized KD trees over the frame set.
  cv::flann::Index FlannIndex
  (
    Image,
    cv::flann::KDTreeIndexParams(4)
  );

  // Use up to 128 leaf checks per query.
  try 
  {
    FlannIndex.knnSearch
    (
      Object,
      Indices,
      Distances,
      2,
      cv::flann::SearchParams(128)
    );
  } 
  catch (cv::Exception& e) 
  {
      printf( "Flann exception: %s", e.what() );
      Result = false;
  }

  // Ratio test.  Fix: the loop index is now int to match
  // cv::Mat::rows (was an unsigned-vs-signed comparison).
  for (int i = 0; i < Distances.rows && Result; i++) 
  {
      if (Distances.at<float>(i, 0) < 0.6 * Distances.at<float>(i, 1)) 
      {
          Pair Match(i, Indices.at<int>(i, 0));
          rMatchPairs.push_back(Match);
      }
  }

  return Result;
}

// Draw a yellow circle onto Frame for every keypoint in the selected set.
//
// Frame      image to draw on (modified in place).
// Keypoints  EKeypointsOriginal selects mKeypoints; any other value
//            selects the matched subset mMatchedKeypoints.
void
CVision::DrawKeyframes(cv::Mat& Frame, EKeypoints Keypoints) 
{
  std::vector<cv::KeyPoint>& rKeyPoints
  (
    EKeypointsOriginal == Keypoints ? mKeypoints : mMatchedKeypoints
  );

  // Color of the keypoint circle (yellow in BGR order)
  cv::Scalar Color(0, 255, 255);

  // Draw each individual keypoint as a circle.
  // Fix: use size_t for the index to match std::vector::size() (the
  // previous int index caused a signed/unsigned comparison).
  for (size_t i = 0; i < rKeyPoints.size(); i++) 
  {
    cv::Point Center;
    int Radius;

    Center.x = static_cast<int> (rKeyPoints[i].pt.x);
    Center.y = static_cast<int> (rKeyPoints[i].pt.y);
    // Radius scales with keypoint size; may truncate to 0 (a dot).
    Radius = static_cast<int> (rKeyPoints[i].size / 10);

    cv::circle(Frame, Center, Radius, Color, 1, 8, 0);
  }
}

// Return the benchmark sample vector for a pipeline stage.
//
// Id  0 = homography timings, 1 = extraction timings, 2 = match timings.
//
// Fix: the original switch had no default, so any other Id fell off
// the end of a value-returning function -- undefined behaviour.
// Unknown ids now map to the homography timings.
std::vector<unsigned long>&
CVision::GetBenchmark( int Id ) 
{
  switch (Id) 
  {
    case 1:
        return mTExtract;
    case 2:
        return mTMatch;
    case 0:
    default:
        return mTHomography;
  }
}

