#include "DynamicObject.h"
#include "MapPoint.h"
#include "OpenGL.h"
#include <cvd/vision.h>
#include <cvd/fast_corner.h>
#include <cvd/vector_image_ref.h>
#include <cvd/image_interpolate.h>
#include <TooN/Cholesky.h>
#include <opencv/cv.h>
#include <opencv/cvaux.h>
#include <opencv/highgui.h>
#include <algorithm>
#include <cmath>
#include <iostream>
#include <vector>

// tmmintrin.h contains the SSSE3 intrinsics, used for the ZMSSD search at the bottom.
// If this causes problems, just do #define CVD_HAVE_XMMINTRIN 0
#if CVD_HAVE_XMMINTRIN
#include <tmmintrin.h>
#endif

using namespace CVD;
using namespace std;

// Constructs the dynamic-object detector for a given camera model.
// @param c           camera used to project/unproject image points (copied).
// @param nPatchSize  side length, in pixels, of the square template patch.
DynamicObject::DynamicObject(const ATANCamera &c, int nPatchSize)
: mCamera(c),
  mimTemplate(ImageRef(nPatchSize,nPatchSize))
{
  mnPatchSize = nPatchSize;
  mirCenter = ImageRef(nPatchSize/2, nPatchSize/2);
  // Maximum acceptable SSD per pixel. Pretty arbitrary... could make a GVar
  // out of this. (500)
  int nMaxSSDPerPixel = 500;
  mnMaxSSD = mnPatchSize * mnPatchSize * nMaxSSDPerPixel;
  CameraSize = mCamera.GetImageSize();
  OldFrame.reset();
  // Start above any skip threshold so the first valid frame pair is processed
  // immediately by NewFrame().
  nFramesSkiped = 5;
}


struct Sort_Scores {
  bool operator()(const std::pair<CVD::ImageRef,int> &left, std::pair<CVD::ImageRef,int> &right) {
    return left.second > right.second;
  }
} SortScores ;

// Detects potentially moving objects by comparing the optical flow measured
// between the cached OldFrame and CurrentFrame (pyramidal Lucas-Kanade)
// against the "artificial" flow predicted by the homography induced by the
// camera motion over the mean scene plane. Features whose real flow disagrees
// with the prediction are clustered (ExtractDynamicObjects), converted to
// world coordinates (ConvertToWorld) and fed to the tracker (TrackDynObj).
// Side effects: draws the two flows with OpenGL and refreshes the cached
// OldFrame state (image, timestamp, feature list) for the next call.
// NOTE(review): CurrentKeyFrame is currently unused.
void DynamicObject::NewFrame(uint64_t timestamp,boost::shared_ptr<KeyFrame> CurrentKeyFrame, boost::shared_ptr<KeyFrame> CurrentFrame, std::vector<TooN::Vector<3> > &vTest)
{
  //Constants
  double TransThreshold = 0.1;        // camera translation considered significant
  double RotThreshold = 4*M_PI/180;   // camera rotation considered significant
  double ThThreshold = 20*M_PI/180;   // real-vs-predicted flow angle that flags movement
  double ImgDistMaxThreshold = 0.2*fmax(CameraSize[0],CameraSize[1]);  // longer flows are outliers
  double ImgDistMinThreshold = 0.05*fmax(CameraSize[0],CameraSize[1]); // minimum flow when the camera is still
  int maxFramesSkiped = 0;
  int NFeatures = 1000;               // max number of corners carried to the next frame
  //Begin
  vTest.clear();

  if (OldFrame)
  {
    std::cout << "Begin********************************************" << std::endl;
    //Compute Translation and Rotation between frames:
    TooN::SE3<double> se3CurrFromOld;  //transformation from old frame to current frame
    se3CurrFromOld = CurrentFrame->se3CfromW * OldFrame->se3CfromW.inverse();
    TooN::Vector<3> nToPlane = makeVector(0,0,-1);
    double meanDepth = OldFrame->dSceneDepthMean;
    TooN::Vector<3> CurrFromOld_Translation = se3CurrFromOld.get_translation();
    TooN::Matrix<3,3> CurrFromOld_Rotation = se3CurrFromOld.get_rotation().get_matrix();
    // Homography of the mean scene plane: H = R - t*n^T / d
    TooN::Matrix<3,3> HomCurrFromOld;
    HomCurrFromOld =CurrFromOld_Rotation -((CurrFromOld_Translation.as_col()*nToPlane.as_row())/meanDepth);
    //Determine movement of the camera
    int CameraTranslation; //0->no significant translation;1->planar translation;2->vertical translation;3->rotation only
    double PlanarTrans = sqrt(pow(CurrFromOld_Translation[0],2)+pow(CurrFromOld_Translation[1],2));
    double VerticalTrans = abs(CurrFromOld_Translation[2]);
    // Euler angles recovered from the rotation matrix.
    double ThetaRot = asin(-CurrFromOld_Rotation(2,0));
    double PhiRot = asin(CurrFromOld_Rotation(2,1)/cos(ThetaRot));
    double PsiRot = acos(CurrFromOld_Rotation(0,0)/cos(ThetaRot));
    if(PlanarTrans > TransThreshold)
      CameraTranslation = 1;
    else if (VerticalTrans > TransThreshold)
      CameraTranslation = 2;
    else if ((ThetaRot > RotThreshold)||(PhiRot > RotThreshold)||(PsiRot > RotThreshold))
      CameraTranslation = 3;
    else
      CameraTranslation = 0;
    std::cout << "CameraTranslation/nFramesSkiped  " << CameraTranslation << "/"<< nFramesSkiped << std::endl;
    // Process the frame when the camera moved, or periodically when it is still.
    if(CameraTranslation == 1 || CameraTranslation == 2 || CameraTranslation==3 || (CameraTranslation == 0 && nFramesSkiped > maxFramesSkiped))
    {
      //aux = 1;
      nFramesSkiped = 0;
      //Image Mat of Current Frame
      // NOTE(review): cv::Mat takes (rows, cols); confirm that CameraSize[0]
      // really is the row count of the CVD image -- if it is the width this
      // wraps the pixel buffer with swapped dimensions.
      cv::Mat CurrentFrameMat = cv::Mat(CameraSize[0],CameraSize[1], CV_8UC1,CurrentFrame->aLevels[0].im.data(),CurrentFrame->aLevels[0].im.row_stride());
      //Compute Real Optical Flow
      std::vector<cv::Point2f> CurrFrameFeaturePos;
      std::vector<unsigned char> status;
      std::vector<float> err;
      cv::Size winSize(15,15);
      cv::TermCriteria criteria(cv::TermCriteria::COUNT + cv::TermCriteria::EPS, 30, 0.01);
      cv::calcOpticalFlowPyrLK(OldFrameMat,CurrentFrameMat,OldFrameFeaturePos,CurrFrameFeaturePos,status,err,winSize,3,criteria,0.5,0);
      std::cout << "Optical flow calculated with n points: " << OldFrameFeaturePos.size() <<std::endl;
      //Artificial Optical Flow: where each old feature would land if it lay on
      //the mean scene plane and only the camera had moved.
      std::vector<cv::Point2f> CurrFrameArtificialFeaturePos;
      TooN::Vector<3> CurrArtiPos;
      cv::Point2f CurrArtiPoint;
      for (int i=0;i<(int)OldFrameFeaturePos.size();i++)
      {
        CurrArtiPos = HomCurrFromOld * makeVector(OldFrameFeaturePos[i].x,OldFrameFeaturePos[i].y,1);
        CurrArtiPoint.x = CurrArtiPos[0];
        CurrArtiPoint.y = CurrArtiPos[1];
        CurrFrameArtificialFeaturePos.push_back(CurrArtiPoint);
      }
      //Compare the two optical flows
      double xo,yo,xr,yr,xa,ya;
      double hyp_real, hyp_arti,th_real,th_arti,ImgDist;
      double div_x,div_y,maxDiv,minDiv;
      maxDiv = -10000;
      minDiv = 10000;
      TooN::Matrix<>  MovementTrans(CameraSize[0],CameraSize[1]);
      TooN::Matrix<>  MovementDiv(CameraSize[0],CameraSize[1]);
      TooN::Matrix<>  Divergence(CameraSize[0],CameraSize[1]);
      std::vector<StereoMeasurement> FeaturesTrans;

      // NAN marks pixels with no flow information.
      for (int i=0;i<CameraSize[0];i++)
      {
        for(int j=0;j<CameraSize[1];j++)
        {
          MovementTrans(i,j) = NAN;
          MovementDiv(i,j) = NAN;
        }
      }
      for (int i=0;i<(int)OldFrameFeaturePos.size();i++)
      {
        xo = OldFrameFeaturePos[i].x;
        yo = OldFrameFeaturePos[i].y;
        xr = CurrFrameFeaturePos[i].x;
        yr = CurrFrameFeaturePos[i].y;
        xa = CurrFrameArtificialFeaturePos[i].x;
        ya = CurrFrameArtificialFeaturePos[i].y;
        hyp_real = sqrt(pow(xr-xo,2)+pow(yr-yo,2));
        hyp_arti = sqrt(pow(xa-xo,2)+pow(ya-yo,2));
        th_real = asin((yr-yo)/hyp_real);
        th_arti = asin((ya-yo)/hyp_arti);
        ImgDist = sqrt(pow(yr-yo,2)+pow(xr-xo,2));
        switch(CameraTranslation)
        {
          case 1:
          case 2:
          case 3:
            // Camera moved: a feature is dynamic when the measured flow
            // direction disagrees with the homography prediction.
            MovementTrans(xo,yo) = 0;
            if(abs(th_real-th_arti)>ThThreshold && ImgDist < ImgDistMaxThreshold) //point is moving
            {
              MovementTrans(xo,yo) = 1;
              StereoMeasurement auxFeature;
              auxFeature.v2ImPos1[0] = xo;
              auxFeature.v2ImPos1[1] = yo;
              auxFeature.v2ImPos2[0] = xr;
              auxFeature.v2ImPos2[1] = yr;
              FeaturesTrans.push_back(auxFeature);
            }
            break;
          case 0:
            // Camera still: any noticeable flow at all flags movement.
            MovementTrans(xo,yo) = 0;
            if (ImgDist > ImgDistMinThreshold)
            {
              MovementTrans(xo,yo) = 1;
              StereoMeasurement auxFeature;
              auxFeature.v2ImPos1[0] = xo;
              auxFeature.v2ImPos1[1] = yo;
              auxFeature.v2ImPos2[0] = xr;
              auxFeature.v2ImPos2[1] = yr;
              FeaturesTrans.push_back(auxFeature);
            }
            break;
        }
        //Divergence (only used by commented-out visualization code below)
        if(ImgDist < ImgDistMaxThreshold)
        {
          if (abs(xr-xo)>0.0001)
            div_x = (yr-yo)/(xr-xo);
          else
            div_x = 0;
          if(abs(yr-yo)>0.0001)
            div_y = (xr-xo)/(yr-yo);
          else
            div_y = 0;
          Divergence(xo,yo) = div_x+div_y;
          minDiv = fmin(minDiv,div_x+div_y);
          maxDiv = fmax(maxDiv,div_x+div_y);
        }
      }
      std::vector<DynObjwithFeatures> DynObjectsFeatures;
      std::cout << "Before extracting dynamic object" << std::endl;
      DynObjectsFeatures = DynamicObject::ExtractDynamicObjects(MovementTrans,FeaturesTrans);
      std::cout << "Before convert to world" << std::endl;
      if (DynObjectsFeatures.size()>0)
      {
        std::vector<DynObj> DynObjects = DynamicObject::ConvertToWorld(DynObjectsFeatures,CurrentFrame,OldFrame,timestamp,OldFrameTimestamp);
        std::cout << "Before tracking" << std::endl;
        DynamicObject::TrackDynObj(DynObjects);
        std::cout << "end of process" << std::endl;
      }
      //Draw Optical Flow: green->blue = static feature, red->blue = moving.
      glPointSize(5);
      glLineWidth(2);
      glEnable(GL_POINT_SMOOTH);
      glEnable(GL_LINE_SMOOTH);
      glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
      glEnable(GL_BLEND);
      glBegin(GL_LINES);
      TooN::Vector<2> vec1,vec2,vec2_a;
      for(int i=0;i<(int)OldFrameFeaturePos.size();i++)
      {
        vec1[0] = OldFrameFeaturePos[i].x;
        vec1[1] = OldFrameFeaturePos[i].y;
        vec2[0] = CurrFrameFeaturePos[i].x;
        vec2[1] = CurrFrameFeaturePos[i].y;
        vec2_a[0] = CurrFrameArtificialFeaturePos[i].x;
        vec2_a[1] = CurrFrameArtificialFeaturePos[i].y;
        ImgDist = sqrt(pow(vec1[0]-vec2[0],2)+pow(vec1[1]-vec2[1],2));
        if (ImgDist < ImgDistMaxThreshold)
        {
          // NOTE(review): MovementTrans may still hold NAN here (feature not
          // classified); NAN==0 is false, so such features draw as "moving".
          if(MovementTrans(vec1[0],vec1[1])==0)
          {
            glColor3f(0,1,0);
            glVertex(vec1);
            glColor3f(0,0,1);
            glVertex(vec2);
          }
          else
          {
            glColor3f(1,0,0);
            glVertex(vec1);
            glColor3f(0,0,1);
            glVertex(vec2);
          }
        }
      }
      glEnd();

      //Update OldFrame
      OldFrame = CurrentFrame;
      OldFrameTimestamp = timestamp;
      OldFrameMat = CurrentFrameMat;
      //Select best features from CurrentFrame to use them in the following iterations.
      CVD::fast_nonmax_with_scores(CurrentFrame->aLevels[0].im,CurrentFrame->aLevels[0].vCorners,10,CurrentFrame->aLevels[0].vMaxCorners);
      // Clamp to the number of corners actually found: partial_sort with a
      // middle iterator past end() (and the indexed reads below) would be
      // undefined behaviour when fewer than NFeatures corners exist.
      int nKeep = std::min(NFeatures,(int)CurrentFrame->aLevels[0].vMaxCorners.size());
      std::partial_sort(CurrentFrame->aLevels[0].vMaxCorners.begin(), CurrentFrame->aLevels[0].vMaxCorners.begin()+nKeep, CurrentFrame->aLevels[0].vMaxCorners.end(), SortScores);
      OldFrameFeaturePos.clear();
      cv::Point2f auxPoint;
      for(int i = 0;i<nKeep;i++)
      {
        auxPoint.x = CurrentFrame->aLevels[0].vMaxCorners[i].first.x;
        auxPoint.y = CurrentFrame->aLevels[0].vMaxCorners[i].first.y;
        OldFrameFeaturePos.push_back(auxPoint);
      }
    }
    nFramesSkiped++;
  }
  else
  {
    //First call: just cache the frame and its best corners for next time.
    cv::Mat CurrentFrameMat = cv::Mat(CameraSize[0],CameraSize[1], CV_8UC1,CurrentFrame->aLevels[0].im.data(),CurrentFrame->aLevels[0].im.row_stride());
    OldFrame = CurrentFrame;
    OldFrameTimestamp = timestamp;
    OldFrameMat = CurrentFrameMat;
    //Select best features from CurrentFrame to use them in the following iterations.
    CVD::fast_nonmax_with_scores(CurrentFrame->aLevels[0].im,CurrentFrame->aLevels[0].vCorners,10,CurrentFrame->aLevels[0].vMaxCorners);
    // Same clamp as above: never sort or index past the detected corner count.
    int nKeep = std::min(NFeatures,(int)CurrentFrame->aLevels[0].vMaxCorners.size());
    std::partial_sort(CurrentFrame->aLevels[0].vMaxCorners.begin(), CurrentFrame->aLevels[0].vMaxCorners.begin()+nKeep, CurrentFrame->aLevels[0].vMaxCorners.end(), SortScores);
    OldFrameFeaturePos.clear();
    cv::Point2f auxPoint;
    for(int i = 0;i<nKeep;i++)
    {
      auxPoint.x = CurrentFrame->aLevels[0].vMaxCorners[i].first.x;
      auxPoint.y = CurrentFrame->aLevels[0].vMaxCorners[i].first.y;
      OldFrameFeaturePos.push_back(auxPoint);
    }
  }
}

// Greedily clusters the moving features into candidate dynamic objects.
// A feature joins an existing object when its flow direction is similar
// (ThetaThreshold) and it lies close to one of the object's features
// (DistThreshold); otherwise it seeds a new object. Objects with fewer than
// MinObjectFeatureSize features are discarded. Object centers are drawn with
// OpenGL as a side effect.
// @param M         movement matrix (currently unused here).
// @param Features  old->current image positions of features flagged as moving.
// @return filtered list of objects with their member features.
std::vector<DynObjwithFeatures> DynamicObject::ExtractDynamicObjects(TooN::Matrix<> M, std::vector<StereoMeasurement> Features)
{
  std::cout << "Begin extraction" << std::endl;
  double ThetaThreshold = 15*M_PI/180;
  double DistThreshold = 0.05*fmax(CameraSize[0],CameraSize[1]);
  int MinObjectFeatureSize = 5;
  std::vector<DynObjwithFeatures > Objects;
  bool insert;
  for (int i = 0;i<(int)Features.size();i++)
  {
    insert = false;
    double auxincPos = sqrt(pow(Features[i].v2ImPos2[0]-Features[i].v2ImPos1[0],2)+pow(Features[i].v2ImPos2[1]-Features[i].v2ImPos1[1],2));
    double auxTheta = asin((Features[i].v2ImPos2[1]-Features[i].v2ImPos1[1])/auxincPos);

    for(int j = 0; j<(int)Objects.size();j++)
    {
      if(abs(auxTheta-Objects[j].Theta)< ThetaThreshold)
      {
        //Check distances against the object's existing features
        for(int k = 0;k<(int)Objects[j].Features.size();k++)
        {
          double dist = sqrt(pow(Objects[j].Features[k].v2ImPos2[0]-Features[i].v2ImPos2[0],2)+pow(Objects[j].Features[k].v2ImPos2[1]-Features[i].v2ImPos2[1],2));
          if (dist < DistThreshold)
          {
            insert = true;
            break;
          }
        }
      }
      if(insert)
      {
        // Running averages over the object's features.
        // BUGFIX: the incoming feature is Features[i]; the original code
        // indexed Features[j] (the object loop index) when updating centers.
        Objects[j].Theta = (Objects[j].Theta * Objects[j].Features.size()+auxTheta)/(Objects[j].Features.size()+1);
        Objects[j].incPos = (Objects[j].incPos * Objects[j].Features.size()+auxincPos)/(Objects[j].Features.size()+1);
        Objects[j].Center.x = (Objects[j].Center.x * Objects[j].Features.size()+Features[i].v2ImPos2[0])/(Objects[j].Features.size()+1);
        Objects[j].Center.y = (Objects[j].Center.y * Objects[j].Features.size()+Features[i].v2ImPos2[1])/(Objects[j].Features.size()+1);
        Objects[j].CenterOld.x = (Objects[j].CenterOld.x * Objects[j].Features.size()+Features[i].v2ImPos1[0])/(Objects[j].Features.size()+1);
        Objects[j].CenterOld.y = (Objects[j].CenterOld.y * Objects[j].Features.size()+Features[i].v2ImPos1[1])/(Objects[j].Features.size()+1);

        Objects[j].Features.push_back(Features[i]);
        break;
      }
    }
    if (insert == false)
    {
      //No matching object: seed a new one from this feature.
      DynObjwithFeatures auxObj;
      auxObj.Center.x = Features[i].v2ImPos2[0];
      auxObj.Center.y = Features[i].v2ImPos2[1];
      auxObj.CenterOld.x = Features[i].v2ImPos1[0];
      auxObj.CenterOld.y = Features[i].v2ImPos1[1];
      auxObj.Features.push_back(Features[i]);
      auxObj.incPos = auxincPos;
      auxObj.Theta = auxTheta;
      Objects.push_back(auxObj);
    }
  }

  std::cout << "Number of Features/ Dynamic Objects  " << Features.size() << "/" << Objects.size() << std::endl;
  //Analyze obtained objects: keep only those with enough supporting features.
  std::vector<DynObjwithFeatures> ObjectsFiltered;
  for(int i =0;i<(int)Objects.size();i++)
  {
    if((int)Objects[i].Features.size() >= MinObjectFeatureSize)
      ObjectsFiltered.push_back(Objects[i]);
  }
  std::cout << "Number of Filtered Objects  " <<  ObjectsFiltered.size() << std::endl;

  //Draw the surviving object centers.
  glPointSize(10);
  glLineWidth(2);
  glEnable(GL_POINT_SMOOTH);
  glEnable(GL_LINE_SMOOTH);
  glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
  glEnable(GL_BLEND);
  glBegin(GL_POINTS);
  glColor3f(1,1,1);
  TooN::Vector<2> vec1;
  for(int i = 0;i<(int)ObjectsFiltered.size();i++)
  {
    vec1[0] = ObjectsFiltered[i].Center.x;
    vec1[1] = ObjectsFiltered[i].Center.y;
    glColor3f(1,0,1);
    glVertex(vec1);
  }
  glEnd();


  return ObjectsFiltered;
}

struct sort_distances {
  bool operator() (TooN::Vector<2> V1,TooN::Vector<2> V2) { return (V1[1]<V2[1]);}
} SortDistances;

std::vector<DynObj> DynamicObject::ConvertToWorld(std::vector<DynObjwithFeatures> DObjs,boost::shared_ptr<KeyFrame> CurrentFrame,boost::shared_ptr<KeyFrame> OldFr,uint64_t currtimestamp,uint64_t oldtimestamp)
{
  std::vector<DynObj> DynObjs;
  std::map<boost::shared_ptr<MapPoint>, Measurement>::iterator it;
  TooN::Vector<2> distance;
  it = CurrentFrame->mMeasurements.begin();
  std::vector<TooN::Vector<2> > Distances,DistancesOld;

  double incTime = (currtimestamp-oldtimestamp)/1000000; //Time between CurrentFrame and OldFrame

  for(int i=0;i<(int)DObjs.size();i++)
  {
    DynObj Daux;
    //Center Position in Current Frame
    int aux = 0;
    Distances.clear();
    Distances.resize(CurrentFrame->mMeasurements.size());
    for (it = CurrentFrame->mMeasurements.begin();it!=CurrentFrame->mMeasurements.end(); it++)
    {
      distance[0] = aux;
      distance[1] = sqrt(pow(it->second.v2RootPos[0]-DObjs[i].Center.x,2)+pow(it->second.v2RootPos[1]-DObjs[i].Center.y,2));
      aux++;
      Distances.push_back(distance);
    }
    std::sort(Distances.begin(),Distances.end(),SortDistances);
    //Interpolation of Real Position
    Matrix<3> IP_C; //Interpolation points changed into camera coordinates where origin is 0,0,0:
    it = CurrentFrame->mMeasurements.begin();
    std::advance(it,Distances[0][0]);
    IP_C.T()[0] = (CurrentFrame->se3CfromW*it->first->v3WorldPos);
    it = CurrentFrame->mMeasurements.begin();
    std::advance(it,Distances[1][0]);
    IP_C.T()[1] = (CurrentFrame->se3CfromW*it->first->v3WorldPos);
    it = CurrentFrame->mMeasurements.begin();
    std::advance(it,Distances[2][0]);
    IP_C.T()[2] = (CurrentFrame->se3CfromW*it->first->v3WorldPos);

    //Transform the pixel position to a vector in the camera frame
    TooN::Vector<2> CenterPx;
    CenterPx[0] = DObjs[i].Center.x;
    CenterPx[1] = DObjs[i].Center.y;
    TooN::Vector<3> CenterVec_C = TooN::unit(TooN::unproject(mCamera.UnProject(CVD::ir(CenterPx))));

    //Plane calculation Ax+By+Cz+1 = 0;
    double D = CalcDeterminant(IP_C);
    TooN::Matrix<3> auxMat = IP_C;
    auxMat[0] = TooN::Ones(3);
    double A = (-1/D)*CalcDeterminant(auxMat.T());
    auxMat = IP_C;
    auxMat[1] = TooN::Ones(3);
    double B = (-1/D)*CalcDeterminant(auxMat.T());
    auxMat = IP_C;
    auxMat[2] = TooN::Ones(3);
    double C = (-1/D)*CalcDeterminant(auxMat.T());
    //Calculate the intersection between the plane and the line
    //Ecuation of the line: x = xd*t;y=yd*t;z=zd*t with (xd,yd,zd) the center vector in the camera frame
    double t = -1/(A*CenterVec_C[0]+B*CenterVec_C[1]+C*CenterVec_C[2]);

    //Position of the center in the camera frmae
    TooN::Vector<3> CenterPos_C,CenterPosOld_C;
    CenterPos_C[0] = CenterVec_C[0]*t;
    CenterPos_C[1] = CenterVec_C[1]*t;
    CenterPos_C[2] = CenterVec_C[2]*t;

    //Position of the center in the world frame
    TooN::Vector<3> CenterPos = TooN::makeVector(1,2,3);

    CenterPos = (CurrentFrame->se3CfromW.inverse())*CenterPos_C;

    Daux.Center.x = CenterPos[0];
    Daux.Center.y = CenterPos[1];
    Daux.Center.z = CenterPos[2];


    //Center Position in Old Frame
    aux = 0;
    Distances.clear();
    Distances.resize(OldFr->mMeasurements.size());
    for (it = OldFr->mMeasurements.begin();it!=OldFr->mMeasurements.end(); it++)
    {
      distance[0] = aux;
      distance[1] = sqrt(pow(it->second.v2RootPos[0]-DObjs[i].CenterOld.x,2)+pow(it->second.v2RootPos[1]-DObjs[i].CenterOld.y,2));
      aux++;
      Distances.push_back(distance);
    }
    std::sort(Distances.begin(),Distances.end(),SortDistances);
    //Interpolation of Real Position
   //Interpolation points changed into camera coordinates where origin is 0,0,0:
    it = OldFr->mMeasurements.begin();
    std::advance(it,Distances[0][0]);
    IP_C.T()[0] = (OldFr->se3CfromW*it->first->v3WorldPos);
    it = OldFr->mMeasurements.begin();
    std::advance(it,Distances[1][0]);
    IP_C.T()[1] = (OldFr->se3CfromW*it->first->v3WorldPos);
    it = OldFr->mMeasurements.begin();
    std::advance(it,Distances[2][0]);
    IP_C.T()[2] = (OldFr->se3CfromW*it->first->v3WorldPos);

    //Transform the pixel position to a vector in the camera frame
    //TooN::Vector<2> CenterPx;
    CenterPx[0] = DObjs[i].CenterOld.x;
    CenterPx[1] = DObjs[i].CenterOld.y;
    CenterVec_C = TooN::unit(TooN::unproject(mCamera.UnProject(CVD::ir(CenterPx))));

    //Plane calculation Ax+By+Cz+1 = 0;
    D = CalcDeterminant(IP_C);
    auxMat = IP_C;
    auxMat[0] = TooN::Ones(3);
    A = (-1/D)*CalcDeterminant(auxMat.T());
    auxMat = IP_C;
    auxMat[1] = TooN::Ones(3);
    B = (-1/D)*CalcDeterminant(auxMat.T());
    auxMat = IP_C;
    auxMat[2] = TooN::Ones(3);
    C = (-1/D)*CalcDeterminant(auxMat.T());
    //Calculate the intersection between the plane and the line
    //Ecuation of the line: x = xd*t;y=yd*t;z=zd*t with (xd,yd,zd) the center vector in the camera frame
    t = -1/(A*CenterVec_C[0]+B*CenterVec_C[1]+C*CenterVec_C[2]);

    //Position of the center in the camera frmae
    //  TooN::Vector<3> CenterPos_C;
    CenterPos_C[0] = CenterVec_C[0]*t;
    CenterPos_C[1] = CenterVec_C[1]*t;
    CenterPos_C[2] = CenterVec_C[2]*t;

    //Position of the center in the world frame
    //TooN::Vector<3> CenterPos = TooN::makeVector(1,2,3);

    CenterPos = (OldFr->se3CfromW.inverse())*CenterPos_C;

    Daux.Velocity.x = (Daux.Center.x-CenterPos[0])/incTime;
    Daux.Velocity.y = (Daux.Center.y-CenterPos[1])/incTime;
    Daux.Velocity.z = (Daux.Center.z-CenterPos[2])/incTime;


    Daux.timeLastObserved = currtimestamp/1000000; //CurrFrame time
    TooN::Vector<4> Observation;
    Observation[0] = Daux.Center.x;
    Observation[1] = Daux.Center.y;
    Observation[2] = Daux.Center.z;
    Observation[3] = currtimestamp/1000000;
    Daux.lastObservations.push_back(Observation);
    DynObjs.push_back(Daux);
  }
  return DynObjs;
}

// Determinant of a 3x3 matrix via cofactor expansion along the first row.
// Algebraically identical to the rule-of-Sarrus form.
inline double DynamicObject::CalcDeterminant(TooN::Matrix<3> M)
{
  const double c0 = M[1][1]*M[2][2] - M[1][2]*M[2][1];
  const double c1 = M[1][0]*M[2][2] - M[1][2]*M[2][0];
  const double c2 = M[1][0]*M[2][1] - M[1][1]*M[2][0];
  return M[0][0]*c0 - M[0][1]*c1 + M[0][2]*c2;
}

void DynamicObject::TrackDynObj(std::vector<DynObj> D)
{

  float timeTolerance = 60; //maximum time without detection before deleting object
  float timeTolerance2 = 5; //maximum time without detection if the object has only been detected once
  double TransThreshold = 1; //
  int n_list = -1;
  int n_act = -1;
  int n_add = 0;
  bool first = true;
  float distance;
  int i_aux = 0;
  float distance_tol = 1;

  float auxTime;
  float dist_min = 0;
  float pos1[3];
  float pos0[3];
  double vx, vy, vz;
  int obj_act_added[n_act];
  int eliminate[n_list];
  int n_eliminate;
  float inc_time = 0;

  n_list = (int)TrackedDynObj.size();
  n_act = (int)D.size();

  float actTime = D[0].timeLastObserved; //act time
  std::cout<< "Begin Tracking" << std::endl;
  if (n_list == 0 && n_act>0)
  {
    std::cout<< "No objects saved in previous list" << std::endl;
    TrackedDynObj = D;

    return;
    //    if (max_ID == 0)
    //      max_ID = str_RobotObjectList.s_RobotID*1000;
    //
    //    for (int k=0;k<(int)str_RobotObjectList.List.size();k++)
    //    {
    //      str_RobotObjectList.List[k].f_Identification = max_ID+1;
    //      max_ID++;
    //    }
    //    str_RobotObjectList.max_ID = max_ID;
  }
  else if (n_act > 0 && n_list > 0)
  {
    std::cout<< "Merge two lists" << std::endl;
    n_eliminate = 0;

    double pred_pos[n_list][3];
    double act_pos[n_act][3];
    float pairs[n_list * n_act][ 3]; //ind_pred, ind_act,distance //all the possible pairs
    float obj_pairs[(int)fmin(n_list, n_act)][3]; //ind_pred,ind_act,distance //selected pairs


    //calculate the distance between the pairs
    for(int i = 0;i<(int)TrackedDynObj.size();i++)
    {
      pos0[0] = TrackedDynObj[i].Center.x;
      pos0[1] = TrackedDynObj[i].Center.y;
      pos0[2] = TrackedDynObj[i].Center.z;
      vx = TrackedDynObj[i].Velocity.x;
      vy = TrackedDynObj[i].Velocity.y;
      vz = TrackedDynObj[i].Velocity.z;
      auxTime = TrackedDynObj[i].timeLastObserved;
      pred_pos[i][ 0] = pos0[0] + vx * (actTime - auxTime);
      pred_pos[i][ 1] = pos0[1] + vy * (actTime - auxTime);
      pred_pos[i][ 2] = pos0[2] + vz * (actTime - auxTime);
      for(int j = 0;j<(int)D.size();j++)
      {
        if (first)
        {
          obj_act_added[j] = 0; // all the objects marked as not added. later mark the one added with 1
          act_pos[j][0] = D[j].Center.x;
          act_pos[j][1] = D[j].Center.y;
          act_pos[j][2] = D[j].Center.z;
        }
        distance = (float)sqrt((double)(pow((double)(act_pos[j][ 0] - pred_pos[i][ 0]), 2) +
            pow((double)(act_pos[j][ 2] - pred_pos[i][ 2]), 2)));
        pairs[j + i * n_act][ 0] = i;//*it->f_Identification;
        pairs[j + i * n_act][ 1] = j;//*it2->f_Identification;
        pairs[j + i * n_act][ 2] = distance;
        j++;
      }
      first = false;
      i++;
    }
    //Order and select pairs
    dist_min = 100000;
    while (i_aux < fmin(n_list, n_act))
    {
      for (int i = 0; i < (n_act * n_list); i++)
      {
        if (pairs[i][ 2] < dist_min && pairs[i][2]>0)
        {
          obj_pairs[i_aux][ 0] = pairs[i][ 0];
          obj_pairs[i_aux][ 1] = pairs[i][ 1];
          obj_pairs[i_aux][ 2] = pairs[i][ 2];
          dist_min = pairs[i][ 2];
        }
      }
      for (int i = 0; i < (n_act * n_list); i++)
      {
        if (pairs[i][ 0] == obj_pairs[i_aux][ 0])
          pairs[i][ 2] = -1;
        if (pairs[i][ 1] == obj_pairs[i_aux][ 1])
          pairs[i][ 2] = -1;
      }
      dist_min = 100000;
      i_aux++;

    }
    //Check each pair to see if distance its acceptable and to predict new position

    for (int i = 0; i < fmin(n_act, n_list); i++)
    {
      if (obj_pairs[i][ 2] < distance_tol)
      {
        obj_act_added[(int)obj_pairs[i][1]] = 1;
        pos1[0] = D[(int)obj_pairs[i][ 1]].Center.x;
        pos1[1] = D[(int)obj_pairs[i][ 1]].Center.y;
        pos1[2] = D[(int)obj_pairs[i][ 1]].Center.z;

        pos0[0] = TrackedDynObj[(int)obj_pairs[i][ 0]].Center.x;
        pos0[1] = TrackedDynObj[(int)obj_pairs[i][ 0]].Center.y;
        pos0[2] = TrackedDynObj[(int)obj_pairs[i][ 0]].Center.z;

        auxTime = TrackedDynObj[(int)obj_pairs[i][ 0]].timeLastObserved;
        //Time Spent in seconds
        inc_time = (actTime-auxTime);

        vx = (pos1[0] - pos0[0]) / inc_time;
        vy = (pos1[1] - pos0[1]) / inc_time;
        vz = (pos1[2] - pos0[2]) / inc_time;

        TrackedDynObj[(int)obj_pairs[i][ 0]].Center.x = pos1[0];
        TrackedDynObj[(int)obj_pairs[i][ 0]].Center.y = pos1[1];
        TrackedDynObj[(int)obj_pairs[i][0]].Center.z = pos1[2];
        TrackedDynObj[(int)obj_pairs[i][ 0]].Velocity.x = vx;
        TrackedDynObj[(int)obj_pairs[i][ 0]].Velocity.y = vy;
        TrackedDynObj[(int)obj_pairs[i][ 0]].Velocity.z = vz;

        TrackedDynObj[(int)obj_pairs[i][ 0]].timeLastObserved = actTime;
        TooN::Vector<4> Observation;
        Observation[0] = pos1[0];
        Observation[1] = pos1[1];
        Observation[2] = pos1[2];
        Observation[3] = actTime;
        TrackedDynObj[(int)obj_pairs[i][ 0]].lastObservations.insert(TrackedDynObj[(int)obj_pairs[i][ 0]].lastObservations.begin(),Observation);
        while(TrackedDynObj[(int)obj_pairs[i][ 0]].lastObservations.size()>15)
        {
          TrackedDynObj[(int)obj_pairs[i][ 0]].lastObservations.pop_back();
        }
        // TrackedDynObj[(int)obj_pairs[i][ 0]].PhysCharac = str_RobotObjectList_act.List[(int)obj_pairs[i][ 1]].PhysCharac;
      }
    }
  }
  //Remove objects not seen or actualize position with last known velocity

  n_eliminate = 0;
  for (int i = 0; i < n_list; i++)
  {
    eliminate[i] = 0;

    if(TrackedDynObj[i].lastObservations.size()>1)
    {
      if ((actTime - TrackedDynObj[i].timeLastObserved) > timeTolerance)
      {
        eliminate[i] = 1;
        n_eliminate++;
        continue;
      }
      double sum_x =0,sum_y=0,sum_z = 0;
      //Check positions of the object in order to check if it has been moving
      for(int j = 0;j<(int)TrackedDynObj[i].lastObservations.size();j++)
      {
        sum_x = sum_x + TrackedDynObj[i].lastObservations[0][0];
        sum_y = sum_y + TrackedDynObj[i].lastObservations[0][1];
        sum_z = sum_z + TrackedDynObj[i].lastObservations[0][2];
      }
      if(abs(sum_x) < TransThreshold && abs(sum_y) < TransThreshold)
      {
        eliminate[i] = 1;
        n_eliminate++;
        continue;
      }
    }
    else
    {
      if ((actTime - TrackedDynObj[i].timeLastObserved) > timeTolerance2)
      {
        eliminate[i] = 1;
        n_eliminate++;
        continue;
      }
    }
  }
  //Eliminate the objects
  //Create auxiliary array of structures to save the objects that are not going to be eliminated
  i_aux = 0;
  for (int i = 0; i < n_act; i++)
  {
    if (obj_act_added[i] == 0)
      n_add++;
  }
  //DynObjects_aux = new cl_DynamicObjects((ushort)(n_list - n_eliminate + n_add));
  std::vector<DynObj> DynObjects_aux;
  DynObjects_aux.resize(n_list-n_eliminate+n_add);
  for (int i = 0; i < n_list; i++)
  {
    if (eliminate[i] == 0)
    {
      DynObjects_aux[i_aux] = TrackedDynObj[i];
      i_aux++;
    }
  }
  //Add the objects that have not been paired
  for (int i = 0; i < n_act; i++)
  {
    if (obj_act_added[i] == 0)
    {
      DynObjects_aux[i_aux] = D[i];
      //           max_ID++;
      i_aux++;
    }
  }
  TrackedDynObj.clear();
  TrackedDynObj = DynObjects_aux;


}








//OLD FUNCTIONS ////////////////////////////////////////////////////



// (Legacy path, see "OLD FUNCTIONS".) Grid-based moving-object test: finds
// the map-point measurements common to OldFrame and CurrentFrame, draws their
// optical flow, then walks a mnPatchSize grid over the image. For each grid
// cell it interpolates a 3D position for the cell center from nearby static
// measurements and asks PotentialDynamicObject whether the cell appears to be
// moving. Results are collected in MovementGrid (1 moving / -1 static /
// 0 indeterminate); the object-clustering stage is commented out.
// Side effects: OpenGL drawing, and OldFrame is replaced by CurrentFrame.
void DynamicObject::NewFrameGrid(boost::shared_ptr<KeyFrame> CurrentFrame, std::vector<TooN::Vector<3> > &vTest)
{
  vTest.clear();

  if(OldFrame)
  {
    // Measurements of the same map points seen in both frames.
    std::vector<StereoMeasurement> M;
    DynamicObject::FindCommonMeasurements(OldFrame, CurrentFrame, M);

    // Draw optical flow of static points...
    glPointSize(5);
    glLineWidth(2);
    glEnable(GL_POINT_SMOOTH);
    glEnable(GL_LINE_SMOOTH);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    glEnable(GL_BLEND);
    glBegin(GL_LINES);
    for(int i = 0; i < (int)M.size(); ++i)
    {
      glColor3f(0,1,0);
      glVertex(M[i].v2ImPos1);
      glColor3f(0,0,1);
      glVertex(M[i].v2ImPos2);
    }
    glEnd();

    // Only proceed with a reasonable number of common measurements; this also
    // guarantees the Mlist-growing loop below can terminate.
    if(M.size() > 20)
    {
      //LookUp table for rows indexs in M
      //In image frame the first component (x) is the column and the second (y) is the row
      int GridPxRow;
      TooN::Vector<2> CenterPx;
      // RowLookUp[r] lists the indices of measurements whose old position
      // falls into grid row r.
      std::vector<std::vector<int> > RowLookUp(CameraSize[1]/mnPatchSize);
      for(int i=0; i< (int)M.size(); i++)
      {
        GridPxRow = (M[i].v2ImPos1[1]-(fmod(M[i].v2ImPos1[1],(double)mnPatchSize)))/(double)mnPatchSize;
        RowLookUp[GridPxRow].push_back(i);
      }

      //Loop through all the grid
      // NOTE(review): MovementGrid is sized CameraSize[0]/mnPatchSize while
      // RowLookUp uses CameraSize[1]/mnPatchSize, and CenterPx[0] is labeled
      // "row" below -- confirm the row/column convention is consistent.
      std::vector<std::vector<double> > MovementGrid(CameraSize[0]/mnPatchSize);
      //glPointSize(6); glBegin(GL_POINTS);
      for(int i = 0; i< (int)MovementGrid.size();i++)
      {
        MovementGrid[i].resize(CameraSize[1]/mnPatchSize);
        for(int j = 0; j< (int)MovementGrid[i].size(); j++)
        {
          CenterPx[0] = (i*mnPatchSize)+(mnPatchSize/2)-0.5; //row
          CenterPx[1] = (j*mnPatchSize)+(mnPatchSize/2)-0.5; //column

          //List of static points in rows near row i
          // Grow the search window (PatchDist) until at least three
          // measurements are collected for the interpolation.
          std::vector<StereoMeasurement> Mlist;
          int PatchDist = 1;
          while((int)Mlist.size()<3)
          {
            Mlist.clear();
            for (int k = j-PatchDist;k<(j+PatchDist);k++)
            {
              if (k>=0 && k<(int)RowLookUp.size())
              {
                for (int k2=0;k2<(int)RowLookUp[k].size();k2++)
                  Mlist.push_back(M[RowLookUp[k][k2]]);
              }
            }
            PatchDist++;
          }

          //Once the list has 3 or more elements, the interpolation can begin
          // CenterPos[3] appears to be a status flag: 0 = interpolation valid.
          TooN::Vector<4> CenterPos = DynamicObject::Interpolation(Mlist, CenterPx, CurrentFrame );
          boost::tribool movement;
          if (CenterPos[3] == 0)
          {
            vTest.push_back(CenterPos.slice<0,3>());
            movement = DynamicObject::PotentialDynamicObject(OldFrame, CurrentFrame, CenterPx, CenterPos.slice<0,3>());
          }
          else
          {
            movement = false;
          }
          // tribool: true -> moving, false -> static, indeterminate -> unknown.
          if (movement == true)
          {
            MovementGrid[i][j]= 1;
            //glColor3f(1,0,1);
            //glVertex(CenterPx);
          }
          else if (movement == false)
          {
            MovementGrid[i][j] = -1;
            //glColor3f(0,1,1);
            //glVertex(CenterPx);
          }
          else
          {
            MovementGrid[i][j] = 0;
            //glColor3f(1,1,1);
            //glVertex(CenterPx);
          }
        }
      }
      //glEnd();

      //Once the movementGrid is completed we check for objects
      /*std::vector<std::vector<double> > Objects;
      Objects = MatrixObjectFinder(MovementGrid);

      //Put all the pixels in a list
      std::vector<TooN::Vector<2> > PixelList;
      int GridPx[2];
      TooN::Vector<2> ImgPx;
      glColor3f(1,1,0);  glPointSize(6); glBegin(GL_POINTS);
      for(int i=0;i<(int)Objects.size();i++)
      {
        for(int j=0;j<(int)Objects[i].size();j++)
        {
          GridPx[1] = (int)fmod(Objects[i][j],CameraSize[1]/mnPatchSize);
          GridPx[0] = (int)(((Objects[i][j]-GridPx[1])/(CameraSize[1]/mnPatchSize)));
          ImgPx[0] = (GridPx[0]*mnPatchSize)+(mnPatchSize/2)-0.5; //row
          ImgPx[1] = (GridPx[1]*mnPatchSize)+(mnPatchSize/2)-0.5; //column
          PixelList.push_back(ImgPx);

                glVertex(ImgPx);

        }
      }
      glEnd();

      std::cout << PixelList.size() << std::endl;*/
    }
  }

  // Cache the current frame for the next call.
  OldFrame = CurrentFrame;
}

//
//
//void DynamicObject::NewFrame2(boost::shared_ptr<KeyFrame> CurrentKeyFrame, boost::shared_ptr<KeyFrame> CurrentFrame, std::vector<TooN::Vector<3> > &vTest)
//{
//  vTest.clear();
//  CurrentFrame->aLevels[0].vCorners[1]; //ALL THE POINTS
//  std::cout << "Begin********************************************" << std::endl;
//  std::vector<StereoMeasurement> RealOpticalFlow;
//  std::vector<StereoMeasurement> ArtiOpticalFlow;
//  std::vector<StereoMeasurement> M;
//  //Compute Artificial Optical Flow
//  DynamicObject::FindCommonMeasurements(OldFrame, CurrentFrame, M);
//
//  cv::Mat CurrentFrameMat = cv::Mat(CameraSize[0],CameraSize[1], CV_8UC1,CurrentFrame->aLevels[0].im.data(),CurrentFrame->aLevels[0].im.row_stride());
//  if(OldFrame)
//  {
//    //Compute Optical Flow
//    //select points to calculate to frame
//    //calcOpticalFlowPyrLK()
//    std::vector<cv::Point2f> CurrFrameFeaturePos;
//    std::vector<unsigned char> status;
//    std::vector<float> err;
//    cv::Size winSize(15,15);
//    cv::TermCriteria criteria(cv::TermCriteria::COUNT + cv::TermCriteria::EPS, 30, 0.01);
//    cv::calcOpticalFlowPyrLK(OldFrameMat,CurrentFrameMat,OldFrameFeaturePos,CurrFrameFeaturePos,status,err,winSize,3,criteria,0.5,0);
//    std::cout << "Optical flow calculated with n points: " << OldFrameFeaturePos.size() <<std::endl;
//
//    //Draw optical Flow
//    glPointSize(5);
//    glLineWidth(2);
//    glEnable(GL_POINT_SMOOTH);
//    glEnable(GL_LINE_SMOOTH);
//    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
//    glEnable(GL_BLEND);
//    glBegin(GL_LINES);
//    std::vector<int> Points2Eliminate;
//    double ImgDistThreshold = 0.5;
//    double dist;
//    TooN::Vector<2> vec1,vec2;
//    for(int i=0;i<(int)OldFrameFeaturePos.size();i++)
//    {
//
//      vec1[0] = OldFrameFeaturePos[i].x;
//      vec1[1] = OldFrameFeaturePos[i].y;
//
//      vec2[0] = CurrFrameFeaturePos[i].x;
//      vec2[1] = CurrFrameFeaturePos[i].y;
//      dist = sqrt(pow(vec1[0]-vec2[0],2)+pow(vec1[1]-vec2[1],2));
//      if (dist<ImgDistThreshold*fmax(CameraSize[0],CameraSize[1]))
//      {
//        glColor3f(0,1,0);
//        glVertex(vec1);
//        glColor3f(0,0,1);
//        glVertex(vec2);
//      }
//      else
//      {
//        Points2Eliminate.push_back(i);
//      }
//    }
//    glEnd();
//    std::cout << "After drawing" << std::endl;
//    /*
//    for(int i=0;i<(int)Points2Eliminate.size();i++)
//    {
//      OldFrameFeaturePos.erase(OldFrameFeaturePos.begin()+Points2Eliminate[i]);
//      CurrFrameFeaturePos.erase(OldFrameFeaturePos.begin()+Points2Eliminate[i]);
//    }
//     */
//    std::cout << "After deleting" << std::endl;
//
//
//    //Updating
//    if(CurrentKeyFrame != OldKeyFrame) //is a new KeyFrame
//    {
//      std::cout << "New Key Frame" << std::endl;
//
//
//
//      CurrFrameFeaturePos.clear();
//      int NFeatures = 1000;
//      CurrFrameFeaturePos.resize(NFeatures);
//      std::partial_sort(CurrentKeyFrame->aLevels[0].vMaxCorners.begin(), CurrentKeyFrame->aLevels[0].vMaxCorners.begin()+NFeatures, CurrentKeyFrame->aLevels[0].vMaxCorners.end(), SortScores);
//      for(int i=0;i<NFeatures;i++)
//      {
//        CurrFrameFeaturePos[i].x = (float)CurrentKeyFrame->aLevels[0].vMaxCorners[i].first.x;
//        CurrFrameFeaturePos[i].y = (float)CurrentKeyFrame->aLevels[0].vMaxCorners[i].first.y;
//      }
//
//      /*
//      CurrFrameFeaturePos.resize(CurrentKeyFrame->aLevels[0].vCandidates.size());
//      for(int i=0;i<(int)CurrentKeyFrame->aLevels[0].vCandidates.size();i++)
//      {
//        CurrFrameFeaturePos[i].x = (float)CurrentKeyFrame->aLevels[0].vCandidates[i].irLevelPos.x;
//        CurrFrameFeaturePos[i].y = (float)CurrentKeyFrame->aLevels[0].vCandidates[i].irLevelPos.y;
//      }
//       */
//
//      OldKeyFrame = CurrentKeyFrame;
//      std::cout << "Key Frame obtained" << std::endl;
//    }
//    OldFrameFeaturePos = CurrFrameFeaturePos;
//  }
//  OldFrame = CurrentFrame;
//  OldFrameMat = CurrentFrameMat;
//}

// Compare the measured ("real") optical flow against the flow predicted from
// camera motion alone ("artificial").
// NOTE(review): not yet implemented — the body is empty and the parameter is
// currently unused.
void DynamicObject::CompareOpticalFlow(std::vector<std::vector<cv::Point2f> > OpticalFlows)
{
  //OpticalFlows has all the points
  //OpticalFlows[0] origin of real OF
  //OpticalFlows[1] end of real OF
  //OpticalFlows[2] origin of artificial OF
  //OpticalFlows[3] end of artificial OF
}

// Label the 8-connected components of positive cells in the movement grid M
// (m_matrix rows of n_matrix columns, as produced in NewFrame). Each returned
// object is the list of its cell indices, encoded as i*n_matrix + j, which is
// the decoding used by the (currently commented-out) caller in NewFrame.
//
// Fixes over the previous version:
//  - Objects[label] after Objects.resize(label) was out of bounds; components
//    are now stored at Objects[label-1].
//  - Cell ids were encoded with (i-1)*m_matrix+j (negative for i==0, and
//    encoded with m_matrix but decoded with n_matrix); now a consistent
//    0-based i*n_matrix+j encoding is used throughout.
//  - The neighbour bounds test was 1-based (pos_i <= m_matrix) while M is
//    0-indexed, causing out-of-bounds reads; bounds are now 0-based.
//  - The join condition required M[pos_i][pos_j] NOT to be positive (inverted),
//    flooding into non-moving cells; it now grows only into moving cells.
std::vector<std::vector<double> > DynamicObject::MatrixObjectFinder(std::vector<std::vector<double> > &M)
{
  int n_matrix = CameraSize[1]/mnPatchSize; // cells per row (columns)
  int m_matrix = CameraSize[0]/mnPatchSize; // cells per column (rows)
  std::vector<std::vector<double> > Objects;
  // Per-cell component label; 0 = unlabelled.
  std::vector<std::vector<int> > MObjects(m_matrix, std::vector<int>(n_matrix, 0));
  int label = 0;
  // 8-neighbourhood offsets.
  int u1 [] = { 0, 1, 1, 1, 0, -1, -1, -1 };
  int u2 [] = { 1, 1, 0, -1, -1, -1, 0, 1 };

  for (int i = 0; i < m_matrix; i++)
  {
    for (int j = 0; j < n_matrix; j++)
    {
      if (M[i][j] > 0 && MObjects[i][j] == 0)
      {
        // New object found: flood-fill from (i,j).
        label++;
        Objects.resize(label);
        std::set<int> OpenPx;   // frontier cells still to visit
        std::set<int> ClosedPx; // cells already absorbed into the component
        OpenPx.insert(i*n_matrix + j);
        while (!OpenPx.empty())
        {
          int act_px = *OpenPx.begin();
          int pos_i_act = act_px / n_matrix;
          int pos_j_act = act_px % n_matrix;
          // Check the 8 neighbours of the current cell.
          for (int k = 0; k < 8; k++)
          {
            int pos_i = pos_i_act + u1[k];
            int pos_j = pos_j_act + u2[k];
            // Stay inside the grid.
            if (pos_i >= 0 && pos_i < m_matrix && pos_j >= 0 && pos_j < n_matrix)
            {
              int aux_pos = pos_i*n_matrix + pos_j;
              bool bInOpen   = (OpenPx.count(aux_pos) > 0);
              bool bInClosed = (ClosedPx.count(aux_pos) > 0);
              bool bMoving   = (M[pos_i][pos_j] > 0);
              // Grow only into unvisited moving cells.
              if (!bInOpen && !bInClosed && bMoving)
                OpenPx.insert(aux_pos);
            }
          }
          MObjects[pos_i_act][pos_j_act] = label;
          Objects[label-1].push_back((double)act_px);
          ClosedPx.insert(act_px);
          OpenPx.erase(act_px);
        }
      }
    }
  }
  return Objects;
}




// Estimate the 3D world position of image point CenterPx by fitting a plane
// through its three nearest map measurements (nearest in image space) and
// intersecting the viewing ray with that plane.
// Returns [x, y, z, flag]: flag == 0 on success, flag == -1 when the
// interpolation is rejected (fewer than three support points, degenerate
// geometry, support points too far away relative to depth, or a depth
// implausible for the current scene).
//
// Fixes over the previous version: guard against Mlist.size() < 3 (previously
// an out-of-bounds crash), guard against a degenerate plane (D == 0) and a
// ray parallel to the plane (division by zero), use fabs() instead of abs()
// on doubles (abs may resolve to the integer overload and truncate), and
// cast the double-typed sort indices explicitly.
TooN::Vector<4> DynamicObject::Interpolation(std::vector<StereoMeasurement> &Mlist, TooN::Vector<2> CenterPx, boost::shared_ptr<KeyFrame> CurrentFrame)
{
  TooN::Vector<4> CenterPos = TooN::makeVector(0,0,0,-1); // flag defaults to rejected
  if ((int)Mlist.size() < 3)
    return CenterPos; // Need at least three support points to fit a plane.

  std::vector<TooN::Vector<2> > Distances; // (index, image-space distance) per measurement
  Distances.resize(Mlist.size());
  for(int i=0; i< (int)Mlist.size(); i++)
  {
    Distances[i][0] = i;
    Distances[i][1] = sqrt(pow(Mlist[i].v2ImPos1[0]-CenterPx[0],2)+pow(Mlist[i].v2ImPos1[1]-CenterPx[1],2));
  }
  std::sort(Distances.begin(),Distances.end(),SortDistances);
  //Use first three elements in Distances to interpolate position of CenterPx
  std::vector<StereoMeasurement> IP;
  IP.resize(3); //Nearest three points that we will use to interpolate
  IP[0] = Mlist[(int)Distances[0][0]];
  IP[1] = Mlist[(int)Distances[1][0]];
  IP[2] = Mlist[(int)Distances[2][0]];
  //First change everything into camera coordinates, where origin is 0,0,0:
  Matrix<3> IP_C;
  IP_C.T()[0] = (OldFrame->se3CfromW*IP[0].v3WorldPos);
  IP_C.T()[1] = (OldFrame->se3CfromW*IP[1].v3WorldPos);
  IP_C.T()[2] = (OldFrame->se3CfromW*IP[2].v3WorldPos);
  //Transform the pixel position to a unit viewing ray in the camera frame
  TooN::Vector<3> CenterVec_C = TooN::unit(TooN::unproject(mCamera.UnProject(CVD::ir(CenterPx))));
  //Plane calculation Ax+By+Cz+1 = 0 via Cramer's rule
  double D = CalcDeterminant(IP_C);
  if (D == 0)
    return CenterPos; // Degenerate (e.g. collinear) support points.
  TooN::Matrix<3> auxMat = IP_C;
  auxMat[0] = TooN::Ones(3);
  double A = (-1/D)*CalcDeterminant(auxMat.T());
  auxMat = IP_C;
  auxMat[1] = TooN::Ones(3);
  double B = (-1/D)*CalcDeterminant(auxMat.T());
  auxMat = IP_C;
  auxMat[2] = TooN::Ones(3);
  double C = (-1/D)*CalcDeterminant(auxMat.T());
  //Calculate the intersection between the plane and the line
  //Equation of the line: x = xd*t; y = yd*t; z = zd*t with (xd,yd,zd) the center vector in the camera frame
  double dDenom = A*CenterVec_C[0]+B*CenterVec_C[1]+C*CenterVec_C[2];
  if (dDenom == 0)
    return CenterPos; // Viewing ray parallel to the fitted plane.
  double t = -1/dDenom;
  //Position of the center in the camera frame
  TooN::Vector<3> CenterPos_C;
  CenterPos_C[0] = CenterVec_C[0]*t;
  CenterPos_C[1] = CenterVec_C[1]*t;
  CenterPos_C[2] = CenterVec_C[2]*t;
  //Position of the center in the world frame
  CenterPos.slice<0,3>() = (OldFrame->se3CfromW.inverse())*CenterPos_C;

  //Evaluate the point obtained by interpolation:
  //Depth of the CenterPos in camera frame
  double CenterDepth = CenterPos_C[2];
  //Distance from the CenterPos to the points used to calculate the interpolation
  TooN::Vector<3> Dist2IP;
  Dist2IP[0] = sqrt(pow(CenterPos[0]-IP[0].v3WorldPos[0],2)+pow(CenterPos[1]-IP[0].v3WorldPos[1],2)+pow(CenterPos[2]-IP[0].v3WorldPos[2],2));
  Dist2IP[1] = sqrt(pow(CenterPos[0]-IP[1].v3WorldPos[0],2)+pow(CenterPos[1]-IP[1].v3WorldPos[1],2)+pow(CenterPos[2]-IP[1].v3WorldPos[2],2));
  Dist2IP[2] = sqrt(pow(CenterPos[0]-IP[2].v3WorldPos[0],2)+pow(CenterPos[1]-IP[2].v3WorldPos[1],2)+pow(CenterPos[2]-IP[2].v3WorldPos[2],2));
  double RatioThreshold = 0.5;
  double DepthThresholdMax = CurrentFrame->dSceneDepthMean+2*CurrentFrame->dSceneDepthSigma;
  double DepthThresholdMin = CurrentFrame->dSceneDepthMean-2*CurrentFrame->dSceneDepthSigma;
  if (fabs(Dist2IP[0]/CenterDepth)>RatioThreshold || fabs(Dist2IP[1]/CenterDepth)>RatioThreshold || fabs(Dist2IP[2]/CenterDepth)>RatioThreshold)
  {
    CenterPos[3] = -1; // Support points too spread out relative to depth.
  }
  else if (CenterDepth > DepthThresholdMax || CenterDepth < DepthThresholdMin)
  {
    CenterPos[3] = -1; // Depth implausible for the current scene statistics.
  }
  else
    CenterPos[3] = 0;

  return CenterPos;
}



// For every map point measured in BOTH keyframes, append a StereoMeasurement
// holding its image position in each frame and its world position. Any
// previous contents of meas are discarded.
void DynamicObject::FindCommonMeasurements(boost::shared_ptr<KeyFrame> kf1, boost::shared_ptr<KeyFrame> kf2, std::vector<StereoMeasurement> &meas)
{
  typedef std::map<boost::shared_ptr<MapPoint>, Measurement>::iterator MeasIter;

  meas.clear();
  for(MeasIter it = kf1->mMeasurements.begin(); it != kf1->mMeasurements.end(); ++it)
  {
    MeasIter match = kf2->mMeasurements.find(it->first);
    if(match == kf2->mMeasurements.end())
      continue; // Point not observed in the second keyframe.

    StereoMeasurement s;
    s.v2ImPos1 = it->second.v2RootPos;
    s.v2ImPos2 = match->second.v2RootPos;
    s.v3WorldPos = it->first->v3WorldPos;
    meas.push_back(s);
  }
}

// Decide whether the image patch at v2ImPos1 in kf1 (whose interpolated world
// position is v3WorldPos) still matches its appearance when warped into kf2,
// assuming it lies on a plane facing the camera.
// Returns:
//   false         - warped patch matches (ZMSSD below mnMaxSSD): no movement
//   true          - warped patch does not match: potential dynamic object
//   indeterminate - comparison impossible (projection into kf2 failed, warp
//                   determinant out of range, or patch outside the image)
// Side effects: uses Project() below, which caches mv3Cam, mv2Image and
// m2CamDerivs; also fills mimTemplate and the template sums.
boost::tribool DynamicObject::PotentialDynamicObject(boost::shared_ptr<KeyFrame> kf1, boost::shared_ptr<KeyFrame> kf2, TooN::Vector<2> v2ImPos1, TooN::Vector<3> v3WorldPos)
{
  // Assumed patch-plane normal in kf1's camera frame (toward the camera).
  Vector<3> v3Normal_NC = makeVector(0, 0,-1);
  // Unit viewing rays through the patch centre and its one-pixel-right /
  // one-pixel-down neighbours.
  Vector<3> v3Center_NC = unit(unproject(mCamera.UnProject(v2ImPos1)));
  Vector<3> v3OneDownFromCenter_NC = unit(unproject(mCamera.UnProject(v2ImPos1 + makeVector(0.0,1.0))));
  Vector<3> v3OneRightFromCenter_NC = unit(unproject(mCamera.UnProject(v2ImPos1 + makeVector(1.0,0.0))));
  
  // Find patch pos in KF camera coords
  // Actually this might not exactly correspond to the patch pos!
  // Treat it as a general point on the plane.
  Vector<3> v3PlanePoint_C = kf1->se3CfromW * v3WorldPos;
  
  // Find the height of this above the plane.
  // Assumes the normal is  pointing toward the camera.
  double dCamHeight = fabs(v3PlanePoint_C * v3Normal_NC);

  // Rate at which each ray approaches the plane (dot products with normal).
  double dPixelRate = fabs(v3Center_NC * v3Normal_NC);
  double dOneRightRate = fabs(v3OneRightFromCenter_NC * v3Normal_NC);
  double dOneDownRate = fabs(v3OneDownFromCenter_NC * v3Normal_NC);
  
  // Find projections onto plane
  Vector<3> v3CenterOnPlane_C = v3Center_NC * dCamHeight / dPixelRate;
  Vector<3> v3OneRightOnPlane_C = v3OneRightFromCenter_NC * dCamHeight / dOneRightRate;
  Vector<3> v3OneDownOnPlane_C = v3OneDownFromCenter_NC * dCamHeight / dOneDownRate;
  
  // Find differences of these projections in the world frame
  Vector<3> v3PixelRight_W = kf1->se3CfromW.get_rotation().inverse() * (v3OneRightOnPlane_C - v3CenterOnPlane_C);
  Vector<3> v3PixelDown_W = kf1->se3CfromW.get_rotation().inverse() * (v3OneDownOnPlane_C - v3CenterOnPlane_C);

  // Project the point into kf2; caches mv3Cam, mv2Image, m2CamDerivs.
  if(!Project(kf2->se3CfromW, v3WorldPos))
    return boost::indeterminate;   

  double dOneOverCameraZ = 1.0 / mv3Cam[2];
  // Project the source keyframe's one-pixel-right and one-pixel-down vectors into the current view
  Vector<3> v3MotionRight = kf2->se3CfromW.get_rotation() * v3PixelRight_W;
  Vector<3> v3MotionDown = kf2->se3CfromW.get_rotation() * v3PixelDown_W;

  // Calculate in-image derivatives of source image pixel motions:
  mm2WarpInverse.T()[0] = m2CamDerivs * (v3MotionRight.slice<0,2>() - mv3Cam.slice<0,2>() * v3MotionRight[2] * dOneOverCameraZ) * dOneOverCameraZ;
  mm2WarpInverse.T()[1] = m2CamDerivs * (v3MotionDown.slice<0,2>() - mv3Cam.slice<0,2>() * v3MotionDown[2] * dOneOverCameraZ) * dOneOverCameraZ;
  double dDet = mm2WarpInverse[0][0] * mm2WarpInverse[1][1] - mm2WarpInverse[0][1] * mm2WarpInverse[1][0];
  
  // Some warps are inappropriate, e.g. too near the camera, too far, or reflected, 
  // or zero area.. reject these!
  if(dDet > 3 || dDet < 0.25)
    return boost::indeterminate;

  // Get the warping matrix appropriate for use with CVD::transform...
  Matrix<2> m2 = M2Inverse(mm2WarpInverse); 
  
  // Warp the kf1 patch into mimTemplate; non-zero return means some pixels
  // could not be sampled.
  if(CVD::transform(kf1->aLevels[0].im, mimTemplate, m2, v2ImPos1, vec(mirCenter))) 
    return boost::indeterminate; // Transformed patch partially/totally outside new image.
  
  // Cache template sum / sum-of-squares for the zero-mean SSD.
  MakeTemplateSums();

  // Compare the warped template against kf2's image at the projected position.
  int nSSD = ZMSSDAtPoint(kf2->aLevels[0].im, ir(mv2Image));

  if(nSSD < mnMaxSSD)
    return false;   // Good match: patch appears static.
  else
    return true;    // Mismatch: potential dynamic object.
};

// Project v3WorldPos through the camera pose se3CFromW, caching the
// camera-frame point (mv3Cam), the distorted image position (mv2Image) and
// the projection derivatives (m2CamDerivs). Returns false when the point is
// behind the camera, outside the lens radius, invalid for the camera model,
// or outside the image rectangle.
inline bool DynamicObject::Project(const SE3<> &se3CFromW, TooN::Vector<3> v3WorldPos)
{
  mv3Cam = se3CFromW * v3WorldPos;
  if(mv3Cam[2] < 0.001)
    return false; // Behind (or practically on) the camera plane.

  Vector<2> v2ImPlane = project(mv3Cam);
  double dMaxRadius = mCamera.LargestRadiusInImage();
  if(v2ImPlane*v2ImPlane > dMaxRadius * dMaxRadius)
    return false; // Outside the lens field of view.

  mv2Image = mCamera.Project(v2ImPlane);
  if(mCamera.Invalid())
    return false;

  // Reject points projecting outside the image rectangle.
  if(mv2Image[0] < 0 || mv2Image[1] < 0)
    return false;
  if(mv2Image[0] > mCamera.GetImageSize()[0] || mv2Image[1] > mCamera.GetImageSize()[1])
    return false;

  m2CamDerivs = mCamera.GetProjectionDerivs();
  return true;
}

// Finds the sum, and sum-squared, of template pixels. These sums are used
// to calculate the ZMSSD.
inline void DynamicObject::MakeTemplateSums()
{
  int nSum = 0;
  int nSumSq = 0;
  ImageRef ir;
  do
  {
    int b = mimTemplate[ir];
    nSum += b;
    nSumSq +=b * b;
  }      
  while(ir.next(mimTemplate.size()));
  mnTemplateSum = nSum;
  mnTemplateSumSq = nSumSq;
}

/////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////
// 
//
//              ZMSSDatpoint, which is SSE optimised, follows
//
// The top version is the SSE version for 8x8 patches. It is compiled
// only if CVD_HAVE_XMMINTRIN is true, also you need to give your 
// compiler the appropriate flags (e.g. -march=core2 -msse3 for g++.)
// The standard c++ version, which is about half as quick (not a disaster
// by any means) is below.
//
// The 8x8 SSE version looks long because it has been unrolled, 
// it just does the same thing eight times. Both versions are one-pass
// and need pre-calculated template sums and sum-squares.
//
/////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////

#if CVD_HAVE_XMMINTRIN
// Horizontal sum of the eight uint16 lanes held in an XMM register.
inline int SumXMM_16(__m128i &target)
{
  // Spill the register to memory, then add the lanes with a plain loop.
  unsigned short int sums_store[8];
  _mm_storeu_si128((__m128i*)sums_store, target);
  int nTotal = 0;
  for(int i = 0; i < 8; i++)
    nTotal += sums_store[i];
  return nTotal;
}
// Horizontal sum of the four uint32 lanes held in an XMM register.
inline int SumXMM_32(__m128i &target)
{
  // Spill the register to memory, then add the lanes with a plain loop.
  unsigned int sums_store[4];
  _mm_storeu_si128((__m128i*)sums_store, target);
  int nTotal = 0;
  for(int i = 0; i < 4; i++)
    nTotal += sums_store[i];
  return nTotal;
}
#endif

// Calculate the Zero-mean SSD of the coarse patch and a target image at a specific
// point. Requires MakeTemplateSums() to have been called first (uses
// mnTemplateSum / mnTemplateSumSq). Returns mnMaxSSD + 1 when the patch would
// fall outside the image border, so callers can treat it as "no match".
int DynamicObject::ZMSSDAtPoint(CVD::BasicImage<CVD::byte> &im, const CVD::ImageRef &ir)
{
  if(!im.in_image_with_border(ir, mirCenter[0]))
    return mnMaxSSD + 1;
  
  // Top-left corner of the patch in the target image.
  ImageRef irImgBase = ir - mirCenter;
  byte *imagepointer;
  byte *templatepointer;
  
  // One-pass accumulators over the image patch: sum of squares, sum, and the
  // image-template cross-correlation sum.
  int nImageSumSq = 0;
  int nImageSum = 0;
  int nCrossSum = 0;

#if CVD_HAVE_XMMINTRIN
  // SSE fast path, unrolled for 8x8 patches only. Each 16-byte template load
  // covers TWO 8-pixel template rows: the low half (unpacklo) pairs with one
  // image row, the high half (unpackhi) with the next.
  if(mnPatchSize == 8)
    {
      long unsigned int imagepointerincrement;

      __m128i xImageAsEightBytes;
      __m128i xImageAsWords;
      __m128i xTemplateAsEightBytes;
      __m128i xTemplateAsWords;
      __m128i xZero;
      __m128i xImageSums; // These sums are 8xuint16
      __m128i xImageSqSums; // These sums are 4xint32
      __m128i xCrossSums;   // These sums are 4xint32
      __m128i xProduct;

      
      xImageSums = _mm_setzero_si128();
      xImageSqSums = _mm_setzero_si128();
      xCrossSums = _mm_setzero_si128();
      xZero = _mm_setzero_si128();
      
      // Image rows are walked via the stride between consecutive rows.
      imagepointer = &im[irImgBase + ImageRef(0,0)];
      templatepointer = &mimTemplate[ImageRef(0,0)];
      imagepointerincrement = &im[irImgBase + ImageRef(0,1)] - imagepointer;
      
      // Rows 0 and 1:
      xImageAsEightBytes=_mm_loadl_epi64((__m128i*) imagepointer);
      imagepointer += imagepointerincrement;
      xImageAsWords = _mm_unpacklo_epi8(xImageAsEightBytes,xZero);
      xImageSums = _mm_adds_epu16(xImageAsWords,xImageSums);
      xProduct = _mm_madd_epi16(xImageAsWords, xImageAsWords);
      xImageSqSums = _mm_add_epi32(xProduct, xImageSqSums);
      xTemplateAsEightBytes=_mm_load_si128((__m128i*) templatepointer);
      templatepointer += 16;
      xTemplateAsWords = _mm_unpacklo_epi8(xTemplateAsEightBytes,xZero);
      xProduct = _mm_madd_epi16(xImageAsWords, xTemplateAsWords);
      xCrossSums = _mm_add_epi32(xProduct, xCrossSums);
      xImageAsEightBytes=_mm_loadl_epi64((__m128i*) imagepointer);
      imagepointer += imagepointerincrement;
      xImageAsWords = _mm_unpacklo_epi8(xImageAsEightBytes,xZero);
      xImageSums = _mm_adds_epu16(xImageAsWords,xImageSums);
      xProduct = _mm_madd_epi16(xImageAsWords, xImageAsWords);
      xImageSqSums = _mm_add_epi32(xProduct, xImageSqSums);
      xTemplateAsWords = _mm_unpackhi_epi8(xTemplateAsEightBytes,xZero);
      xProduct = _mm_madd_epi16(xImageAsWords, xTemplateAsWords);
      xCrossSums = _mm_add_epi32(xProduct, xCrossSums);

      // Rows 2 and 3:
      xImageAsEightBytes=_mm_loadl_epi64((__m128i*) imagepointer);
      imagepointer += imagepointerincrement;
      xImageAsWords = _mm_unpacklo_epi8(xImageAsEightBytes,xZero);
      xImageSums = _mm_adds_epu16(xImageAsWords,xImageSums);
      xProduct = _mm_madd_epi16(xImageAsWords, xImageAsWords);
      xImageSqSums = _mm_add_epi32(xProduct, xImageSqSums);
      xTemplateAsEightBytes=_mm_load_si128((__m128i*) templatepointer);
      templatepointer += 16;
      xTemplateAsWords = _mm_unpacklo_epi8(xTemplateAsEightBytes,xZero);
      xProduct = _mm_madd_epi16(xImageAsWords, xTemplateAsWords);
      xCrossSums = _mm_add_epi32(xProduct, xCrossSums);
      xImageAsEightBytes=_mm_loadl_epi64((__m128i*) imagepointer);
      imagepointer += imagepointerincrement;
      xImageAsWords = _mm_unpacklo_epi8(xImageAsEightBytes,xZero);
      xImageSums = _mm_adds_epu16(xImageAsWords,xImageSums);
      xProduct = _mm_madd_epi16(xImageAsWords, xImageAsWords);
      xImageSqSums = _mm_add_epi32(xProduct, xImageSqSums);
      xTemplateAsWords = _mm_unpackhi_epi8(xTemplateAsEightBytes,xZero);
      xProduct = _mm_madd_epi16(xImageAsWords, xTemplateAsWords);
      xCrossSums = _mm_add_epi32(xProduct, xCrossSums);

      // Rows 4 and 5:
      xImageAsEightBytes=_mm_loadl_epi64((__m128i*) imagepointer);
      imagepointer += imagepointerincrement;
      xImageAsWords = _mm_unpacklo_epi8(xImageAsEightBytes,xZero);
      xImageSums = _mm_adds_epu16(xImageAsWords,xImageSums);
      xProduct = _mm_madd_epi16(xImageAsWords, xImageAsWords);
      xImageSqSums = _mm_add_epi32(xProduct, xImageSqSums);
      xTemplateAsEightBytes=_mm_load_si128((__m128i*) templatepointer);
      templatepointer += 16;
      xTemplateAsWords = _mm_unpacklo_epi8(xTemplateAsEightBytes,xZero);
      xProduct = _mm_madd_epi16(xImageAsWords, xTemplateAsWords);
      xCrossSums = _mm_add_epi32(xProduct, xCrossSums);
      xImageAsEightBytes=_mm_loadl_epi64((__m128i*) imagepointer);
      imagepointer += imagepointerincrement;
      xImageAsWords = _mm_unpacklo_epi8(xImageAsEightBytes,xZero);
      xImageSums = _mm_adds_epu16(xImageAsWords,xImageSums);
      xProduct = _mm_madd_epi16(xImageAsWords, xImageAsWords);
      xImageSqSums = _mm_add_epi32(xProduct, xImageSqSums);
      xTemplateAsWords = _mm_unpackhi_epi8(xTemplateAsEightBytes,xZero);
      xProduct = _mm_madd_epi16(xImageAsWords, xTemplateAsWords);
      xCrossSums = _mm_add_epi32(xProduct, xCrossSums);

      // Rows 6 and 7 (no pointer increment after the final load):
      xImageAsEightBytes=_mm_loadl_epi64((__m128i*) imagepointer);
      imagepointer += imagepointerincrement;
      xImageAsWords = _mm_unpacklo_epi8(xImageAsEightBytes,xZero);
      xImageSums = _mm_adds_epu16(xImageAsWords,xImageSums);
      xProduct = _mm_madd_epi16(xImageAsWords, xImageAsWords);
      xImageSqSums = _mm_add_epi32(xProduct, xImageSqSums);
      xTemplateAsEightBytes=_mm_load_si128((__m128i*) templatepointer);
      templatepointer += 16;
      xTemplateAsWords = _mm_unpacklo_epi8(xTemplateAsEightBytes,xZero);
      xProduct = _mm_madd_epi16(xImageAsWords, xTemplateAsWords);
      xCrossSums = _mm_add_epi32(xProduct, xCrossSums);
      xImageAsEightBytes=_mm_loadl_epi64((__m128i*) imagepointer);
      xImageAsWords = _mm_unpacklo_epi8(xImageAsEightBytes,xZero);
      xImageSums = _mm_adds_epu16(xImageAsWords,xImageSums);
      xProduct = _mm_madd_epi16(xImageAsWords, xImageAsWords);
      xImageSqSums = _mm_add_epi32(xProduct, xImageSqSums);
      xTemplateAsWords = _mm_unpackhi_epi8(xTemplateAsEightBytes,xZero);
      xProduct = _mm_madd_epi16(xImageAsWords, xTemplateAsWords);
      xCrossSums = _mm_add_epi32(xProduct, xCrossSums);

      // Reduce the lane-wise accumulators to scalars.
      nImageSum = SumXMM_16(xImageSums);
      nCrossSum = SumXMM_32(xCrossSums);
      nImageSumSq = SumXMM_32(xImageSqSums);
    }
  else
#endif 
    {    
      // Portable fallback: plain one-pass accumulation over the patch.
      for(int nRow = 0; nRow < mnPatchSize; nRow++)
	{
	  imagepointer = &im[irImgBase + ImageRef(0,nRow)];
	  templatepointer = &mimTemplate[ImageRef(0,nRow)];
	  for(int nCol = 0; nCol < mnPatchSize; nCol++)
	    {
	      int n = imagepointer[nCol];
	      nImageSum += n;
	      nImageSumSq += n*n;
	      nCrossSum += n * templatepointer[nCol];
	    };
	}
    };
  
  int SA = mnTemplateSum;
  int SB = nImageSum;
  
  // Zero-mean SSD expanded from sum((T - meanT) - (I - meanI))^2 using the
  // precomputed sums, so no per-pixel mean subtraction is needed.
  int N = mnPatchSize * mnPatchSize;
  return ((2*SA*SB - SA*SA - SB*SB)/N + nImageSumSq + mnTemplateSumSq - 2*nCrossSum);
}






