#include <ace/config.h>
#include <iostream>
#include <vector>
#include <string>
#include <algorithm>
#include <sstream>
#include <fstream>
#include <sys/stat.h>

#include <yarp/sig/all.h>
#include <yarp/os/all.h>
#include <yarp/dev/all.h>

#include <cv.h>
#include <cvaux.h>
#include <highgui.h>

#define MAX_COUNT 100
#define WIN_SIZE 10
#define FLOW_RATIO 80
#define FEAT_QLTY 0.01
#define FEAT_MIN_DIST 5.0
#define TRESHOLD 40
#define NUM_X 9
#define NUM_Y 6
#define WAIT_TIME 100
#define DEF 0
#define PI 3.14159265

using namespace std;

using namespace yarp::dev;
using namespace yarp::os;
using namespace yarp::sig;
using namespace yarp::sig::draw;
using namespace yarp::sig::file;

bool fileExists(string strFilename);

int main(int argc, char *argv[])
{
  // Offline stereo-vision pipeline:
  //   1) load chessboard calibration pairs (leftN.ppm / rightN.ppm),
  //   2) detect and sub-pixel-refine chessboard corners in both eyes,
  //   3) stereo-calibrate and rectify the camera pair,
  //   4) compute a block-matching disparity map, reproject it to 3D and
  //      display a re-rendered view.
  // Interaction: 'q' advances past the disparity view, 't' past the 3D
  // view, and 'q' again quits.
  ImageOf<PixelRgb> tempYImage;
  IplImage *tempCImage = NULL;
  IplImage *leftImg = NULL, *rightImg = NULL, *leftGray = NULL, *rightGray = NULL;
  IplImage *gray = NULL;
  // imageX/imageY/imageZ are only used by commented-out code; they are
  // NULL-initialised so the cvReleaseImage() calls at the end stay safe
  // (the original released uninitialised pointers: undefined behaviour).
  IplImage *result3DImage = NULL, *imageX = NULL, *imageY = NULL, *imageZ = NULL;
  IplImage *result2DPoints = NULL;
  const float squareSize = 24.0;   // chessboard square side (mm)
  int n, N;                        // corners per board / total corner count
  int count;                       // corners found in the current image
  int result;                      // non-zero when a full board was found
  int i, j;
  int nframes;                     // number of left-eye calibration frames
  CvSize imageSize = {0, 0};

  vector<CvPoint3D32f> objectPoints;  // physical corner positions, all frames
  vector<CvPoint2D32f> points[2];     // detected corners: [0] left, [1] right
  vector<int> npoints;                // corners per frame (always n)
  vector<uchar> active[2];            // per-frame "board found" flags
  vector<CvPoint2D32f> temp;          // scratch buffer for a single board

  bool flag;
  long begin;
  istringstream dataElement;

  CvStereoBMState *bmState = cvCreateStereoBMState();

  // Calibration inputs/outputs, with CvMat headers aliasing stack storage.
  double m1[3][3], m2[3][3], d1[5], d2[5], rot[3][3], trans[3], ess[3][3], fund[3][3];
  double rotX[3][3], rotY[3][3], rotZ[3][3];
  double r1[3][3], r2[3][3], p1[3][4], p2[3][4], q[4][4];
  double d1Zeros[5], d2Zeros[5];
  CvMat _m1 = cvMat(3, 3, CV_64F, m1);
  CvMat _m2 = cvMat(3, 3, CV_64F, m2);
  CvMat _d1 = cvMat(1, 5, CV_64F, d1);
  CvMat _d2 = cvMat(1, 5, CV_64F, d2);
  CvMat _rot = cvMat(3, 3, CV_64F, rot);
  CvMat _rotX = cvMat(3, 3, CV_64F, rotX);
  CvMat _rotY = cvMat(3, 3, CV_64F, rotY);
  CvMat _rotZ = cvMat(3, 3, CV_64F, rotZ);
  CvMat _trans = cvMat(3, 1, CV_64F, trans);
  CvMat _ess = cvMat(3, 3, CV_64F, ess);
  CvMat _fund = cvMat(3, 3, CV_64F, fund);
  CvMat _r1 = cvMat(3, 3, CV_64F, r1);
  CvMat _r2 = cvMat(3, 3, CV_64F, r2);
  CvMat _p1 = cvMat(3, 4, CV_64F, p1);
  CvMat _p2 = cvMat(3, 4, CV_64F, p2);
  CvMat _q = cvMat(4, 4, CV_64F, q);
  CvMat _d1Zeros = cvMat(1, 5, CV_64F, d1Zeros);
  CvMat _d2Zeros = cvMat(1, 5, CV_64F, d2Zeros);

  n = NUM_X * NUM_Y;
  N = 0;
  temp.resize(n, cvPoint2D32f(0, 0));

  cvNamedWindow("OriginalLeft", CV_WINDOW_AUTOSIZE);
  cvNamedWindow("GrayLeft", CV_WINDOW_AUTOSIZE);
  cvNamedWindow("OriginalRight", CV_WINDOW_AUTOSIZE);
  cvNamedWindow("GrayRight", CV_WINDOW_AUTOSIZE);
  cvNamedWindow("Disparity", CV_WINDOW_AUTOSIZE);

  // Load the first left image just to size the working buffers
  // (left1.ppm is assumed to exist).
  yarp::sig::file::read(tempYImage, "../../../3dVisionDatas/calibration/left1.ppm");

  // Convert the YARP image to an OpenCV BGR image.
  tempCImage = cvCreateImage(cvSize(tempYImage.width(),
                             tempYImage.height()),
                             IPL_DEPTH_8U, 3);

  cvCvtColor((IplImage*)tempYImage.getIplImage(), tempCImage, CV_RGB2BGR);

  gray = cvCreateImage(cvGetSize(tempCImage), 8, 1);

  imageSize = cvGetSize(tempCImage);

  cvCvtColor(tempCImage, gray, CV_BGR2GRAY);

  // Rectification maps (filled after calibration by
  // cvInitUndistortRectifyMap).
  CvMat *mx1 = cvCreateMat(imageSize.height, imageSize.width, CV_32F);
  CvMat *my1 = cvCreateMat(imageSize.height, imageSize.width, CV_32F);
  CvMat *mx2 = cvCreateMat(imageSize.height, imageSize.width, CV_32F);
  CvMat *my2 = cvCreateMat(imageSize.height, imageSize.width, CV_32F);

  // Plain undistortion maps built from the hand-measured intrinsics below.
  IplImage *mapXDis = cvCreateImage(imageSize, IPL_DEPTH_32F, 1);
  IplImage *mapYDis = cvCreateImage(imageSize, IPL_DEPTH_32F, 1);
  IplImage *mapXDis2 = cvCreateImage(imageSize, IPL_DEPTH_32F, 1);
  IplImage *mapYDis2 = cvCreateImage(imageSize, IPL_DEPTH_32F, 1);
  IplImage *tempDistImage = NULL;   // NULL until first used, so the final
                                    // cvReleaseImage() is always safe

  cvSetIdentity(&_m1);
  cvSetIdentity(&_m2);
  cvZero(&_d1);
  cvZero(&_d2);

  // Hand-measured camera intrinsics: fx, fy, cx, cy.
  m1[0][0] = 258.245;
  m1[1][1] = 257.123;
  m1[0][2] = 157.631;
  m1[1][2] = 113.148;
  m2[0][0] = 257.2;
  m2[1][1] = 257.436;
  m2[0][2] = 155.086;
  m2[1][2] = 130.77;

  // Hand-measured distortion coefficients (k1, k2, p1, p2, k3).
  d1[0] = -0.425346;
  d1[1] = 0.146858;
  d1[2] = -0.00271551;
  d1[3] = 0.00490219;
  d1[4] = 0;
  d2[0] = -0.41324;
  d2[1] = 0.158982;
  d2[2] = -0.00148367;
  d2[3] = 0.000609254;
  d2[4] = 0;

  cvInitUndistortMap(&_m1, &_d1, mapXDis, mapYDis);
  cvInitUndistortMap(&_m2, &_d2, mapXDis2, mapYDis2);

  // ---- left eye chessboard extraction ----
  i = 0;
  do
  {
    result = 0;
    count = 0;
    i++;
    cout << i << " Lettura immagine sinistra -";
    // Read a yarp image from file.
    begin = dataElement.tellg();   // always 0 here; kept for the seekp below
    ostringstream iStrStream;
    iStrStream << "../../../3dVisionDatas/calibration/left" << i << ".ppm";
    cout << " Controllo esistenza file -";
    flag = fileExists(iStrStream.str().c_str());
    iStrStream.seekp(begin);

    if(flag == true)
    {
      cout << " Lettura file" << endl;
      yarp::sig::file::read(tempYImage, iStrStream.str().c_str());

      // Convert the YARP image to an OpenCV BGR image.  Release the
      // previous buffer first: the original leaked one image per frame.
      cvReleaseImage(&tempCImage);
      tempCImage = cvCreateImage(cvSize(tempYImage.width(),
                                 tempYImage.height()),
                                 IPL_DEPTH_8U, 3);

      cvCvtColor((IplImage*)tempYImage.getIplImage(), tempCImage, CV_RGB2BGR);

      // Undistort in place (remap from a clone of the distorted image).
      cvReleaseImage(&tempDistImage);
      tempDistImage = cvCloneImage(tempCImage);
      cvRemap(tempDistImage, tempCImage, mapXDis, mapYDis);

      cvCvtColor(tempCImage, gray, CV_BGR2GRAY);

      // Find the chessboard inside the image.
      result = cvFindChessboardCorners(tempCImage, cvSize(NUM_X, NUM_Y), &temp[0], &count, CV_CALIB_CB_ADAPTIVE_THRESH | CV_CALIB_CB_NORMALIZE_IMAGE);

      // Draw the chessboard points on the image.
      cvDrawChessboardCorners(tempCImage, cvSize(NUM_X, NUM_Y), &temp[0], count, result);

      // Reserve space for this frame's corners; active[] records whether
      // the full board was found (frames with result == 0 keep zeros).
      N = points[0].size();
      points[0].resize(N + n, cvPoint2D32f(0, 0));
      active[0].push_back((uchar) result);

      if(result)
      {
        // Refine to sub-pixel accuracy and store the corners.
        cvFindCornerSubPix(gray, &temp[0], count, cvSize(11, 11), cvSize(-1, -1), cvTermCriteria(CV_TERMCRIT_ITER + CV_TERMCRIT_EPS, 30, 0.01));
        copy(temp.begin(), temp.end(), points[0].begin() + N);
      }

      // Show the OpenCV images.
      cvShowImage("OriginalLeft", tempCImage);
      cvShowImage("GrayLeft", gray);

      cvWaitKey(WAIT_TIME);
    }
  }
  while(flag == true);
  cout << "Fine" << endl;

  // ---- right eye chessboard extraction (same scheme as the left eye) ----
  i = 0;
  do
  {
    result = 0;
    count = 0;
    i++;
    cout << i << " Lettura immagine destra -";
    begin = dataElement.tellg();
    ostringstream iStrStream;
    iStrStream << "../../../3dVisionDatas/calibration/right" << i << ".ppm";
    cout << " Controllo esistenza file -";
    flag = fileExists(iStrStream.str().c_str());
    iStrStream.seekp(begin);

    if(flag == true)
    {
      cout << " Lettura file" << endl;
      yarp::sig::file::read(tempYImage, iStrStream.str().c_str());

      cvReleaseImage(&tempCImage);
      tempCImage = cvCreateImage(cvSize(tempYImage.width(),
                                 tempYImage.height()),
                                 IPL_DEPTH_8U, 3);

      cvCvtColor((IplImage*)tempYImage.getIplImage(), tempCImage, CV_RGB2BGR);

      // Undistortion with the right-eye maps.
      cvReleaseImage(&tempDistImage);
      tempDistImage = cvCloneImage(tempCImage);
      cvRemap(tempDistImage, tempCImage, mapXDis2, mapYDis2);

      cvCvtColor(tempCImage, gray, CV_BGR2GRAY);

      result = cvFindChessboardCorners(tempCImage, cvSize(NUM_X, NUM_Y), &temp[0], &count, CV_CALIB_CB_ADAPTIVE_THRESH | CV_CALIB_CB_NORMALIZE_IMAGE);

      cvDrawChessboardCorners(tempCImage, cvSize(NUM_X, NUM_Y), &temp[0], count, result);

      N = points[1].size();
      points[1].resize(N + n, cvPoint2D32f(0, 0));
      active[1].push_back((uchar) result);

      if(result)
      {
        cvFindCornerSubPix(gray, &temp[0], count, cvSize(11, 11), cvSize(-1, -1), cvTermCriteria(CV_TERMCRIT_ITER + CV_TERMCRIT_EPS, 30, 0.01));
        copy(temp.begin(), temp.end(), points[1].begin() + N);
      }

      cvShowImage("OriginalRight", tempCImage);
      cvShowImage("GrayRight", gray);

      cvWaitKey(WAIT_TIME);
    }
  }
  while(flag == true);

  cout << "Fine" << endl;

  cout << "----------Calibrating the stereo cameras----------" << endl;

  nframes = active[0].size();
  // Guard: the cvMat() wrappers below alias the vectors' storage directly,
  // so the sets must be non-empty and the same size for both eyes.
  if(nframes == 0 || points[0].empty() || points[0].size() != points[1].size())
  {
    cout << "Error: no calibration frames, or left/right sets differ in size" << endl;
    return 1;
  }
  objectPoints.resize(nframes * n);

  // Physical corner coordinates of one board (row-major, squareSize apart).
  for(i = 0; i < NUM_Y; i++)
    for(j = 0; j < NUM_X; j++)
      objectPoints[(i * NUM_X) + j] = cvPoint3D32f(i * squareSize, j * squareSize, 0);

  // Replicate the first board for every frame (i == 0 is a no-op self copy).
  for(i = 0; i < nframes; i++)
    copy(objectPoints.begin(), objectPoints.begin() + n, objectPoints.begin() + (i * n));

  npoints.resize(nframes, n);
  N = n * nframes;

  // Matrix headers over the point vectors.
  CvMat _objectPoints = cvMat(1, N, CV_32FC3, &objectPoints[0]);
  CvMat _imagePoints1 = cvMat(1, N, CV_32FC2, &points[0][0]);
  CvMat _imagePoints2 = cvMat(1, N, CV_32FC2, &points[1][0]);
  CvMat _npoints = cvMat(1, npoints.size(), CV_32S, &npoints[0]);

  // Calibrate assuming zero lens distortion: the input images were already
  // undistorted with the hand-measured coefficients above.
  for(i = 0; i < 5; i++)
  {
    d1Zeros[i] = 0.00;
    d2Zeros[i] = 0.00;
  }

  cvStereoCalibrate(&_objectPoints, &_imagePoints1, &_imagePoints2, &_npoints,
                    &_m1, &_d1Zeros, &_m2, &_d2Zeros,
                    imageSize,
                    &_rot, &_trans, &_ess, &_fund,
                    cvTermCriteria(CV_TERMCRIT_ITER + CV_TERMCRIT_EPS, 100, 1e-5),
                    CV_CALIB_FIX_ASPECT_RATIO + CV_CALIB_FIX_INTRINSIC +
                    CV_CALIB_FIX_K1 + CV_CALIB_FIX_K2 + CV_CALIB_FIX_K3 +
                    CV_CALIB_ZERO_TANGENT_DIST + CV_CALIB_SAME_FOCAL_LENGTH);

  // Calculate rectification parameters.
  cout << "Calculating retification parameters..." << endl;

  // Hand-tuned ("a manina") extrinsics: the rotation/translation estimated
  // by cvStereoCalibrate is overridden with a fixed baseline and a rotation
  // built from per-axis angles (currently all zero = identity).
  // Earlier manual estimates, kept for reference:
  //   d1 = {-0.425346, 0.146858, -0.00271551, 0.00490219, 0}
  //   d2 = {-0.41324, 0.158982, -0.00148367, 0.000609254, 0}
  //   m1 fx/fy/cx/cy = 258.245 / 257.123 / 157.631 / 113.148 (or 180/180/...)
  //   m2 fx/fy/cx/cy = 257.2 / 257.436 / 155.086 / 130.77    (or 180/180/...)
  trans[0] = -68;   // stereo baseline along x (mm)
  trans[1] = 0.1;
  trans[2] = 0.1;

  // float angle = 5.7;
  float angle = 0;

  rotX[0][0] = 1.0;
  rotX[0][1] = 0.0;
  rotX[0][2] = 0.0;
  rotX[1][0] = 0.0;
  rotX[1][1] = cos(angle*PI/180);
  rotX[1][2] = sin(angle*PI/180);
  rotX[2][0] = 0.0;
  rotX[2][1] = -sin(angle*PI/180);
  rotX[2][2] = cos(angle*PI/180);

  // angle = -7.1;
  angle = 0;

  rotY[0][0] = cos(angle*PI/180);
  rotY[0][1] = 0.0;
  rotY[0][2] = -sin(angle*PI/180);
  rotY[1][0] = 0.0;
  rotY[1][1] = 1.0;
  rotY[1][2] = 0.0;
  rotY[2][0] = sin(angle*PI/180);
  rotY[2][1] = 0.0;
  rotY[2][2] = cos(angle*PI/180);

  angle = 0;

  rotZ[0][0] = cos(angle*PI/180);
  rotZ[0][1] = sin(angle*PI/180);
  rotZ[0][2] = 0.0;
  rotZ[1][0] = -sin(angle*PI/180);
  rotZ[1][1] = cos(angle*PI/180);
  rotZ[1][2] = 0.0;
  rotZ[2][0] = 0.0;
  rotZ[2][1] = 0.0;
  rotZ[2][2] = 1.0;

  CvMat *rotzy = cvCreateMat( 3, 3, CV_64FC1);

  // rot = rotZ * rotY * rotX
  cvMatMul(&_rotZ, &_rotY, rotzy);
  cvMatMul(rotzy, &_rotX, &_rot);

  // ---- end parameters "a manina" ----

  cvStereoRectify(&_m1, &_m2, &_d1Zeros, &_d2Zeros,
                  imageSize,
                  &_rot, &_trans,
                  &_r1, &_r2, &_p1, &_p2, &_q,
                  0);

  cvInitUndistortRectifyMap(&_m1, &_d1Zeros, &_r1, &_p1, mx1, my1);
  cvInitUndistortRectifyMap(&_m2, &_d2Zeros, &_r2, &_p2, mx2, my2);

  // ---- load the stereo test pair ----
  cout << "Loading stereo images" << endl;

  // left
  yarp::sig::file::read(tempYImage, "../../../3dVisionDatas/calibration/left4.ppm");

  // Convert the YARP image to an OpenCV BGR image.
  leftImg = cvCreateImage(cvSize(tempYImage.width(),
                          tempYImage.height()),
                          IPL_DEPTH_8U, 3);

  cvCvtColor((IplImage*)tempYImage.getIplImage(), leftImg, CV_RGB2BGR);

  imageSize = cvGetSize(leftImg);

  leftGray = cvCreateImage(cvGetSize(leftImg), 8, 1);
  cvCvtColor(leftImg, leftGray, CV_BGR2GRAY);

  // right
  yarp::sig::file::read(tempYImage, "../../../3dVisionDatas/calibration/right4.ppm");

  rightImg = cvCreateImage(cvSize(tempYImage.width(),
                           tempYImage.height()),
                           IPL_DEPTH_8U, 3);

  cvCvtColor((IplImage*)tempYImage.getIplImage(), rightImg, CV_RGB2BGR);

  rightGray = cvCreateImage(cvGetSize(rightImg), 8, 1);

  cvCvtColor(rightImg, rightGray, CV_BGR2GRAY);

  CvMat *img1r = cvCreateMat(imageSize.height, imageSize.width, CV_8U);
  CvMat *img2r = cvCreateMat(imageSize.height, imageSize.width, CV_8U);

  CvMat *disp = cvCreateMat(imageSize.height, imageSize.width, CV_32F);
  CvMat *vdisp = cvCreateMat(imageSize.height, imageSize.width, CV_8U);

  result3DImage = cvCreateImage(cvGetSize(leftImg), IPL_DEPTH_32F, 3);
  result2DPoints = cvCreateImage(cvGetSize(leftImg), IPL_DEPTH_32F, 3);

  // ---- block-matching correspondence parameters ----
  assert(bmState != 0);
  if (DEF)
  {
    bmState->preFilterSize = 41;
    bmState->preFilterCap = 31;
    bmState->SADWindowSize = 41;
    bmState->minDisparity = -64;
    bmState->numberOfDisparities = 128;
    bmState->textureThreshold = 10;
    bmState->uniquenessRatio = 15;
  }
  else
  {
    // Disparity search range: width/8 rounded down to a multiple of 16.
    int numberOfDisparities = imageSize.width / 8;
    numberOfDisparities -= numberOfDisparities % 16;

    bmState->preFilterCap = 31;
    bmState->SADWindowSize = 15;
    bmState->minDisparity = 0;
    bmState->numberOfDisparities = numberOfDisparities;
    bmState->textureThreshold = 10;
    bmState->uniquenessRatio = 15;
    bmState->speckleWindowSize = 100;
    bmState->speckleRange = 32;
    bmState->disp12MaxDiff = 1;
  }

  // Rectify the pair.
  cvRemap(leftGray, img1r, mx1, my1);
  cvRemap(rightGray, img2r, mx2, my2);

  // Find correspondence: disp receives sub-pixel disparities (CV_32F).
  cvFindStereoCorrespondenceBM(img1r, img2r, disp, bmState);
  cvNormalize(disp, vdisp, 0, 256, CV_MINMAX);

  // ---- 3D position "by hand": pos3D = Q * (x, y, disparity, 1)^T ----
  int rows, columns;
  rows = 4;
  columns = imageSize.height * imageSize.width;

  // Heap buffers, zero-initialised: the original used ~5 MB of stack VLAs
  // (a GCC extension and a stack-overflow risk).
  vector<double> pos2D(rows * columns, 0.0);
  vector<double> pos3D(rows * columns, 0.0);
  CvMat _pos2D = cvMat(rows, columns, CV_64F, &pos2D[0]);
  CvMat _pos3D = cvMat(rows, columns, CV_64F, &pos3D[0]);

  for(i = 0; i < imageSize.height; i++)
    for(j = 0; j < imageSize.width; j++)
    {
      int col = (i * imageSize.width) + j;
      pos2D[0 * columns + col] = j;
      pos2D[1 * columns + col] = i;
      // disp is CV_32F, so it must be read as float: the original read it
      // through CV_MAT_ELEM(..., double, ...), reinterpreting the bytes.
      pos2D[2 * columns + col] = CV_MAT_ELEM(*disp, float, i, j);
      pos2D[3 * columns + col] = 1;
    }

  cvMatMul(&_q, &_pos2D, &_pos3D);

  cvReprojectImageTo3D(disp, result3DImage, &_q);

  // ---- saturate the far plane: everything at or beyond the largest depth
  // (the "no disparity" plane) is clamped to the second-largest depth and
  // its x/y zeroed so it can be recognised as invalid later ----
  int height     = result3DImage->height;
  int width      = result3DImage->width;
  int step       = result3DImage->widthStep/sizeof(float);
  int channels   = result3DImage->nChannels;
  float *data    = (float *)result3DImage->imageData;
  float maxZ, maxZ2;
  int countNumPoints;
  countNumPoints = 0;
  maxZ = -1000000;
  maxZ2 = -1000000;
  for(i = 0; i < height; i++)
    for(j = 0; j < width; j++)
    {
      if(data[i * step + j * channels + 2] > maxZ)
        maxZ = data[i * step + j * channels + 2];
    }
  // Second-largest depth value (strictly below maxZ).
  for(i = 0; i < height; i++)
    for(j = 0; j < width; j++)
      if(data[i * step + j * channels + 2] > maxZ2 && data[i * step + j * channels + 2] < maxZ)
      {
        maxZ2 = data[i * step + j * channels + 2];
      }
  for(i = 0; i < height; i++)
    for(j = 0; j < width; j++)
    {
      if(data[i * step + j * channels + 2] > maxZ2)
      {
        data[i * step + j * channels + 0] = 0;
        data[i * step + j * channels + 1] = 0;
        data[i * step + j * channels + 2] = maxZ2;
        countNumPoints++;
      }
    }

  // Draw reference lines to visually check the rectification.
  for(i = 0; i < 240; i += 10)
  {
    cvLine(img1r, cvPoint(0,i), cvPoint(319, i), cvScalar(255, 0, 0), 0, 0);
    cvLine(img2r, cvPoint(0,i), cvPoint(319, i), cvScalar(255, 0, 0), 0, 0);
  }

  for(i = 0; i < 240; i += 20)
  {
    cvLine(leftGray, cvPoint(0,i), cvPoint(319, i), cvScalar(255, 0, 0), 0, 0);
    cvLine(rightGray, cvPoint(0,i), cvPoint(319, i), cvScalar(255, 0, 0), 0, 0);
  }

  for(i = 0; i < 360; i += 20)
  {
    cvLine(leftGray, cvPoint(i,0), cvPoint(i, 239), cvScalar(255, 0, 0), 0, 0);
    cvLine(rightGray, cvPoint(i,0), cvPoint(i, 239), cvScalar(255, 0, 0), 0, 0);
  }

  // Show the OpenCV images.
  cvShowImage("OriginalLeft", leftGray);
  cvShowImage("GrayLeft", img1r);
  cvShowImage("OriginalRight", rightGray);
  cvShowImage("GrayRight", img2r);

  cvShowImage("Disparity", vdisp);

  //-------------start print----------------------------

  cout << "M1" << endl;
  for(i = 0; i < 3; i++)
  {
    for(j = 0; j < 3; j++)
    {
      cout << m1[i][j] << " ";
    }
    cout << endl;
  }

  cout << "M2" << endl;
  for(i = 0; i < 3; i++)
  {
    for(j = 0; j < 3; j++)
    {
      cout << m2[i][j] << " ";
    }
    cout << endl;
  }

  cout << "D1" << endl;
  for(j = 0; j < 5; j++)
  {
    cout << d1Zeros[j] << " ";
  }
  cout << endl;

  cout << "D2" << endl;
  for(j = 0; j < 5; j++)
  {
    cout << d2Zeros[j] << " ";
  }
  cout << endl;

  cout << "R" << endl;
  for(i = 0; i < 3; i++)
  {
    for(j = 0; j < 3; j++)
    {
      cout << rot[i][j] << " ";
    }
    cout << endl;
  }

  cout << "T" << endl;
  for(j = 0; j < 3; j++)
  {
    cout << trans[j] << " ";
  }
  cout << endl;

  cout << "Q" << endl;
  for(i = 0; i < 4; i++)
  {
    for(j = 0; j < 4; j++)
    {
      cout << q[i][j] << " ";
    }
    cout << endl;
  }
  //-------------end print----------------------------

  // cvWaitKey() returns -1 on timeout; key is zero-initialised (the
  // original read it uninitialised: undefined behaviour).
  int key = 0;

  // Press 'q' to continue past the disparity view.
  while (key != 'q')
    key = cvWaitKey(100);

  cvShowImage("Disparity", result3DImage);

  // Press 't' to continue past the 3D view.
  while (key != 't')
    key = cvWaitKey(100);

  // Buffers for the (currently disabled) cvProjectPoints2 experiment,
  // sized for the countNumPoints saturated pixels counted above.  At least
  // one element is allocated so the CvMat headers always alias valid
  // storage (the original used stack VLAs here too).
  int numOP = countNumPoints > 0 ? countNumPoints : 1;
  vector<double> object_points(numOP * 3, 0.0);
  CvMat _object_points = cvMat(numOP, 3, CV_64F, &object_points[0]);
  vector<double> image_points(numOP * 2, 0.0);
  CvMat _image_points = cvMat(numOP, 2, CV_64F, &image_points[0]);

  double obRot[3];
  CvMat _obRot = cvMat(3, 1, CV_64F, obRot);
  double obTran[3];
  CvMat _obTran = cvMat(3, 1, CV_64F, obTran);

  for(i = 0; i < 3; i++)
  {
    obRot[i] = 0;
    obTran[i] = 0;
  }

  double qInv[4][4];
  CvMat _qInv = cvMat(4, 4, CV_64F, qInv);

  // Q^-1 maps rotated 3D points back to (x, y, disparity) image space.
  cvInvert(&_q, &_qInv);

  // Rebuild the viewing rotation (all angles currently zero -> identity).
  angle = 0;

  rotX[0][0] = 1.0;
  rotX[0][1] = 0.0;
  rotX[0][2] = 0.0;
  rotX[1][0] = 0.0;
  rotX[1][1] = cos(angle*PI/180);
  rotX[1][2] = sin(angle*PI/180);
  rotX[2][0] = 0.0;
  rotX[2][1] = -sin(angle*PI/180);
  rotX[2][2] = cos(angle*PI/180);

  // angle = -7.1;
  angle = 0;

  rotY[0][0] = cos(angle*PI/180);
  rotY[0][1] = 0.0;
  rotY[0][2] = -sin(angle*PI/180);
  rotY[1][0] = 0.0;
  rotY[1][1] = 1.0;
  rotY[1][2] = 0.0;
  rotY[2][0] = sin(angle*PI/180);
  rotY[2][1] = 0.0;
  rotY[2][2] = cos(angle*PI/180);

  angle = 0;

  rotZ[0][0] = cos(angle*PI/180);
  rotZ[0][1] = sin(angle*PI/180);
  rotZ[0][2] = 0.0;
  rotZ[1][0] = -sin(angle*PI/180);
  rotZ[1][1] = cos(angle*PI/180);
  rotZ[1][2] = 0.0;
  rotZ[2][0] = 0.0;
  rotZ[2][1] = 0.0;
  rotZ[2][2] = 1.0;

  // rotzy was allocated above and has the right size: reuse it (the
  // original allocated a second 3x3 matrix here and leaked the first).
  cvMatMul(&_rotZ, &_rotY, rotzy);
  cvMatMul(rotzy, &_rotX, &_rot);

  float rotx, roty, rotz;
  int newx, newy;

  // Re-render the rectified left view from the rotated 3D point cloud.
  IplImage *rotatedImage = cvCreateImage(imageSize, 8, 1);
  cvZero(rotatedImage);   // black background for unmapped pixels
  // rotatedImage is 8-bit single channel, so it is addressed as uchar with
  // the byte step.  (The original wrote floats through a float* with a
  // float-sized step, overflowing the image buffer.)
  uchar *data2    = (uchar *)rotatedImage->imageData;
  int step2       = rotatedImage->widthStep;
  int channels2   = rotatedImage->nChannels;
  for(i = 0; i < height; i++)
    for(j = 0; j < width; j++)
    {
      // (0, 0, z) marks the saturated/invalid points produced above.
      if (data[i * step + j * channels + 0] != 0 || data[i * step + j * channels + 1] != 0)
      {
        // Rotate the 3D point...
        rotx = (rot[0][0] * data[i * step + j * channels + 0]) +
               (rot[0][1] * data[i * step + j * channels + 1]) +
               (rot[0][2] * data[i * step + j * channels + 2]);
        roty = (rot[1][0] * data[i * step + j * channels + 0]) +
               (rot[1][1] * data[i * step + j * channels + 1]) +
               (rot[1][2] * data[i * step + j * channels + 2]);
        rotz = (rot[2][0] * data[i * step + j * channels + 0]) +
               (rot[2][1] * data[i * step + j * channels + 1]) +
               (rot[2][2] * data[i * step + j * channels + 2]);
        // ...and project it back to pixel coordinates through Q^-1.
        newx = (qInv[0][0] * rotx) +
               (qInv[0][1] * roty) +
               (qInv[0][2] * rotz) +
               qInv[0][3];
        newy = (qInv[1][0] * rotx) +
               (qInv[1][1] * roty) +
               (qInv[1][2] * rotz) +
               qInv[1][3];

        if(newy >= 0 && newy < height && newx >= 0 && newx < width)
          // img1r is CV_8U, so it must be read as uchar (the original's
          // double read reinterpreted eight pixels at once).
          data2[newy * step2 + newx * channels2] = CV_MAT_ELEM(*img1r, uchar, i, j);
      }
    }

  cvSave("result3dImage.txt", result3DImage);

  // Show the re-rendered view.
  cvShowImage("Disparity", rotatedImage);

  // Press 'q' to quit.
  while (key != 'q')
    key = cvWaitKey(100);

  cvDestroyWindow("OriginalLeft");
  cvDestroyWindow("GrayLeft");
  cvDestroyWindow("OriginalRight");
  cvDestroyWindow("GrayRight");
  cvDestroyWindow("Disparity");

  cvReleaseImage(&tempCImage);
  cvReleaseImage(&gray);
  cvReleaseImage(&leftImg);
  cvReleaseImage(&rightImg);
  cvReleaseImage(&leftGray);
  cvReleaseImage(&rightGray);
  cvReleaseImage(&imageX);
  cvReleaseImage(&imageY);
  cvReleaseImage(&imageZ);
  cvReleaseImage(&rotatedImage);
  cvReleaseImage(&result3DImage);
  cvReleaseImage(&result2DPoints);
  cvReleaseImage(&mapXDis);
  cvReleaseImage(&mapXDis2);
  cvReleaseImage(&mapYDis);
  cvReleaseImage(&mapYDis2);
  cvReleaseImage(&tempDistImage);

  cvReleaseStereoBMState(&bmState);

  cvReleaseMat(&mx1);
  cvReleaseMat(&my1);
  cvReleaseMat(&mx2);
  cvReleaseMat(&my2);
  cvReleaseMat(&img1r);
  cvReleaseMat(&img2r);
  cvReleaseMat(&disp);
  cvReleaseMat(&vdisp);
  cvReleaseMat(&rotzy);

  return 0;
}

// Returns true when the path can be stat()ed, i.e. it exists and is
// reachable by the current process.
bool fileExists(string strFilename) {
  struct stat stFileInfo;
  return stat(strFilename.c_str(), &stFileInfo) == 0;
}



