#include "stdafx.h"
#include "CCamera.h"
#include "opencv2/calib3d/calib3d.hpp"
#include <time.h>
#include "macro_debug.h"
#include "cvxMat.h"
#include "nvmath.h"
#include "cvxGeometry.h"
#include "cvxSf.h"

using namespace::cv;
using namespace::std;

using nv::vec4f;
using nv::vec3f;
using nv::matrix4f;



/************************************************************************/
/* 
	CCamera
*/
/************************************************************************/

// Construct a camera with default intrinsics/distortion; see initCamera().
CCamera::CCamera()
{
	initCamera();
}


// Release the stored 2D/3D correspondence sets (cv::Mat members clean
// themselves up via their own destructors).
CCamera::~CCamera()
{
	m_imagePoints.clear();
	m_objectPoints.clear();
}

// Save img as "<prefix><i>.png" in the current working directory.
static void isaveImage(IplImage *img, int i, string prefix)
{
	char temp[64] = {0};  // was {NULL}: NULL is a pointer constant, 0 is the correct char initializer
	// snprintf cannot overflow temp, unlike the previous sprintf
	snprintf(temp, sizeof(temp), "%d", i);
	string sName = prefix + string(temp) + string(".png");
	cvSaveImage(sName.c_str(), img);
}


// Run camera calibration with the accumulated 2D/3D correspondences.
// flags       : CV_CALIB_* option bitmask, stored in m_flags.
// computerErr : also compute per-view reprojection errors.
// Returns true only if calibration actually succeeded.
bool CCamera::calib(int flags, bool computerErr)
{
	// Require matching, non-empty correspondence sets.
	if (m_imagePoints.empty() || m_objectPoints.empty() ||
		m_imagePoints.size() != m_objectPoints.size())
	{
		//	fprintf(stderr, "calib failed %s %d", __FILE__, __LINE__);
		return false;
	}

	m_flags = flags;
	if (!runCalibration(computerErr))
	{
		// BUG FIX: the old code computed parameter errors and returned true
		// even when runCalibration() failed.
		return false;
	}

	// Derive physical characteristics (fov, focal length, principal point).
	cv::calibrationMatrixValues(m_intrinsic, m_imageSize, m_apertureWidth, m_apertureHeight,
		m_fovx, m_fovy, m_focalLength, m_principalPoint, m_aspectRatio);
	// calibrationMatrixValues reports the principal point in aperture (mm)
	// units; convert back to pixel coordinates.
	m_principalPoint.x = m_principalPoint.x * m_imageSize.width / m_apertureWidth;
	m_principalPoint.y = m_principalPoint.y * m_imageSize.height/ m_apertureHeight;

	computerParameterError();
	return true;
}

// Estimate 3-sigma uncertainty bounds for the 8 intrinsic parameters
// (fx, fy, cx, cy, k1, k2, p1, p2) from the calibration Jacobian,
// following Bouguet's MATLAB toolbox. Results go to m_parameterError and
// the residual std-dev to m_stdDev.
void CCamera::computerParameterError(void)
{
	//this code is tansformed from go_calib_optim_iter.m

	assert(!m_imagePoints.empty() && !m_objectPoints.empty());
	// The 8x1 error layout assumes k3 was held fixed during calibration.
	assert(m_flags & CV_CALIB_FIX_K3);

	int numImg = m_imagePoints.size();
	int numPts = m_imagePoints[0].size();
	//computer jacobian matrix	
	// Layout: 8 intrinsic columns, then 6 extrinsic columns per view.
	Mat jacoBian = Mat::zeros(8 + 6 * numImg, 8 + 6 * numImg, CV_64F);
	vector<Point2f> projectMeasurePtsDif;  //image point difference between measured image points and re-projecte image points

	for (int i = 0; i<numImg; ++i)
	{
		vector<Point2f> imgPts(numPts);
		Mat jaco;
		// projectPoints also returns the 2N x 15 Jacobian of the projection
		// w.r.t. [rvec(3) tvec(3) f(2) c(2) dist(5)].
		cv::projectPoints(m_objectPoints[i], m_RM[i], m_TM[i], m_intrinsic, m_distortionCoeffs, 
						  imgPts, jaco, 0.0);
		/************************************************************************/
		/* 
		pdpdrot = &(dpdrot = jacobian.colRange(0, 3));
		pdpdt = &(dpdt = jacobian.colRange(3, 6));

		pdpdf = &(dpdf = jacobian.colRange(6, 8));
		pdpdc = &(dpdc = jacobian.colRange(8, 10));
		pdpddist = &(dpddist = jacobian.colRange(10, 10+ndistCoeffs));
		*/
		/************************************************************************/
		int test = 0;

		/************************************************************************/
		/* 
		A = [dxdf dxdc dxdalpha dxdk]'; 6-15
		B = [dxdom dxdT]';  0-5

		JJ3(1:10,1:10) = JJ3(1:10,1:10) + sparse(A*A');
		JJ3(15+6*(kk-1) + 1:15+6*(kk-1) + 6,15+6*(kk-1) + 1:15+6*(kk-1) + 6) = sparse(B*B');

		AB = sparse(A*B');
		JJ3(1:10,15+6*(kk-1) + 1:15+6*(kk-1) + 6) = AB;
		JJ3(15+6*(kk-1) + 1:15+6*(kk-1) + 6,1:10) = (AB)';
		*/
		/************************************************************************/
		assert(jaco.cols == 15);

		// A: intrinsic part of the Jacobian, B: extrinsic (pose) part.
		Mat A = jaco.colRange(6, 14);  //last column is not used
		Mat B = jaco.colRange(0, 6);
		A = A.t();
		B = B.t();
		// r: this view's 6-column extrinsic slot in the big normal matrix.
		Range r(8+i*6, 8+i*6+6);
		// NOTE: assigning a MatExpr (A*A.t() + ...) into an ROI writes in
		// place; plain Mat assignment would only redirect the ROI header,
		// which is why copyTo is used for the other sub-blocks.
		jacoBian(Range(0, 8), Range(0, 8)) = jacoBian(Range(0, 8), Range(0, 8)) + A * A.t();
		Mat BBt = B * B.t();
		BBt.copyTo(jacoBian(r,r));

		Mat ABt = A * B.t();
		ABt.copyTo(jacoBian(Range(0, 8), r));
		Mat BAt = ABt.t();
		BAt.copyTo(jacoBian(r, Range(0, 8)));

		// Accumulate the reprojection residuals for the sigma estimate.
		for (int j = 0; j<m_imagePoints[i].size(); ++j)
		{
			Point2f p1 = m_imagePoints[i][j];
			Point2f p2 = imgPts[j];
			Point2f p_dif = Point2f(p1.x - p2.x, p1.y - p2.y);
			projectMeasurePtsDif.push_back(p_dif);
		}
	}

	//cJJ2_inv = inv(JJ3);
	Mat JtJ_inv = jacoBian.inv(DECOMP_LU);

	//computer mean and standard deviation
	Mat mean;
	//Mat std_dev;
	cv::meanStdDev(Mat(projectMeasurePtsDif), mean, m_stdDev);

	// sigma_x: average residual std-dev over the x and y axes.
	float sigma_x  = (m_stdDev.at<double>(0, 0) + m_stdDev.at<double>(1, 0))/2.0;
	// 3-sigma bound per intrinsic parameter from the covariance diagonal.
	for (int i = 0; i<8; ++i)
	{
		m_parameterError.at<double>(i, 0) = 3 * sqrt(double(JtJ_inv.at<double>(i, i))) * sigma_x;
	//	fprintf(stdout, "%f ", m_parameterError.at<double>(i, 0));
	}
//	fprintf(stdout, "\n");

}

double CCamera::GetReProjectionError(const Mat& cameraMatrix, const Mat& distCoeffs, 									
									const vector<Point3f> &objectPoints,
									const vector<Point2f> &imagePoints, 
									FILE *pf)
{
	assert(pf);
	assert(imagePoints.size() == objectPoints.size());
	//computer reprojection error
	Mat rvec;
	Mat tvec;	
	vector<Point2f> reprojImgPts;
	double error = 0;
	cv::solvePnP(Mat(objectPoints), Mat(imagePoints), cameraMatrix, distCoeffs, rvec, tvec);
	cv::projectPoints(Mat(objectPoints),rvec, tvec,	cameraMatrix, distCoeffs, reprojImgPts);
	for (int i = 0; i<reprojImgPts.size(); ++i)
	{
		Point2f p_dif = reprojImgPts[i] - imagePoints[i];
		fprintf(pf, "%f %f ", p_dif.x, p_dif.y);
		error += sqrt(p_dif.x * p_dif.x + p_dif.y * p_dif.y);
	}
	fprintf(pf, "\n");
	printf("average image reprojection error %f\n", error/imagePoints.size());
	return error;
}



// Append the most recently detected chessboard corners (filled in by
// getChessboardPoints) to the calibration image-point set.
void CCamera::storeCurrentCorner2d(void)
{
	m_imagePoints.push_back(m_currentCorners2d);
}


// Core calibration driver: normalizes m_flags, runs the static
// calibration() helper, and (optionally) computes per-view reprojection
// errors into m_reprojErrs / m_reprojectPts / m_totalAvgErr.
// Returns whether calibration succeeded.
bool CCamera::runCalibration(bool computerErr)
{
	configFlags();

	bool ok = CCamera::calibration(m_objectPoints, m_imagePoints, m_imageSize, m_flags, m_intrinsic,
		m_distortionCoeffs, m_RM, m_TM);
	if (ok && computerErr)
	{

		m_totalAvgErr = CCamera::computeReprojectionErrors(m_objectPoints, m_imagePoints,
			m_RM, m_TM, m_intrinsic, 
			m_distortionCoeffs, m_reprojErrs, m_reprojectPts);
	}
	return ok;

}

// Dump per-corner reprojection residuals for every calibration view to
// stdout (one view per line), followed by the overall max and min
// residual components.
void CCamera::printReprojectionError(void)
{
	
	Point2f dif_max(0, 0);
	Point2f dif_min(INT_MAX, INT_MAX);
	for(int i = 0; i < (int)m_objectPoints.size(); i++ )
	{
		// Re-project view i's model points through its calibrated pose.
		vector<Point2f> imagePoints2;
		cv::projectPoints(Mat(m_objectPoints[i]), m_RM[i], m_TM[i],
						  m_intrinsic, m_distortionCoeffs, imagePoints2);
		for (size_t k = 0; k<imagePoints2.size(); ++k)
		{
			Point2f p_dif = imagePoints2[k] - m_imagePoints[i][k];
			dif_max.x = std::max(p_dif.x, dif_max.x);
			dif_max.y = std::max(p_dif.y, dif_max.y);
			dif_min.x = std::min(p_dif.x, dif_min.x);
			dif_min.y = std::min(p_dif.y, dif_min.y);
			// BUG FIX: trailing space added; "%f %f" with no separator fused
			// consecutive point pairs into unparseable numbers.
			fprintf(stdout, "%f %f ", p_dif.x, p_dif.y);
		}
		fprintf(stdout, "\n");
	}
	fprintf(stdout, "%f %f\n", dif_max.x, dif_max.y);
	fprintf(stdout, "%f %f\n", dif_min.x, dif_min.y);	
}


// Compute the angle (radians) between the normal of triangle (p0,p1,p2)
// and the normal of the same triangle after rotation by the row-major
// 3x3 matrix rot33. The result is written to angle.
static void intersectionAngle(const vec3f &p0, const vec3f &p1, const vec3f &p2, const double *rot33, float &angle)
{
	//assert(p0 != p1 && p0 != p2 && p1 != p2);
	matrix4f rot = matrix4f(rot33[0], rot33[1], rot33[2], 0.0,
							rot33[3], rot33[4], rot33[5], 0.0,
							rot33[6], rot33[7], rot33[8], 0.0,
							0.0, 0.0, 0.0, 1.0);
	nv::vec3f norm1 = nv::cross(p1-p0, p2-p0);
	nv::vec4f _p0 = rot*vec4f(p0, 1);
	vec4f _p1 = rot*vec4f(p1, 1);
	vec4f _p2 = rot*vec4f(p2, 1);
	vec3f norm2 = nv::cross(vec3f(_p1._array) - vec3f(_p0._array), vec3f(_p2._array) - vec3f(_p0._array));
	norm1 = nv::normalize(norm1);
	norm2 = nv::normalize(norm2);
	// Robustness fix: float rounding can push the dot product of two unit
	// vectors marginally outside [-1, 1], making acos return NaN. Clamp it.
	float d = nv::dot(norm1, norm2);
	if (d > 1.0f)
	{
		d = 1.0f;
	}
	else if (d < -1.0f)
	{
		d = -1.0f;
	}
	angle = acos(d);
}

// Write the 3x3 intrinsic matrix (row-major, space separated) followed by
// the distortion coefficient count and values to a text file readable by
// loadCamera().
void CCamera::saveCamera(const string& filename)
{
	FILE *pf = fopen(filename.c_str(), "w");	
	assert(pf);
	for (int i = 0; i<3; ++i)
	{
		for (int j = 0; j<3; ++j)
		{
			fprintf(pf, "%f ", m_intrinsic.at<double>(i, j));
		}		
	}
	// BUG FIX: was fprintf(pf, "%\n") -- a bare '%' is an invalid conversion
	// specifier (undefined behavior); only a newline was intended.
	fprintf(pf, "\n");
	fprintf(pf, "%d\n", m_distortionCoeffs.rows);
	for (int i = 0; i<m_distortionCoeffs.rows; ++i)
	{
		fprintf(pf, "%f ", m_distortionCoeffs.at<double>(i, 0));
	}	
	fclose(pf);
}
// Write the intrinsic calibration summary to filename (overwriting):
// line 1: fx fy cx cy k1 k2 p1 p2 k3
// line 2: their 3-sigma error estimates (k3 error reported as 0)
// line 3: residual std-dev (x, y) and average reprojection error
// line 4: physical focal length; line 5: fov x / fov y.
void CCamera::saveIntrinsic(const string &filename)
{
	FILE *pf = fopen(filename.c_str(), "w");	
	assert(pf);
	//focal length
	float fx = m_intrinsic.at<double>(0, 0);
	float fy = m_intrinsic.at<double>(1, 1);
	fprintf(pf, "%8.2f %8.2f ", fx, fy);

	//principla point
	float cx = m_intrinsic.at<double>(0, 2);
	float cy = m_intrinsic.at<double>(1, 2);
	fprintf(pf, "%8.2f %8.2f ", cx, cy);

	//distortion
	float k1, k2, p1, p2, k3;
	k1 = m_distortionCoeffs.at<double>(0, 0);
	k2 = m_distortionCoeffs.at<double>(1, 0);
	p1 = m_distortionCoeffs.at<double>(2, 0);
	p2 = m_distortionCoeffs.at<double>(3, 0);
	k3 = m_distortionCoeffs.at<double>(4, 0);
	fprintf(pf, "%8.6f %8.6f %8.6f %8.6f %8.6f\n", k1, k2, p1, p2, k3);

	//parameter error
	float fx_e, fy_e, cx_e, cy_e, k1_e, k2_e, p1_e, p2_e;
	fx_e = m_parameterError.at<double>(0, 0);
	fy_e = m_parameterError.at<double>(1, 0);
	cx_e = m_parameterError.at<double>(2, 0);
	cy_e = m_parameterError.at<double>(3, 0);
	k1_e = m_parameterError.at<double>(4, 0);
	k2_e = m_parameterError.at<double>(5, 0);
	p1_e = m_parameterError.at<double>(6, 0);
	p2_e = m_parameterError.at<double>(7, 0);
	// BUG FIX: was (m_flags | CV_CALIB_FIX_K2), which is non-zero for any
	// flag combination and therefore always zeroed k2_e. Bitwise AND tests
	// whether the flag is actually set.
	if (m_flags & CV_CALIB_FIX_K2)
	{
		k2_e = 0.0;
	}
	fprintf(pf, "%8.2f %8.2f %8.2f %8.2f %8.6f %8.6f %8.6f %8.6f %8.6f\n", fx_e, fy_e, cx_e, cy_e, k1_e, k2_e, p1_e, p2_e, 0.0);

	float e_x, e_y, re_proj_err;
	e_x = m_stdDev.at<double>(0, 0);
	e_y = m_stdDev.at<double>(1, 0);
	re_proj_err = m_totalAvgErr;
	fprintf(pf, "%f %f %f\n", e_x, e_y, re_proj_err);

	float f_phy;
	f_phy = m_focalLength;	
	fprintf(pf, "%f\n", f_phy);
	fprintf(pf, "%f %f\n", m_fovx, m_fovy);
	fclose(pf);
}

// Overload: same summary as saveIntrinsic(filename), but when isAppend is
// true the record is appended to an existing file (plain %f formatting,
// no fov line). NOTE(review): unlike the overwrite path, this variant does
// not zero k2_e when CV_CALIB_FIX_K2 is set -- confirm whether that
// difference is intentional.
void CCamera::saveIntrinsic(const string &filename, bool isAppend)
{
	if (!isAppend)
	{
		saveIntrinsic(filename);
	}
	else
	{
		FILE *pf = fopen(filename.c_str(), "a");	
		assert(pf);
		// "a" mode already positions writes at EOF; this seek is redundant
		// but harmless.
		fseek(pf, 0, SEEK_END);
		fprintf(pf, "\n");
		//focal length
		float fx = m_intrinsic.at<double>(0, 0);
		float fy = m_intrinsic.at<double>(1, 1);
		fprintf(pf, "%f %f ", fx, fy);

		//principla point
		float cx = m_intrinsic.at<double>(0, 2);
		float cy = m_intrinsic.at<double>(1, 2);
		fprintf(pf, "%f %f ", cx, cy);

		//distortion
		float k1, k2, p1, p2, k3;
		k1 = m_distortionCoeffs.at<double>(0, 0);
		k2 = m_distortionCoeffs.at<double>(1, 0);
		p1 = m_distortionCoeffs.at<double>(2, 0);
		p2 = m_distortionCoeffs.at<double>(3, 0);
		k3 = m_distortionCoeffs.at<double>(4, 0);
		fprintf(pf, "%f %f %f %f %f\n", k1, k2, p1, p2, k3);

		//parameter error
		float fx_e, fy_e, cx_e, cy_e, k1_e, k2_e, p1_e, p2_e;
		fx_e = m_parameterError.at<double>(0, 0);
		fy_e = m_parameterError.at<double>(1, 0);
		cx_e = m_parameterError.at<double>(2, 0);
		cy_e = m_parameterError.at<double>(3, 0);
		k1_e = m_parameterError.at<double>(4, 0);
		k2_e = m_parameterError.at<double>(5, 0);
		p1_e = m_parameterError.at<double>(6, 0);
		p2_e = m_parameterError.at<double>(7, 0);
		// k3 error is reported as a literal 0 (k3 is fixed during calib).
		fprintf(pf, "%f %f %f %f %f %f %f %f %f\n", fx_e, fy_e, cx_e, cy_e, k1_e, k2_e, p1_e, p2_e, 0.0);

		float e_x, e_y, re_proj_err;
		e_x = m_stdDev.at<double>(0, 0);
		e_y = m_stdDev.at<double>(1, 0);
		re_proj_err = m_totalAvgErr;
		fprintf(pf, "%f %f %f\n", e_x, e_y, re_proj_err);

		float f_phy;
		f_phy = m_focalLength;	
		fprintf(pf, "%f\n", f_phy);
		fclose(pf);

	}

}

// Write the per-view extrinsics to a text file: the view count, then one
// Rodrigues rotation vector per line, a blank line, then one translation
// vector per line.
void CCamera::saveExtrinsic(const string &filename)
{
	FILE *pf = fopen(filename.c_str(), "w");	
	assert(pf);
	// BUG FIX: size() is size_t; passing it to %d is undefined behavior on
	// LP64 platforms. Cast explicitly.
	fprintf(pf, "%d\n", (int)m_RM.size());
	for (size_t i = 0; i<m_RM.size(); ++i)
	{
		// Rodrigues rotation vector components for view i.
		float alpha, beta, gamma;
		alpha = m_RM[i].at<double>(0, 0); 
		beta  = m_RM[i].at<double>(1, 0); 
		gamma = m_RM[i].at<double>(2, 0); 
		fprintf(pf, "%f %f %f\n", alpha, beta, gamma);
	}
	fprintf(pf, "\n");
	for (size_t i = 0; i<m_TM.size(); ++i)
	{
		fprintf(pf, "%f %f %f\n", m_TM[i].at<double>(0, 0), m_TM[i].at<double>(1, 0), m_TM[i].at<double>(2, 0));
	}
	fclose(pf);
}

// For each calibrated view, derive several orientation descriptors of the
// target plane and store them in parallel member vectors:
//   m_eulerAngleVec  - RQ-decomposition Euler angles of the rotation,
//   m_rodriguesAngle - total rotation angle (degrees) from the Rodrigues vector,
//   m_thetaVec/m_phiVec - spherical angles of the rotated plane normal,
//   m_zDistanceVec   - camera-space z distance of the pattern center.
// unit_length is the physical size of one pattern square; w/h are the
// pattern's corner counts per row/column.
void CCamera::computerEulerAngle(float unit_length, int w, int h)
{
	assert(m_RM.size() == m_TM.size());
	assert(m_RM.size() > 0);

	m_thetaVec.clear();
	m_phiVec.clear();
	m_zDistanceVec.clear();
	m_eulerAngleVec.clear();
	m_rodriguesAngle.clear();
	

	for (int i = 0; i<m_RM.size(); ++i)
	{
		// Three corners spanning the target plane, in pattern coordinates.
		double r33[9];
		double p0[] = {0, 0, 0};
		double p1[] = {unit_length*w, unit_length*h, 0};
		double p2[] = {0, unit_length*h, 0};
		cv::Mat p0_mat(3, 1, CV_64F, p0);
		cv::Mat p1_mat(3, 1, CV_64F, p1);
		cv::Mat p2_mat(3, 1, CV_64F, p2);
		// rMat shares storage with r33, so r33 holds the rotation matrix
		// after the Rodrigues conversion below.
		cv::Mat rMat(3, 3, CV_64F, r33);
		cv::Rodrigues(m_RM[i], rMat);

		//get euler angle
		Mat tR, tQ;
		Vec3d euler = cv::RQDecomp3x3(rMat, tR, tQ);		
		m_eulerAngleVec.push_back(euler);

		{
			//rodrigues angle;
			// The Rodrigues vector's magnitude is the rotation angle; the
			// 2*atan form used here differs from plain asin/acos recovery --
			// NOTE(review): confirm this matches the intended convention.
			double rx = m_RM[i].at<double>(0, 0);
			double ry = m_RM[i].at<double>(1, 0);
			double rz = m_RM[i].at<double>(2, 0);
			float rodAngle = 2 * atan(sqrt(rx*rx + ry*ry + rz*rz)) * 180/CV_PI;
			m_rodriguesAngle.push_back(rodAngle);
		}
		

		// Rotate the three plane points into camera orientation.
		p0_mat = rMat * p0_mat;
		p1_mat = rMat * p1_mat;
		p2_mat = rMat * p2_mat;

		// Plane normal after rotation, then normalized.
		Mat p01 = p1_mat - p0_mat;
		Mat p02 = p2_mat - p0_mat;
		Mat ori = p02.cross(p01);

		float x = ori.at<double>(0, 0);
		float y = ori.at<double>(1, 0);
		float z = ori.at<double>(2, 0);
		float len = 1.0/sqrt(1.0 *x*x + y*y + z*z);
		x *= len; 
		y *= len;
		z *= len;

		// Convert the unit normal to spherical angles (degrees); theta is
		// flipped so it measures tilt away from the optical axis.
		float theta, phi;
		CvxGeometry::GetEulerAngleInSphereCoor(x, y, z, theta, phi);

		theta *= 180/CV_PI;
		theta = 180 - theta;
		phi   *= 180/CV_PI;
		m_thetaVec.push_back(theta);
		m_phiVec.push_back(phi);

		
		// z distance of the pattern center in camera coordinates (R*c + T).
		float z_dis = 0.0f;
		double center[] = {unit_length * w/2, unit_length * h/2, 0};
		Mat center_mat = Mat(3, 1, CV_64F, center);
		center_mat = rMat * center_mat;
		center_mat = m_TM[i] + center_mat;
		z_dis = center_mat.at<double>(2, 0);
		m_zDistanceVec.push_back(z_dis);
	}
}

void CCamera::computerNormal(float unit_length, const CvSize &patternSize)
{
	assert(m_RM.size() > 0);
	m_planeNormal.clear();	

	//get normal of each target plane
	for (int i = 0; i<m_RM.size(); ++i)
	{
		Vec3d normal;
		Point3f pos;
		cv::solvePnP(m_objectPoints[0], m_imagePoints[i], m_intrinsic, m_distortionCoeffs, m_RM[i], m_TM[i], false);
		CvxGeometry::GetPlaneNormalAndPatternCenterPosition(m_RM[i], m_TM[i], unit_length, 
				patternSize, normal, pos);
		m_planeNormal.push_back(normal);
	}
}

// Linear bundle adjustment of the target's 3D corner positions: builds a
// sparse linear system from the projection equations of every view
// (using the current poses in m_RM/m_TM), least-squares solves for the
// corners' 3D coordinates, dumps diagnostics to "target_3d.txt", and
// replaces m_objectPoints with the refined (origin-shifted) coordinates
// so a second calibration pass can use them.
// unitLength: nominal square size; patternSize: corner grid dimensions.
void CCamera::bundleAdjust3DLinear(float unitLength, const CvSize &patternSize)
{
	assert(m_imagePoints.size() == m_objectPoints.size());
	assert(m_imagePoints.size() != 0);	

	//undistort image points	

	// Sparse system: one map<column, coeff> row per equation.
	vector<map<int, double> > matrix_vec;
	vector<double> right_vec;

	//computer real target position

	for (int i = 0; i<m_imagePoints.size(); ++i)
	{
		//R T
		vector<Point2f> distortedPts;
	//	vector<Point2f> unDistortedPts;
		double rot[9], trans[3];
		// rMat aliases rot[], so rot[] holds the 3x3 rotation afterwards.
		cv::Mat rMat(3, 3, CV_64F, rot);
		cv::Rodrigues(m_RM[i], rMat);
		trans[0] = m_TM[i].at<double>(0, 0);
		trans[1] = m_TM[i].at<double>(1, 0);
		trans[2] = m_TM[i].at<double>(2, 0);

		//points position in sensor
		distortedPts = m_imagePoints[i];		
		//x' in opencv
		//! have problem in opencv 2.4
		Mat undistortedMat;
		// Produces normalized (ideal) image coordinates.
		cv::undistortPoints(Mat(distortedPts), undistortedMat, m_intrinsic, m_distortionCoeffs);		

		for (int j = 0; j<distortedPts.size(); ++j)
		{
			// NOTE(review): undistortedMat is a 2-channel float Mat; reading
			// it with at<float>(j, 0)/(j, 1) relies on the element layout --
			// confirm this indexing is valid for the OpenCV version in use.
			float xx = undistortedMat.at<float>(j, 0);
			float yy = undistortedMat.at<float>(j, 1);
			Point2f p0 = Point2f(xx, yy);
			//x 3*j, y 3*j+1 z 3*j+2			
			int xIdx = 3*j;
			int yIdx = 3*j+1;
			int zIdx = 3*j+2;
			// Equation from the x projection: (r0 - x'r6)X + (r1 - x'r7)Y +
			// (r2 - x'r8)Z = x't2 - t0.
			{
				map<int, double> left_map;
				double right_val = 0.0;

				left_map[xIdx] = rot[0] - p0.x * rot[6];
				left_map[yIdx] = rot[1] - p0.x * rot[7];
				left_map[zIdx] = rot[2] - p0.x * rot[8];
				right_val = p0.x * trans[2] - trans[0];
				matrix_vec.push_back(left_map);
				right_vec.push_back(right_val);
			}
			// Analogous equation from the y projection.
			{
				map<int, double> left_map;
				double right_val = 0.0;

				left_map[xIdx] = rot[3] - p0.y * rot[6];
				left_map[yIdx] = rot[4] - p0.y * rot[7];
				left_map[zIdx] = rot[5] - p0.y * rot[8];
				right_val = p0.y*trans[2] - trans[1];
				matrix_vec.push_back(left_map);
				right_vec.push_back(right_val);
			}
		}
		int test = 1;
	}
	assert(matrix_vec.size() == right_vec.size());
	// Solve for (x, y, z) of every pattern corner.
	vector<double> result;
	result.resize(m_objectPoints[0].size()*3);
	CvxSF::LeastSquare(matrix_vec, right_vec, result);

	// Diagnostic dump: deviation from the ideal planar grid, then the raw
	// solved coordinates.
	FILE *pf = fopen("target_3d.txt", "w");
	assert(pf);
	fprintf(pf, "dif\n");
	fprintf(pf, "%d %d\n", patternSize.width, patternSize.height);
	for (int y = 0; y<patternSize.height; ++y)
	{
		for (int x = 0; x<patternSize.width; ++x)
		{
			int idx = y * patternSize.width + x;
			float dif_x = result[3*idx] - x * unitLength;
			float dif_y = result[3*idx+1] - y * unitLength;
			float dif_z = result[3*idx+2];
			fprintf(pf, "%f %f %f\n", dif_x, dif_y, dif_z);
		}
	}
	fprintf(pf, "coordinate\n");
	fprintf(pf, "%d %d\n", patternSize.width, patternSize.height);
	for (int i = 0; i<patternSize.width * patternSize.height; ++i)
	{
		fprintf(pf, "%f %f %f\n", result[3*i], result[3*i+1], result[3*i+2]);
	}
	fclose(pf);	
	
	// calib twice by new 3d position computer reprojection
	// Shift so the first corner is the origin, then install the refined
	// coordinates as the shared object-point set for every view.
	vector<Point3f> objPts;
	for (int i = 0; i<result.size(); i+=3)
	{
		Point3f p0;
		p0.x = result[i] - result[0];
		p0.y = result[i+1] - result[1];
		p0.z = result[i+2] - result[2];
		objPts.push_back(p0);
	}

	m_objectPoints.clear();
	m_objectPoints.resize(m_imagePoints.size(), objPts);

}
#if 0
// DISABLED (compiled out): experimental non-linear bundle adjustment that
// linearizes the projection equations around the current solution and
// solves jointly for corner (x, y) corrections and per-view pose
// corrections. The author marked it unsuccessful; the function bails out
// immediately even when compiled in.
void CCamera::boudleAdjustNonLinear(float unitLength, const CvSize &patternSize)
{
	//unsuccessful
	fprintf(stderr, "boudleAdjustNonLinear is not complete!\n");
	return;

	// --- everything below is unreachable; kept for reference ---
	assert(m_imagePoints.size() == m_objectPoints.size());
	assert(m_imagePoints.size() != 0);

	double rot[9], trans[3];
	vector<map<int, double> > matrix_vec;
	vector<double> right_vec;

	//computer real target position

	// Unknown layout:
	// 15 * 9 * 2 X Y
	// 15 * 9 * 2 + n * 9 + (0,...8)
	// r0 r1 r3 r4 r6 r7 t0 t1 t2
	//  0  1  2  3  4  5  6  7  8 

	for (int i = 0; i<m_imagePoints.size(); ++i)
	{
		vector<Point2f> unDistortedPts = m_imagePoints[i];
		//x' in opencv
		cv::undistortPoints(Mat(unDistortedPts), Mat(unDistortedPts), m_intrinsic, m_distortionCoeffs);
		cv::Mat rMat(3, 3, CV_64F, rot);
		cv::Rodrigues(m_RM[i], rMat);

		trans[0] = m_TM[i].at<double>(0, 0);
		trans[1] = m_TM[i].at<double>(1, 0);
		trans[2] = m_TM[i].at<double>(2, 0);

		// First unknown column for view i's pose-correction variables.
		int startIdx = patternSize.width * patternSize.height * 2 + 9 * i;		
		for (int y = 0; y<patternSize.height; ++y)
		{
			for (int x = 0; x<patternSize.width; ++x)
			{
				int idx = y*patternSize.width+x;
				Point2f p0 = unDistortedPts[idx];
				double X = x * unitLength;
				double Y = y * unitLength;

				//first equation
				// Residual f and its partial derivatives w.r.t. X, Y and
				// the rotation/translation entries (x-projection row).
				{
					map<int, double> left_map;
					double right_val = 0.0;

					double f = (rot[0]-p0.x*rot[6])*X + (rot[1]-p0.x*rot[7])*Y - p0.x*trans[2] + trans[0];
					double fX = rot[0]-p0.x*rot[6];
					double fY = rot[1]-p0.x*rot[7];
					double fr0 = X;
					double fr1 = Y;
					double fr6 = -p0.x * X;
					double fr7 = -p0.x * Y;
					double ft2 = -p0.x;
					double ft0 = 1.0;

					left_map[2*idx]   = fX;
					left_map[2*idx+1] = fY;
					left_map[startIdx+0] = fr0;
					left_map[startIdx+1] = fr1;
					left_map[startIdx+4] = fr6;
					left_map[startIdx+5] = fr7;
					left_map[startIdx+6] = ft0;
					left_map[startIdx+8] = ft2;
					
					right_val = -f;
					matrix_vec.push_back(left_map);
					right_vec.push_back(right_val);
				}
				
				//second equation
				// Same linearization for the y-projection row.
				{
					map<int, double> left_map;
					double right_val = 0.0;

					double f = (rot[3]-p0.y*rot[6])*X + (rot[4]-p0.y*rot[7])*Y - p0.y*trans[2] + trans[1];
					double fX = rot[3]-p0.y*rot[6];
					double fY = rot[4]-p0.y*rot[7];
					double fr3 = X;
					double fr4 = Y;
					double fr6 = -p0.y * X;
					double fr7 = -p0.y * Y;
					double ft2 = -p0.y;
					double ft1 = 1.0;

					left_map[2*idx]   = fX;
					left_map[2*idx+1] = fY;
					left_map[startIdx+2] = fr3;
					left_map[startIdx+3] = fr4;
					left_map[startIdx+4] = fr6;
					left_map[startIdx+5] = fr7;
					left_map[startIdx+7] = ft1;
					left_map[startIdx+8] = ft2;

					right_val = -f;
					matrix_vec.push_back(left_map);
					right_vec.push_back(right_val);
				}
			}
		}
	}
	assert(matrix_vec.size() == right_vec.size());
	vector<double> result;
	result.resize(m_objectPoints[0].size()*2 + m_imagePoints.size() * 9);
	bool isOK = CvxSF::LeastSquare(matrix_vec, right_vec, result);

	if (isOK)
	{
		FILE *pf = fopen("non_linear_ba.txt", "w");
		for (int i = 0; i<patternSize.width * patternSize.height; i += 2)
		{
			fprintf(pf, "%f %f\n", result[2*i], result[2*i+1]);
		}
		fclose(pf);

	}
	
	// calib twice by new 3d position computer reprojection
// 	vector<Point3f> objPts;
// 	for (int i = 0; i<result.size(); i+=2)
// 	{
// 		Point3f p0;
// 		p0.x = result[i];
// 		p0.y = result[i+1];
// 		p0.z = 0.0;
// 		objPts.push_back(p0);
// 	}
// 	m_objectPoints.clear();
// 	m_objectPoints.resize(m_imagePoints.size(), objPts);

}
#endif

// Compute per-view orientation descriptors (via computerEulerAngle) and
// print theta/phi/z-distance plus the Rodrigues angle for every view.
// NOTE(review): despite the name and parameter, filename is unused and
// output goes to stdout -- TODO confirm whether this should write a file.
void CCamera::saveSphereEulerAngle(const string &filename, float unit_length, int w, int h)
{
	computerEulerAngle(unit_length, w, h);
	assert(m_phiVec.size() == m_thetaVec.size() && m_zDistanceVec.size() == m_phiVec.size());
	
	for (int i = 0; i<m_phiVec.size(); ++i)
	{
		float theta = m_thetaVec[i];
		float phi = m_phiVec[i];
		float z_dis = m_zDistanceVec[i];
		// Euler angles are fetched but only the Rodrigues angle is printed
		// (the euler printout below is commented out).
		float x_angle = m_eulerAngleVec[i][0];
		float y_angle = m_eulerAngleVec[i][1];
		float z_angle = m_eulerAngleVec[i][2];
		float rod_angle = m_rodriguesAngle[i];
		fprintf(stdout, "%f %f %f ", theta, phi, z_dis);
		fprintf(stdout, "%f\n", rod_angle);
	//	fprintf(stdout, "%f %f %f\n", x_angle, y_angle, z_angle);
	}	
}

// Visualize the reprojection-error distribution two ways (technique from
// BMVC 2010, "Robust camera calibration using inaccurate targets"):
//   wholeDist  - half-scale image plane, each corner with its magnified
//                error vector at its measured image location;
//   cornerDist - errors grouped per pattern-grid cell across all views.
// Both maps are shown on screen and saved as PNGs.
// magFactor scales the (sub-pixel) error vectors so they are visible.
void CCamera::visualizeRPR(const CvSize &patternSize, int magFactor)
{
	//from bmvc 2010 robust camera calibration using inaccurate targets
	assert(!m_reprojectPts.empty());
	assert(m_reprojectPts.size() == m_imagePoints.size());
	assert(patternSize.width * patternSize.height == m_reprojectPts[0].size());
	
	// 50-pixel cell per pattern corner, with a one-cell border.
	IplImage *cornerDist = cvCreateImage(cvSize((patternSize.width + 2) * 50, (patternSize.height + 2) * 50), IPL_DEPTH_8U, 3);
	IplImage *wholeDist  = cvCreateImage(cvSize(m_imageSize.width/2, m_imageSize.height/2), IPL_DEPTH_8U, 3);
	cvSet(cornerDist, cvScalarAll(255));
	cvSet(wholeDist, cvScalarAll(255));

	//draw wholeDist
	for (int i = 0; i<m_reprojectPts.size(); ++i)
	{
		for (int j = 0; j<m_imagePoints[i].size(); ++j)
		{
			// Measured corner at half resolution; error vector magnified.
			CvPoint p1 = cvPoint(cvRound(m_imagePoints[i][j].x/2), cvRound(m_imagePoints[i][j].y/2));
			int dif_x = cvRound((m_reprojectPts[i][j].x - m_imagePoints[i][j].x) * magFactor);
			int dif_y = cvRound((m_reprojectPts[i][j].y - m_imagePoints[i][j].y) * magFactor);
			cvCircle(wholeDist, p1, 1, cvScalar(255, 0, 0));
			cvLine(wholeDist, p1, cvPoint(p1.x + dif_x, p1.y + dif_y), cvScalar(0, 0, 255));
		}
	}

	//draw corner distribution map
	for (int i = 0; i<m_reprojectPts.size(); ++i)
	{
		for (int y = 0; y<patternSize.height; ++y)
		{
			for (int x = 0; x<patternSize.width; ++x)
			{
				// All views overlay their error for grid cell (x, y) at the
				// same anchor point, revealing systematic per-corner bias.
				int idx = y * patternSize.width + x;
				int dif_x = cvRound((m_reprojectPts[i][idx].x - m_imagePoints[i][idx].x) * magFactor);
				int dif_y = cvRound((m_reprojectPts[i][idx].y - m_imagePoints[i][idx].y) * magFactor);
				CvPoint p1 = cvPoint((x+1)*50, (y+1)*50);
				cvCircle(cornerDist, p1, 1, cvScalar(0, 0, 255));
				cvLine(cornerDist, p1, cvPoint(p1.x + dif_x, p1.y + dif_y), cvScalar(255, 0, 0));
			}
		}
	}
	cvShowImage("reprojection error distribution in image plane", wholeDist);
	cvShowImage("reprojection error distribution in corner place", cornerDist);
	cvSaveImage("whole_dis.png", wholeDist);
	cvSaveImage("corner_dis.png", cornerDist);
	cvReleaseImage(&wholeDist);
	cvReleaseImage(&cornerDist);
}


// Load the intrinsic matrix and distortion coefficients written by
// saveCamera(): nine matrix entries, a coefficient count, then that many
// coefficients. Fields that fail to parse are set to zero.
void CCamera::loadCamera(const string& filename)
{
	FILE *pf = fopen(filename.c_str(), "r");	
	assert(pf);
	float data = 0;
	for (int i = 0; i<3; ++i)
	{
		for (int j = 0; j<3; ++j)
		{
			// Robustness fix: fscanf results were ignored, so a malformed
			// file silently reused the previous value of `data`.
			data = 0.0f;
			if (fscanf(pf, "%f", &data) != 1)
			{
				data = 0.0f;
			}
			m_intrinsic.at<double>(i, j) = data;
		}		
	}
	int num = 0;
	if (fscanf(pf, "%d", &num) != 1)
	{
		num = 0;
	}
	// Never read past the allocated coefficient rows.
	for (int i = 0; i<num && i<m_distortionCoeffs.rows; ++i)
	{	
		data = 0.0f;
		if (fscanf(pf, "%f", &data) != 1)
		{
			data = 0.0f;
		}
		m_distortionCoeffs.at<double>(i, 0) = data;
	}
	fclose(pf);
}

void CCamera::saveText(const string &videoFileName, const string &fielName)
{
	FILE *pf = NULL;
	if (fielName.empty())
	{ 
		pf = stdout;
	}
	else 
	{
		pf = fopen(fielName.c_str(), "w");
	}
	assert(pf);
	fprintf(pf, "video file name: %s\n\n", videoFileName.c_str());
	fprintf(pf, "camera matrix:\n");
	for (int i = 0; i<3; ++i)
	{
		for (int j = 0; j<3; ++j)
		{
			fprintf(pf, "%15.6f ", m_intrinsic.at<double>(i, j));
		}
		fprintf(pf, "\n");
	}
	fprintf(pf, "\n");

	fprintf(pf, "flags:\n");

	if (m_flags & CV_CALIB_USE_INTRINSIC_GUESS)
	{
		fprintf(pf, "%s\n", string("CV_CALIB_USE_INTRINSIC_GUESS").c_str());
	}
	if (m_flags & CV_CALIB_FIX_ASPECT_RATIO)
	{
		fprintf(pf, "%s\n", string("CV_CALIB_FIX_ASPECT_RATIO").c_str());
	}
	if (m_flags & CV_CALIB_FIX_PRINCIPAL_POINT)
	{
		fprintf(pf, "%s\n", string("CV_CALIB_FIX_PRINCIPAL_POINT").c_str());
	}
	if (m_flags & CV_CALIB_ZERO_TANGENT_DIST)
	{
		fprintf(pf, "%s\n", string("CV_CALIB_ZERO_TANGENT_DIST").c_str());
	}

	if (m_flags & CV_CALIB_FIX_FOCAL_LENGTH)
	{
		fprintf(pf, "%s\n", string("CV_CALIB_FIX_FOCAL_LENGTH").c_str());
	}
	if (m_flags & CV_CALIB_FIX_K1)
	{
		fprintf(pf, "%s\n", string("CV_CALIB_FIX_K1").c_str());
	}
	if (m_flags & CV_CALIB_FIX_K2)
	{
		fprintf(pf, "%s\n", string("CV_CALIB_FIX_K2").c_str());
	}
	if (m_flags & CV_CALIB_FIX_K3)
	{
		fprintf(pf, "%s\n", string("CV_CALIB_FIX_K3").c_str());
	}
	if (m_flags & CV_CALIB_FIX_K4)
	{
		fprintf(pf, "%s\n", string("CV_CALIB_FIX_K4").c_str());
	}
	if (m_flags & CV_CALIB_FIX_K5)
	{
		fprintf(pf, "%s\n", string("CV_CALIB_FIX_K5").c_str());
	}
	if (m_flags & CV_CALIB_FIX_K6)
	{
		fprintf(pf, "%s\n", string("CV_CALIB_FIX_K6").c_str());
	}
	if (m_flags & CV_CALIB_RATIONAL_MODEL)
	{
		fprintf(pf, "%s\n", string("CV_CALIB_RATIONAL_MODEL").c_str());
	}
	fprintf(pf, "\n");
	

	fprintf(pf, "distortion:\n");
	for (int i = 0; i<m_distortionCoeffs.rows; ++i)
	{
		fprintf(pf, "%f ", m_distortionCoeffs.at<double>(i, 0));
	}
	fprintf(pf, "\n\n");

	fprintf(pf, "principal point:\n");
	fprintf(pf, "%15.6f %f\n\n\n", m_principalPoint.x, m_principalPoint.y);

	fprintf(pf, "focal length:\n");
	fprintf(pf, "%15.6f\n\n", m_focalLength);
	fprintf(pf, "aspect ratio:\n%15.6f\n\n", m_aspectRatio);

	fprintf(pf, "object position: rotate vector and translation vector\n");
	for (int i = 0; i<m_RM.size() && i<m_TM.size(); ++i)	
	{
		fprintf(pf, "%.6f %.6f %.6f \t", m_RM[i].at<double>(0, 0), m_RM[i].at<double>(1, 0), m_RM[i].at<double>(2, 0));
		fprintf(pf, "%.6f %.6f %.6f\n", m_TM[i].at<double>(0, 0), m_TM[i].at<double>(1, 0), m_TM[i].at<double>(2, 0));
	}
	fprintf(pf, "\n\n");

	//theta
	for (int i = 0; i<m_RM.size(); ++i)
	{
		double r33[9];
		cv::Mat rMat(3, 3, CV_64F, r33);
		cv::Rodrigues(m_RM[i], rMat);		

		vec3f p0(0, 0, 0);
		vec3f p1(1, 0, 0);
		vec3f p2(0, 1, 0);
		float angle = 0;
		intersectionAngle(p0, p1, p2, r33, angle);
		angle *= 180.0/CV_PI;
		fprintf(pf, "%d angle = %15.6f\n\n", i, angle);

	}
	if (pf != stdout)
	{
		fclose(pf);
	}

}
// Persist the full calibration result (intrinsics, distortion, per-view
// extrinsics, errors) via the static saveCameraParams helper.
void CCamera::saveResult(const string& filename)
{
	CCamera::saveCameraParams(filename, m_imageSize, m_flags,
		m_intrinsic, m_distortionCoeffs, m_RM, m_TM, m_reprojErrs, m_imagePoints, m_totalAvgErr, 
		m_principalPoint, m_focalLength, m_aspectRatio);
}



// Detect chessboard corners in m_currentFrame (expected grayscale) and
// refine them to sub-pixel accuracy into m_currentCorners2d.
// boardSize: inner-corner grid dimensions. Returns true on detection.
bool CCamera::getChessboardPoints(const CvSize &boardSize)
{
	m_currentCorners2d.resize(boardSize.width * boardSize.height);
	// Idiom fix: findChessboardCorners returns a boolean success value;
	// store it as bool, not int.
	bool found = cv::findChessboardCorners( m_currentFrame, boardSize, m_currentCorners2d,	
										   CV_CALIB_CB_FAST_CHECK) != 0;
	// improve the found corners' coordinate accuracy
	if(found)
	{
		cv::cornerSubPix( m_currentFrame, m_currentCorners2d, Size(11,11), Size(-1,-1), 
						  TermCriteria( CV_TERMCRIT_EPS+CV_TERMCRIT_ITER, 30, 0.1 ));
//		cout<<"found chessboard"<<endl;

#if CVX_SHOW_CHESSBOARD
		// Debug overlay: draw the detected grid on a color copy.
		Mat colorMat = Mat(m_currentFrame.rows, m_currentFrame.cols, CV_8UC1);
		cv::cvtColor(m_currentFrame, colorMat, CV_GRAY2BGR);
		cv::drawChessboardCorners(colorMat, boardSize, cv::Mat(m_currentCorners2d), true);
		cv::imshow(string("find corners"), colorMat);		
		cvWaitKey(1);
#endif		
		return true;
	}
	else
	{
#if CVX_SHOW_CHESSBOARD
		cv::imshow(string("find corners"), m_currentFrame);
		cvWaitKey(1);
#endif		
//		fprintf(stderr, "can not find chessboard from image %s  %d\n", __FILE__, __LINE__);
		return false;
	}
}

// Install pts3d as the shared 3D reference pattern: every captured view
// gets its own copy, matching m_imagePoints one-to-one.
void CCamera::setCorners3d(const vector<Point3f>& pts3d)
{
	// assign() == clear() + resize(n, value) in a single call.
	m_objectPoints.assign(m_imagePoints.size(), pts3d);
}

// Initialize calibration state to defaults: identity intrinsics, zero
// distortion/error vectors, and a 4.8 x 3.6 mm sensor aperture.
void CCamera::initCamera()
{
	m_intrinsic    = Mat::eye(3, 3, CV_64F);
	m_intrisicGuess = Mat::eye(3, 3, CV_64F);
	// 8 coefficients: k1 k2 p1 p2 k3 k4 k5 k6 (rational model layout).
	m_distortionCoeffs  = Mat::zeros(8, 1, CV_64F);
	m_parameterError = Mat::zeros(8, 1, CV_64F);
	m_stdDev = Mat::zeros(2, 1, CV_64F);
	m_aspectRatio = 1.0;
	m_flags = 0;

	// Physical sensor aperture in mm (1/3" class sensor).
	m_apertureWidth = 4.8;
	m_apertureHeight = 3.6;
	m_focalLength = -1.0;   // -1 marks "not yet calibrated"
	// NOTE(review): (1920, 1080) looks like a full image size, not an image
	// center -- confirm this default is intentional (it is overwritten by
	// calib() on success).
	m_principalPoint = cvPoint(1920, 1080);

}

// Mark each corner with a red pixel (BGR 0,0,255) directly in image.
// Assumes a 3-channel 8-bit image -- TODO confirm callers always pass BGR.
void CCamera::drawChessboardCorners( Mat& image, const vector<Point2f> & corners)
{
	for (size_t i = 0; i<corners.size(); ++i)
	{
		int x = cvRound(corners[i].x);
		int y = cvRound(corners[i].y);

		// Robustness fix: a sub-pixel corner can round to a coordinate
		// outside the image; skip it instead of writing out of bounds.
		if (x < 0 || y < 0 || x >= image.cols || y >= image.rows)
		{
			continue;
		}

		 uchar* ptr = image.ptr<unsigned char>(y, x);
		 ptr[0] = 0;
		 ptr[1] = 0;
		 ptr[2] = 255;
	}


}

// Set the CV_CALIB_* flag bitmask used by the next calibration run.
void CCamera::setFlags(int flag)
{
	m_flags = flag;
}

// Append one view's 2D/3D correspondence pair to the calibration data.
// Requires matching sizes and at least 4 points (minimum for a pose).
void CCamera::addFeature2dAnd3d(const vector<Point2f> &pts, const vector<Point3f> &pts3d)
{
	assert(pts.size() == pts3d.size());
	assert(pts.size() >= 4);
	m_imagePoints.push_back(pts);
	m_objectPoints.push_back(pts3d);
}

// Format a human-readable calibration summary into buf.
// WARNING: buf's capacity is unknown here (sprintf, no bound) -- callers
// must supply a buffer large enough for the full formatted string.
void CCamera::GetResult(char *buf)
{
	assert(buf);
	sprintf(buf, "Focal length:\r\n%.2f \r\nAspect Ratio: \r\n%.4f \r\nPrincipal Point:\r\n%.2f   %.2f\r\n\r\nRadial distortion:\r\n %.3f %.3f %.3f\r\nTangential distortion: \r\n %.3f %.3f\r\n reproj error:\r\n %.3f\r\n", 
			m_focalLength, m_aspectRatio, 
			m_intrinsic.at<double>(0, 2), m_intrinsic.at<double>(1, 2), 
			m_distortionCoeffs.at<double>(0, 0)/*K1*/, m_distortionCoeffs.at<double>(1, 0)/*K2*/, m_distortionCoeffs.at<double>(4, 0),/*K3*/
			m_distortionCoeffs.at<double>(2, 0), m_distortionCoeffs.at<double>(3, 0), m_totalAvgErr);

}

// Format the intrinsic parameters, their 3-sigma error estimates, the
// reprojection error and physical focal length into buf.
// WARNING: buf's capacity is unknown here (sprintf, no bound).
void CCamera::GetIntrinsicParameter(char *buf)
{
	assert(buf);
	//focal length
	float fx = m_intrinsic.at<double>(0, 0);
	float fy = m_intrinsic.at<double>(1, 1);

	//principle point
	float cx = m_intrinsic.at<double>(0, 2);
	float cy = m_intrinsic.at<double>(1, 2);

	//distortion
	float k1, k2, p1, p2, k3;
	k1 = m_distortionCoeffs.at<double>(0, 0);
	k2 = m_distortionCoeffs.at<double>(1, 0);
	p1 = m_distortionCoeffs.at<double>(2, 0);
	p2 = m_distortionCoeffs.at<double>(3, 0);
	k3 = m_distortionCoeffs.at<double>(4, 0);

	//parameter error
	float fx_e, fy_e, cx_e, cy_e, k1_e, k2_e, p1_e, p2_e, k3_e = 0.0;
	fx_e = m_parameterError.at<double>(0, 0);
	fy_e = m_parameterError.at<double>(1, 0);
	cx_e = m_parameterError.at<double>(2, 0);
	cy_e = m_parameterError.at<double>(3, 0);
	k1_e = m_parameterError.at<double>(4, 0);
	k2_e = m_parameterError.at<double>(5, 0);
	p1_e = m_parameterError.at<double>(6, 0);
	p2_e = m_parameterError.at<double>(7, 0);
	// BUG FIX: was (m_flags | CV_CALIB_FIX_K2) -- always non-zero, so the
	// errors were unconditionally zeroed. Bitwise AND tests the flag.
	if (m_flags & CV_CALIB_FIX_K2)
	{
		k2_e = 0.0;
		k3_e = 0.0;
	}

	float re_proj_err;
	re_proj_err = m_totalAvgErr;

	float f_phy;
	f_phy = m_focalLength;	
	sprintf(buf, "%8.2f %8.2f %8.2f %8.2f %8.6f %8.6f %8.6f %8.6f %8.6f\r\n  %8.2f  %8.2f  %8.2f  %8.2f %8.6f %8.6f %8.6f %8.6f %8.6f\r\n %8.2f %8.2f\r\n", 
		    fx, fy, cx, cy, k1, k2, p1, p2, k3, fx_e, fy_e, cx_e, cy_e, k1_e, k2_e, p1_e, p2_e, k3_e, re_proj_err, f_phy);

}

void CCamera::GetOrientation(CCamera &otherCamera, bool useEightPts, char *buf)
{
	// Estimates the relative pose between this (left) camera and
	// otherCamera (right) via stereo calibration on the first view's
	// correspondences, then formats the rotation (as Euler angles from an
	// RQ decomposition) and translation into buf.
	assert(buf);

	const int imageIdx = 0;  // always use the first captured view
	vector<vector<Point3f>> objectPtsSet;
	vector<vector<Point2f>> leftPtsSet;
	vector<vector<Point2f>> rightPtsSet;

	if (useEightPts)
	{
		// Fixed subset of 8 corner indices spread across the pattern
		// (presumably a 15-column grid - verify against the board layout).
		const int selected[8] = {0, 14, 15*8, 15*9-1, 2*15+4, 2*15+8, 6*15+4, 6*15+8};
		vector<Point3f> objSubset;
		vector<Point2f> leftSubset;
		vector<Point2f> rightSubset;
		for (int k = 0; k < 8; ++k)
		{
			const int idx = selected[k];
			objSubset.push_back(m_objectPoints[imageIdx][idx]);
			leftSubset.push_back(m_imagePoints[imageIdx][idx]);
			rightSubset.push_back(otherCamera.m_imagePoints[imageIdx][idx]);
		}
		objectPtsSet.push_back(objSubset);
		leftPtsSet.push_back(leftSubset);
		rightPtsSet.push_back(rightSubset);
	}
	else
	{
		// Use every detected point of the first view.
		objectPtsSet.push_back(m_objectPoints[imageIdx]);
		leftPtsSet.push_back(m_imagePoints[imageIdx]);
		rightPtsSet.push_back(otherCamera.m_imagePoints[imageIdx]);
	}

	// Solve for the extrinsics between the two cameras; both cameras'
	// intrinsics stay fixed at their previously calibrated values.
	Mat R, T, E, F;
	cv::stereoCalibrate(objectPtsSet,
		leftPtsSet, rightPtsSet,
		m_intrinsic, m_distortionCoeffs,
		otherCamera.m_intrinsic, otherCamera.m_distortionCoeffs,
		m_imageSize, R, T, E, F,
		TermCriteria(TermCriteria::COUNT+TermCriteria::EPS, 30, 1e-6),
		CALIB_FIX_INTRINSIC);

	// RQDecomp3x3 returns the Euler angles implied by R.
	Mat mtxR, mtxQ;
	Vec3d euler = cv::RQDecomp3x3(R, mtxR, mtxQ);
	sprintf(buf, "Rotate euler angle: %.4f  %.4f  %.4f   Translation %.4f  %.4f  %.4f\r\n", 
		euler[0], euler[1], euler[2],  
		T.at<double>(0, 0), T.at<double>(1, 0), T.at<double>(2, 0));

}

void CCamera::configFlags()
{
	// Pre-seeds m_intrinsic according to the calibration flags held in
	// m_flags, before calibrateCamera() runs.
	//
	// OpenCV flag values for reference:
	//   CV_CALIB_USE_INTRINSIC_GUESS  1
	//   CV_CALIB_FIX_ASPECT_RATIO     2
	//   CV_CALIB_FIX_PRINCIPAL_POINT  4      (not supported here)
	//   CV_CALIB_ZERO_TANGENT_DIST    8
	//   CV_CALIB_FIX_FOCAL_LENGTH     16     (stereo calibration only)
	//   CV_CALIB_FIX_K1 32 / K2 64 / K3 128 / K4 2048 / K5 4096 / K6 8192
	//   CV_CALIB_RATIONAL_MODEL       16384

	const bool fixAspect = (m_flags & CV_CALIB_FIX_ASPECT_RATIO) != 0;
	const bool useGuess = (m_flags & CV_CALIB_USE_INTRINSIC_GUESS) != 0;
	const bool fixPrincipal = (m_flags & CV_CALIB_FIX_PRINCIPAL_POINT) != 0;

	if (fixAspect)
	{
		// Seed fx = aspectRatio, fy = 1.0 so the fixed fx/fy ratio equals
		// m_aspectRatio.
		m_intrinsic.at<double>(0, 0) = m_aspectRatio;
		m_intrinsic.at<double>(1, 1) = 1.0;
	}

	if (useGuess)
	{
		// Intentionally empty: the guess is expected to already be loaded
		// into m_intrinsic.
		//	m_intrinsic= m_intrisicGuess;
	}

	if (fixPrincipal)
	{
		// Pin the principal point to the exact image center.
		m_intrinsic.at<double>(0, 2) = (m_imageSize.width - 1) / 2.0;
		m_intrinsic.at<double>(1, 2) = (m_imageSize.height - 1) / 2.0;
	}

}

//calibration from opencv 2.2 sample 
bool CCamera::calibration(const vector<vector<cv::Point3f> > &objectPoints, 
						  const vector<vector<cv::Point2f> > &imagePoints,
						  CvSize imageSize, int flags, Mat& cameraMatrix, Mat& distCoeffs,
						  vector<Mat>& rvecs, vector<Mat>& tvecs)
{
	//test 
#if 0
	{
		IplImage *grayImage = cvLoadImage("store_img\\2\\left0.png", 0);
		assert(grayImage);
		vector<vector<cv::Point> > contours;
		contours.resize(0);
		//stet one: get 4 corner polygon
		Mat threshImage = cv::Mat(grayImage, true);
		cv::adaptiveThreshold(Mat(grayImage), threshImage, 255, ADAPTIVE_THRESH_MEAN_C, THRESH_BINARY_INV, 7, 7);
		//hierarchy is next, previous, first child, parent
		cv::findContours(threshImage, contours, CV_RETR_TREE, CV_CHAIN_APPROX_NONE);
	}
#endif
//	rvecs.resize(0);
//	tvecs.resize(0);
	for (int i = 0; i<imagePoints.size(); ++i)
	{
		rvecs.push_back(Mat(3, 1, CV_64F));
		tvecs.push_back(Mat(3, 1, CV_64F));
	}
	double rms = cv::calibrateCamera(objectPoints, imagePoints, imageSize, cameraMatrix,
									 distCoeffs, rvecs, tvecs, flags);	
//	printf("re-projection error reported by calibrateCamera: %g\n", rms);

	bool ok = cv::checkRange(cameraMatrix) && cv::checkRange(distCoeffs);
	return ok;
}

//computer Reprojection Error from openCV 2.2 sample
double CCamera::computeReprojectionErrors(const vector<vector<Point3f> >& objectPoints,
 										   const vector<vector<Point2f> >& imagePoints,
 										   const vector<Mat>& rvecs, const vector<Mat>& tvecs,
 										   const Mat& cameraMatrix, const Mat& distCoeffs,
 										   vector<float>& perViewErrors,
										   vector<vector<Point2f> >& reprojPts)
 {
 	
 	int totalPoints = 0;
 	double totalErr = 0;
 	perViewErrors.resize(objectPoints.size());
	reprojPts.resize(objectPoints.size());
 
 	for(int i = 0; i < (int)objectPoints.size(); i++ )
 	{
		vector<Point2f> imagePointsProj(objectPoints[i].size());
		cv::projectPoints(Mat(objectPoints[i]), rvecs[i], tvecs[i],
 						  cameraMatrix, distCoeffs, imagePointsProj);
		reprojPts[i] = imagePointsProj;

		double err = cv::norm(Mat(imagePoints[i]), Mat(imagePointsProj), CV_L2);
 		int n = (int)objectPoints[i].size();
 		perViewErrors[i] = (float)std::sqrt(err*err/n);
 		totalErr += err*err;
 		totalPoints += n;
 	} 
 	return std::sqrt(totalErr/totalPoints);
 }

void CCamera::saveCameraParams( const string& filename,
								Size imageSize, int flags,
	 const Mat& cameraMatrix, const Mat& distCoeffs,
	 const vector<Mat>& rvecs, const vector<Mat>& tvecs,
	 const vector<float>& reprojErrs,
	 const vector<vector<Point2f> >& imagePoints,
	 double totalAvgErr, const Vec2d &principalPoint, double focalLength, double aspectRatio )
 {
	 // Writes the calibration results (intrinsics, distortion, per-view
	 // extrinsics and reprojection errors) to an OpenCV FileStorage file.
	 FileStorage fs( filename, FileStorage::WRITE );
	 if ( !fs.isOpened() )
		 return;  // nothing to do if the output file cannot be created

	 // Timestamp the file.
	 time_t t;
	 time( &t );
	 struct tm *t2 = localtime( &t );
	 char buf[1024];
	 strftime( buf, sizeof(buf)-1, "%c", t2 );

	 fs << "calibration_time" << buf;

	 if( !rvecs.empty() || !reprojErrs.empty() )
		 fs << "nframes" << (int)std::max(rvecs.size(), reprojErrs.size());
	 fs << "image_width" << imageSize.width;
	 fs << "image_height" << imageSize.height;

	 // Human-readable flag summary, written as a YAML/XML comment.
	 if( flags != 0 )
	 {
		 sprintf( buf, "flags: %s%s%s%s",
			 flags & CV_CALIB_USE_INTRINSIC_GUESS ? "+use_intrinsic_guess" : "",
			 flags & CV_CALIB_FIX_ASPECT_RATIO ? "+fix_aspectRatio" : "",
			 flags & CV_CALIB_FIX_PRINCIPAL_POINT ? "+fix_principal_point" : "",
			 flags & CV_CALIB_ZERO_TANGENT_DIST ? "+zero_tangent_dist" : "" );
		 cvWriteComment( *fs, buf, 0 );
	 }

	 fs << "flags" << flags;
		
	 fs << "camera_matrix" << cameraMatrix;
	 fs << "distortion_coefficients" << distCoeffs;	
	 fs <<"focal_length"<<focalLength;
	 fs <<"principal_point"<<principalPoint;
	 fs <<"aspect_ratio"<<aspectRatio;

	 fs << "avg_reprojection_error" << totalAvgErr;
	 if( !reprojErrs.empty() )
		 fs << "per_view_reprojection_errors" << Mat(reprojErrs);

	 if( !rvecs.empty() && !tvecs.empty() )
	 {
		 // BUG FIX: bigmat was hard-coded CV_32F while calibrateCamera
		 // produces CV_64F 3x1 rvecs/tvecs. Mat::copyTo into a ROI of a
		 // different type (and 3x1 into a 1x3 ROI) reallocates the
		 // destination header instead of writing through it, so bigmat was
		 // saved uninitialized. Allocate with the source type and assign
		 // the transposed vectors: *.t() yields a MatExpr, and assigning a
		 // same-sized MatExpr writes into the ROI (per the OpenCV
		 // calibration sample).
		 Mat bigmat((int)rvecs.size(), 6, rvecs[0].type());
		 for( int i = 0; i < (int)rvecs.size(); i++ )
		 {
			 Mat r = bigmat(Range(i, i+1), Range(0,3));
			 Mat t = bigmat(Range(i, i+1), Range(3,6));
			 r = rvecs[i].t();
			 t = tvecs[i].t();
		 }
		 cvWriteComment( *fs, "a set of 6-tuples (rotation vector + translation vector) for each view", 0 );
		 fs << "extrinsic_parameters" << bigmat;
	 }

	 //not very clear about image points
	 //row * col * 2 channels, row = frame number, col = corner number

// 	 if( !imagePoints.empty() )
// 	 {
// 		 Mat imagePtMat((int)imagePoints.size(), imagePoints[0].size(), CV_32FC2);
// 		 for( int i = 0; i < (int)imagePoints.size(); i++ )
// 		 {
// 			 Mat r = imagePtMat.row(i).reshape(2, imagePtMat.cols);
// 			 Mat imgpti(imagePoints[i]);
// 			 imgpti.copyTo(r);
// 		 }
// 		 fs << "image_points" << imagePtMat;
// 	 }
 }
 