// Standard library
#include <cmath>    // std::fabs (cMoToABC epsilon test)
#include <iostream> // console output
#include <limits>   // std::numeric_limits<double>::epsilon
#include <vector>   // store the polygon

// ViSP
#include <visp/vpException.h>
#include <visp/vpFeatureMomentCommon.h> // init the feature database using the information about moment dependencies
#include <visp/vpMomentCommon.h>        // update the common database with the object
#include <visp/vpMomentObject.h>        // transmit the polygon to the object
#include <visp/vpPlane.h>
#include <visp/vpPoint.h>               // the basic tracker
#include <visp/vpRobotCamera.h>
#include <visp/vpServo.h>               // visual servoing task
//this function converts the plane defined by the cMo to 1/Z=Ax+By+C plane form
/// Converts the object plane (Z=0 in the object frame), seen through the pose
/// cMo, into the camera-frame depth parametrization 1/Z = A*x + B*y + C.
///
/// \param cMo  pose of the object frame with respect to the camera.
/// \param A    [out] coefficient of x in 1/Z = A*x + B*y + C.
/// \param B    [out] coefficient of y.
/// \param C    [out] constant term.
/// \throws vpException(vpException::divideByZeroError) when the transformed
///         plane has D == 0 (plane through the optical center), in which case
///         the 1/Z form does not exist.
void cMoToABC(vpHomogeneousMatrix& cMo, double& A, double& B, double& C){
	// Object plane: Z = 0 in the object frame, i.e. normal (0,0,1), D = 0.
	vpPlane pl;
	pl.setABCD(0, 0, 1.0, 0);
	pl.changeFrame(cMo); // express the same plane in the camera frame

	// With the camera-frame plane a*X + b*Y + c*Z + d = 0, the depth map is
	// 1/Z = -(a/d)*x - (b/d)*y - (c/d), which requires d != 0.
	if (std::fabs(pl.getD()) < std::numeric_limits<double>::epsilon()) {
		std::cout << "Invalid position:" << std::endl;
		std::cout << cMo << std::endl;
		std::cout << "Cannot put plane in the form 1/Z=Ax+By+C." << std::endl;
		throw vpException(vpException::divideByZeroError,"invalid position!");
	}
	A = -pl.getA() / pl.getD();
	B = -pl.getB() / pl.getD();
	C = -pl.getC() / pl.getD();
}

int Controller(double xV[], double yV[],double xoV[], double yoV[], double tV[],double rV[]){

	//corner points in image plane four border corner of the cass board
					std::cout<<"x1= "<<xV[0]<<std::endl;
					std::cout<<"y1= "<<yV[0]<<std::endl;
					std::cout<<"x2= "<<xV[1]<<std::endl;
					std::cout<<"y1= "<<yV[1]<<std::endl;
					std::cout<<"x3= "<<xV[2]<<std::endl;
					std::cout<<"y3= "<<yV[2]<<std::endl;
					std::cout<<"x4= "<<xV[3]<<std::endl;
					std::cout<<"y4= "<<yV[3]<<std::endl;




					//determine how indexing of the corner has been done and assign 4 corners of the edge as four points of the object
					std::cout<<"xo1= "<<xoV[0]<<std::endl;
					std::cout<<"yo1= "<<yoV[0]<<std::endl;
					std::cout<<"xo2= "<<xoV[1]<<std::endl;
					std::cout<<"yo2= "<<yoV[1]<<std::endl;
					std::cout<<"xo3= "<<xoV[2]<<std::endl;
					std::cout<<"yo3= "<<yoV[2]<<std::endl;
					std::cout<<"xo4= "<<xoV[3]<<std::endl;
					std::cout<<"yo4= "<<yoV[3]<<std::endl;

					std::cout<<"tV1= "<<tV[0]<<std::endl;
					std::cout<<"tV1= "<<tV[1]<<std::endl;
					std::cout<<"tV1= "<<tV[2]<<std::endl;

					std::cout<<"rV1= "<<rV[0]<<std::endl;
					std::cout<<"rV1= "<<rV[1]<<std::endl;
					std::cout<<"rV1= "<<rV[2]<<std::endl;




					// These points are observed by a camera
					vpHomogeneousMatrix cMo(tV[0], tV[1], tV[2], rV[0], rV[1], rV[2]); // We set the camera to be 1m far the object
					vpHomogeneousMatrix cdMo(vpHomogeneousMatrix(0.0,0.0,1.0,vpMath::rad(0),vpMath::rad(0),vpMath::rad(0)));






	// Define an object as 4 clockwise points on a plane (Z=0)
				  vpPoint p;
				  vpPoint po;
				  std::vector<vpPoint> vec_p; // vector that contains the 4 points
				  std::vector<vpPoint> vec_p_d; // vector that contains the 4 points
				  double A,B,C,Ad,Bd,Cd;

//				  p.set_X (xV[0])
//				  p.set_Y (yV[0])

				  p.setWorldCoordinates(xoV[0], yoV[0], 0.0); // values in meters
				  p.track(cMo) ;
				  vec_p.push_back(p);
				  p.track(cdMo) ;
				  vec_p_d.push_back(p);

				  p.setWorldCoordinates(xoV[1], yoV[1], 0.0); // values in meters
				  p.track(cMo) ;
				  vec_p.push_back(p);
				  p.track(cdMo) ;
				  vec_p_d.push_back(p);

				  p.setWorldCoordinates(xoV[2], yoV[2], 0.0); // values in meters
				  p.track(cMo) ;
				  vec_p.push_back(p);
				  p.track(cdMo) ;
				  vec_p_d.push_back(p);


				  p.setWorldCoordinates(xoV[3], yoV[3], 0.0); // values in meters
				  p.track(cMo) ;
				  vec_p.push_back(p);
				  p.track(cdMo) ;
				  vec_p_d.push_back(p);




				 // ... update cMo from an image processing

				// Apply the perspective projection to update the points coordinates in the camera plane (these are corner image points)
//				    for(unsigned int i=0; i<vec_p.size(); ++i)
//				      vec_p[i].project(cMo);


				    vpMomentObject cur(6); // Create a source moment object with 6 as maximum order
				  	cur.setType(vpMomentObject::DENSE_POLYGON); // The object is defined by a countour polygon
				  	cur.fromVector(vec_p); // Init the dense object with the source polygon

				  	vpMomentObject dst(6); // Create a destination moment object with 6 as maximum order
				  	dst.setType(vpMomentObject::DENSE_POLYGON); // The object is defined by a countour polygon
				  	dst.fromVector(vec_p_d); // Init the dense object with the destination polygon




				    //init classic moment primitives (for source)
				    vpMomentCommon mdb_cur(vpMomentCommon::getSurface(dst),vpMomentCommon::getMu3(dst),vpMomentCommon::getAlpha(dst)); //Init classic features
				    vpFeatureMomentCommon fmdb_cur(mdb_cur);

				    ////init classic moment primitives (for destination)
				    vpMomentCommon mdb_dst(vpMomentCommon::getSurface(dst),vpMomentCommon::getMu3(dst),vpMomentCommon::getAlpha(dst)); //Init classic features
				    vpFeatureMomentCommon fmdb_dst(mdb_dst);

				    //update+compute moment primitives from object (for destination)
				    mdb_dst.updateAll(dst);
				    //update+compute features (+interaction matrixes) from plane
				    fmdb_dst.updateAll(Ad,Bd,Cd);

				    //define visual servoing task
				    	vpServo task;
				    	task.setServo(vpServo::EYEINHAND_CAMERA);
				    	task.setInteractionMatrixType(vpServo::CURRENT);
				    	task.setLambda(1) ;

				    	task.addFeature(fmdb_cur.getFeatureGravityNormalized(),fmdb_dst.getFeatureGravityNormalized());
				    	task.addFeature(fmdb_cur.getFeatureAn(),fmdb_dst.getFeatureAn());
				    	//the object is NOT symmetric
				    	//select C4 and C6
				    	task.addFeature(fmdb_cur.getFeatureCInvariant(),fmdb_dst.getFeatureCInvariant(),
				    	vpFeatureMomentCInvariant::selectC4() | vpFeatureMomentCInvariant::selectC6());
				    	task.addFeature(fmdb_cur.getFeatureAlpha(),fmdb_dst.getFeatureAlpha());

				    	//param robot
				    	vpRobotCamera robot ;
				    	float sampling_time = 0.010f; // Sampling period in seconds
				    	robot.setSamplingTime(sampling_time);
				    	robot.setPosition(cMo);

				    	do{
				    		robot.getPosition(cMo);
				    		vec_p.clear();
int nbpoints=4;
				    		for (int i = 0 ; i < nbpoints ; i++){
				    			vpPoint p;
				    			p.setWorldCoordinates(xV[i],yV[i],0.0);
				    			p.track(cMo) ;
				    			vec_p.push_back(p);
				    		}
				    		cMoToABC(cMo,A,B,C);

				    		cur.fromVector(vec_p);
				    		//update+compute moment primitives from object (for source)
				    		mdb_cur.updateAll(cur);
				    		//update+compute features (+interaction matrixes) from plane
				    		fmdb_cur.updateAll(A,B,C);

				    		vpColVector v = task.computeControlLaw();
				    		task.print();
				    		robot.setVelocity(vpRobot::CAMERA_FRAME, v) ;
				    		double t = vpTime::measureTimeMs();
				    		vpTime::wait(t, sampling_time * 1000); // Wait 10 ms
				    	}while(task.error.sumSquare()>0.005);
				    	std::cout << "final error=" << task.error.sumSquare() << std::endl;

				    	return 0;
				    }


/*********************************Visual Servo**********************/
