namespace il
{
	//Plain parameter bag shared by the command-line front ends and the
	//DataMap hierarchy. Defaults mirror the CLI defaults; -1 means
	//"not set, derive from other parameters".
	struct TestParameters{
	
	public:
		bool compute_triplets = false;	//recompute active triplets instead of loading them
		int verbose = 2;				//verbosity level (0 = quiet)
		float margin = -1.0f;			//LMNN hinge-loss margin
		int K = 3;						//number of KNN neighbors
		int max_dimensions = -1;		//PCA truncation threshold (-1 = keep all)
		int solver = 4;					//optimizer code (3 = DE, 4 = CMAES)
		int poly_order = -1;			//polynomial degree / LMNN map rank
		int population_size = -1;		//solver population size (-1 = default)
		int generations = -1;			//solver generations (-1 = default)
		string triplets_file;			//path for the cached active-triplets file
		string training_file;			//training data (svmlight format)
		string test_file;				//test data; empty = split the training set
		string log_file;				//output log path
		int map_algorithm = 1;			//1 PolyMap, 2 LMNN, 3 Wormholing (unimplemented)
		bool normalize = false;			//apply data rescaling

		//Defaults are supplied by the in-class initializers above.
		TestParameters() = default;
	};

	class DataMap //Abstract base class for mapping-knn methods (polynomial mapping and lmnn)
	{
	public:
		DataMap(){}
		virtual ~DataMap()
		{
			targets.clear();
			active_set.clear();
		}

		Mat_<float> apply( const Mat_<float> &x)
		{
			Mat_<float> mx = x.clone();
			
			if(do_pca)
				pca->project( mx, mx);
			
			if(do_scaling)
			{
				try
				{
					mx = rescale( scaling_params, mx);
				}catch(exception &e)
				{
					cout << "Exception:" << e.what() <<"\nmx.cols: " << mx.cols << 
						"\n scaling_params dims: " << scaling_params.rows <<
						"X" << scaling_params.cols << endl;
					return Mat();
				}
			}

			mx = map( A, mx);

			return mx;
		}

		int get_dim(){ return _x.cols; }
		int get_k(){ return _k; }
		
	protected:

		//////////////////////////////////////////////////////////
		//Methods
		/////////////////////////////////////////////////////////

		//map and get_seed has to be overriden on the derived class
		virtual Mat_<float> map( const Mat_<float> &a, const Mat_<float> &x ) = 0;
		virtual Mat_<double> get_seed() = 0;

		float map_energy( const Mat_<float> &a )
		{
			int i,j,l;
			float c=0,z,v,w;
			Mat_<float> mx = map(a,_x);

			vector<vector<int>> impostors = identify_impostors(mx);

			for( i = 0 ; i < _x.rows ; ++i )
			{
				for( j = 0 ; j < targets[i].size() ; ++j )
				{
					w = d2( mx.row(i), map( a, _x.row(targets[i][j]) ) );
					c += w;
					for( l = 0 ; l < impostors[i].size() ; ++l )
					{
						v = d2( mx.row(i), mx.row(impostors[i][l]) );
						z = _margin + w - v;
						if(z>0)
							c += z;
					}
				}
			}
			return c;
		}

		void train()
		{			
			//identify targets etc
			setup_data_structures();
			//Main call
			A = apply_solver( _solver );
		}
		
		vector<vector<int>> identify_impostors( const Mat_<float> &mx )
		{
			//impostors.clear();
			//
			//KNearest knn( mx , _y, Mat(), false, _k );
			//
			//assert( check_mat<float>(mx) );
			//const float **neighbors = new const float*[_k];

			//for(int i = 0; i < mx.rows ; ++i)
			//{
			//	vector<Mat_<float>> iimpostors;
			//	
			//	Mat_<float> neigh_resp(1,_k);
			//	
			//	knn.find_nearest( mx.row(i), _k, (Mat *)0, neighbors, &neigh_resp );
			//	
			//	assert( neigh_resp.rows*neigh_resp.cols != 0 );
			//	assert( check_mat<float>(neigh_resp) );
			//	
			//	for(int k = 0 ; k < _k ; ++k)
			//	{
			//		if( _y(i,0)!=int(neigh_resp(0,k)) )
			//		{
			//			Mat_<float> neig( 1, mx.cols, (float *)neighbors[k] );
			//			iimpostors.push_back( neig.clone() );
			//		}	
			//	}

			//	impostors.push_back(iimpostors);
			//}

			//delete[] neighbors;
			
			int no_neighbors = _k;
			flann::KDTreeIndexParams indexparams;
			flann::Index index( mx, indexparams);
			
			Mat_<int> indices( mx.rows, no_neighbors );
			Mat_<float> dists( mx.rows, no_neighbors );
			
			index.knnSearch( mx, indices, dists, no_neighbors, flann::SearchParams() ); 
			vector<vector<int>> imp_indices;

			for(int i = 0 ; i < indices.rows ; ++i )
			{
				vector<int> imp;
				for(int k = 0 ; k < indices.cols ; ++k)
				{
					if(_y(indices(i,k),0)!=_y(i,0))
					{
						imp.push_back(indices(i,k));
					}
				}
				imp_indices.push_back(imp);

			}
			return imp_indices;
		}

		void setup_data_structures()//targets and likely impostors
		{
			cout << "Setting up metadata...\n";

			int64 to,tf;
			vector<int> classes;
			vector<vector<int>> indices_for_class;
			vector< Mat_<float> > training_data_for_class;
			list<int> classlist;

			for(int i = 0 ; i < _y.rows ; ++i)
				classlist.push_back( _y(i,0) );

			classlist.sort();
			classlist.unique();

			//and just because is nice to be able to index in the following loop:
			classes.assign( classlist.begin(), classlist.end() ); 

			for(int c = 0 ; c < classlist.size() ; ++c)
			{
				vector<int> class_idx;

				for(int i = 0 ; i < _x.rows ; ++i)
					if( _y(i,0) == classes[c] )
						class_idx.push_back(i);

				indices_for_class.push_back( class_idx );
			}

			for(int c = 0 ; c < indices_for_class.size() ; ++c)
			{
				training_data_for_class.push_back( 
								Mat_<float>(indices_for_class[c].size(), _x.cols) );
			
				for(int i = 0 ; i < indices_for_class[c].size() ; ++i)
					_x.row( indices_for_class[c][i] ).copyTo(
								training_data_for_class[c].row(i) );
			}

			for(int c = 0 ; c < classes.size() ; ++c)
			{
				flann::Index index( training_data_for_class[c], flann::KDTreeIndexParams() );
				Mat_<int> indices( training_data_for_class[c].rows, _k+1 );
				Mat_<float> dists( indices.size() );
				index.knnSearch( training_data_for_class[c], indices, dists, _k+1, flann::SearchParams());
				
				for(int i = 0 ; i < training_data_for_class[c].rows; ++i)
				{
					vector<int> itargets;
					for(int j = 0 ; j < indices.cols;  ++j)
						if(indices(i,j)!=i)
							itargets.push_back( indices_for_class[c][indices(i,j)] );
					targets.push_back(itargets);
				}

			}

			classes.clear();
			indices_for_class.clear();
			training_data_for_class.clear();
			classlist.clear();

			////Build active set (or load it from a file)
			//if( _compute_triplets )
			//	active_set = identify_active_triplets( Mat() );
			//else
			//	active_set = read_active_set_from_file( _triplets_file );
			// 
			//cout << "active elements : " << active_set.size() << endl;
		}

		void dimensionality_control( const Mat_<float> &_training_data )//This should eventually use something else other than plain PCA
		{
			//PCA dimensionality reduction
			if( _training_data.cols <= _max_components )
			{
				do_pca = false;
				_x = _training_data.clone();
				
				_verbose>0 && cout << "NO PCA performed...\n";
			}else
			{
				do_pca = true;
				pca = new PCA( _training_data, Mat() , CV_PCA_DATA_AS_ROW, _max_components );
				pca->project( _training_data, _x );
				_verbose>0 && cout << "Dimensionality reduced from " << 
										_training_data.cols << " to " << 
										_x.cols <<" using PCA" << endl;
			}
			_verbose>0 && cout << "Training data dimensions: " <<
								_x.rows << " X " << _x.cols << endl;

			if(do_scaling)
			{
				scaling_params = compute_scaling_params( _x );
				_x = rescale( scaling_params, _x );
			}
		}

		list<Vec3i> read_active_set_from_file( const string &filename )
		{
			list<Vec3i> triplets;

			//check file
			FILE * fp = fopen (filename.c_str(), "r");
			if (fp == NULL)
			{
				printf ( "cannot open file %s", filename );
				exit(-1);
			}
			fclose(fp);

			Mat_<Vec3i> tripletsmat( (CvMat*)cvLoad(filename.c_str()) );
		
			assert( tripletsmat.rows>0 && tripletsmat.cols>0 );

			for( int i = 0 ; i < tripletsmat.rows; ++i )
				triplets.push_back( tripletsmat(i,0) );

			return triplets;
		}

		void setup_parameters( const Mat_<float> &_training_data, const Mat_<int> &_responses, TestParameters params)
		{
			//////////////////////////////////////////////////////////////////////
			//Copy parameters to their instance equivalents
			/////////////////////////////////////////////////////////////////////
			do_scaling = params.normalize;
			_verbose = params.verbose;
			_solver = params.solver;
			_max_components = params.max_dimensions;
			map_dims[1] = params.poly_order<0 ? params.max_dimensions : params.poly_order;
			_margin = params.margin;
			_k = params.K;
			_compute_triplets = params.compute_triplets;
			
			_triplets_file = !params.triplets_file.length() ? _TRIPLETS_FILE : params.triplets_file;
			_population_size = params.population_size<0 ? _POPULATION_SIZE : params.population_size;

			_generations = params.generations<0 ? _GENERATIONS : params.generations;
			
			/////////////////////////////////////////////////////////////////////
						
			dimensionality_control( _training_data );
			map_dims[0] = _x.cols;
			_y = _responses;

			_verbose > 0 && cout << "map_dims: " << map_dims[0] << " x "<< map_dims[1] << endl;
		}

		list<Vec3i> identify_active_triplets( const Mat_<float> &a )
		{
			cout << "Identifying active triplets...\n";
			
			int i,j,l;
			float z;
			list<Vec3i> triplets;
			int64 to,tf;
			to = getTickCount();

			for( i = 0 ; i < _x.rows ; ++i )
			{
				for( j = 0 ; j < _k ; ++j )
				{
					for( l = 0 ; l < _x.rows ; ++l )
					{
						if( _y(i,0) != _y(l,0) )
						{
							z = _margin +
								d2( map(a,_x.row(i)), map(a,_x.row(targets[i][j])) ) -
								d2( map(a,_x.row(i)), map(a,_x.row(l)) );

							if( z > 0 )
								triplets.push_back( Vec3i(i,j,l) );	
						}
					}
				}
				if( _verbose>2 && !(i%1000) )
				{
					cout << "[i="<<i<<"] triplets size: " << triplets.size() << "- -" ;
				}
			}

			tf = getTickCount();
			cout << "\npast identify_triplets in " <<
				(tf-to)/getTickFrequency() << " seconds\n";
		
			//////////////////////////////////////////////////////////////////
			cout << "Saving active elements to file...\n";
			list<Vec3i>::iterator it = triplets.begin();
			Mat_<Vec3i> tripletsmat( triplets.size(), 1 );

			int counter = 0;
			for( ; it!=triplets.end() ; ++it)
				tripletsmat( counter++, 0 ) = *it;
			cvSave( _triplets_file.c_str(), (void*)&CvMat(tripletsmat) );
			///////////////////////////////////////////////////////////////////

			return triplets;
		}

		Mat_<float> apply_solver(const int &s)
		{
			Mat_<float> a;
			long int to, tf;
			
			if( s == DIFFERENTIALEVOLUTION )
			{
				cout << "DE not implemented...\n";
			}else if( s == CMAES )
			{
				M_CMA cma = M_CMA(map_dims, _population_size, this);
				Mat_<double> seed = get_seed();
		
				cma.solve( _generations, get_seed() );
				a = cma.get_solution();
				
			}else
			{
				cout <<"-s " << _solver << " IS NOT A VALID SOLVER!\n";	
				throw exception("Non-existing solver code in function" 
								" apply_solver(int s) in file nlmknn.h", __LINE__);
			}

			assert( check_mat<float>(a) );

			if( _verbose>0 ) 
			{
				cout << "\nOptimal mapper params (A): \n";
				print_mat<float>(a);
			}

			return a;
		}

		Vec2i map_dims;
	
		bool do_pca;
		bool do_scaling;
		Ptr<PCA> pca;
	
		Mat_<float> _x;
		Mat_<int> _y;
		Mat_<float> scaling_params;

		//auxiliar data structures
		vector<vector<int>> targets;
		list<Vec3i> active_set;
	
		//parameters
		bool _compute_triplets;
		int _verbose, _k, _solver, _max_components,
			_population_size, _generations;
		float _margin;
		string _triplets_file;
	
		//optimal mapping parameter matrix or vector
		Mat_<float> A;
	
		////////////////////////////////////////////////////////////////////////
		//Internal Types
		////////////////////////////////////////////////////////////////////////
		class M_CMA: public CMASolver 
		{
		public:
			 M_CMA( const Vec2i &dimensions, const int &pop, DataMap *m ) : 
					CMASolver( dimensions[0]*dimensions[1], pop ), 
							   callback(m), dims(dimensions){}
			
			//This function is gonna be called by CMASolver once every generation
			//I use it as a venue to update the active_set in approprite times
			virtual ~M_CMA()
			{
				callback = NULL;
			}
			void generationCompleted()
			{

			}
			
			double energyFunction( const Mat_<double> &trial )
			{
				//Some plubming and return the energy of trial
				assert( dims[0]*dims[1] == trial.rows );
	
				Mat_<float> ftrial;
				trial.convertTo( ftrial, CV_32F );
				ftrial = ftrial.reshape( dims[0] );
	
				assert( ftrial.rows*ftrial.cols == dims[0]*dims[1] );
				return (double)callback->map_energy( ftrial );

			}
	
			Mat_<float> get_solution()
			{
				Mat_<float> a;
				solution().convertTo( a, CV_32F );
				return a.reshape( dims[0] );
			}

		private:
			Vec2i dims;
			DataMap *callback;
		};
	};

	//Per-feature polynomial mapping learned on top of the DataMap pipeline.
	class PolyMap : public DataMap
	{
	public:
		PolyMap( const Mat_<float> &_training_data, const Mat_<int> &_responses, const TestParameters params)
		{
			//Use params.verbose: the member _verbose is not initialized until
			//setup_parameters runs (the old code read it uninitialized here).
			params.verbose>0 && cout << "\nInitializing Polynomial Map\n";
			setup_parameters(_training_data, _responses, params );
			train();
			cout << "Polynomial Map training completed...\n";
		}

	protected:
		//x(i,j) -> x(i,j) + sum_k a(j,k) * x(i,j)^k, for k in [0, map_dims[1])
		Mat_<float> map( const Mat_<float> &a, const Mat_<float> &x )
		{
			if(a.cols*a.rows == 0)
				return x;	//no parameters yet: identity map
			int i,j,k;
			Mat_<float> mx = x.clone(); //mapped x
			for(i = 0 ; i < mx.rows ; ++i)
				for(j = 0 ; j < mx.cols ; ++j)
					for(k = 0 ; k < map_dims[1] ; ++k)
						mx(i,j) += a(j,k)*pow( x(i,j), k );
			return mx;
		}
		//Start the optimization from the zero polynomial (identity mapping).
		Mat_<double> get_seed()
		{
			return Mat_<double>::zeros( map_dims[0] * map_dims[1], 1);
		}
	};

	//Spline-based mapping. NOTE: the map() below is currently identical to
	//PolyMap's polynomial mapping; the spline evaluation is not implemented yet.
	class SplineMap:public DataMap
	{
	public:
		SplineMap( const Mat_<float> &_training_data, const Mat_<int> &_responses, const TestParameters params)
		{
			//Use params.verbose: the member _verbose is not initialized until
			//setup_parameters runs (the old code read it uninitialized here).
			params.verbose>0 && cout << "\nInitializing Spline Map\n";
			setup_parameters(_training_data, _responses, params);
			train();
			cout << "Spline Map training completed...\n";
		}

	protected:
		//x(i,j) -> x(i,j) + sum_k a(j,k) * x(i,j)^k, for k in [0, map_dims[1])
		Mat_<float> map( const Mat_<float> &a, const Mat_<float> &x )
		{
			if(a.cols*a.rows == 0)
				return x;	//no parameters yet: identity map
			int i,j,k;
			Mat_<float> mx = x.clone(); //mapped x
			for(i = 0 ; i < mx.rows ; ++i)
				for(j = 0 ; j < mx.cols ; ++j)
					for(k = 0 ; k < map_dims[1] ; ++k)
					{
						mx(i,j) += a(j,k)*pow( x(i,j), k );
					}
			return mx;
		}
		//Start the optimization from the zero map (identity mapping).
		Mat_<double> get_seed()
		{
			return Mat_<double>::zeros( map_dims[0] * map_dims[1], 1);
		}
	};

	//Large-Margin Nearest Neighbor: learns a linear transform x -> x*A.
	class LMNN:public DataMap
	{

	public:

		LMNN( const Mat_<float> &_training_data, const Mat_<int> &_responses, const TestParameters params )
		{
			cout << "\nInitializing LMNN\n";
			//(removed two dead locals that recomputed values setup_parameters
			//already derives from params)
			setup_parameters(_training_data, _responses, params);
			train();
			cout << "LMNN training completed...\n";
		}

	protected:

		//Linear map; map_dims[1] (the "rank") sets the output dimension.
		Mat_<float> map(const Mat_<float> &a, const Mat_<float> &x)
		{
			if(a.cols*a.rows == 0)
				return x;	//no parameters yet: identity map
			//Pin the type: 'auto' here deduced the lazy cv::MatExpr proxy
			Mat_<float> mx = x*a;	//mapped x
			return mx;
		}

		//Seed with a (flattened) identity transform.
		Mat_<double> get_seed()
		{
			return Mat( Mat_<double>::eye( map_dims[0], map_dims[1] ) ).reshape( 1, map_dims[0]*map_dims[1] );
		}
	};

	
	//Trains the mapping algorithm selected by args.map_algorithm and returns a
	//map of error counts for plain and mapped KNN/SVM on both the training set
	//and the test set (split off the training data when no test file is given).
	map<string,int> base_test( TestParameters args )
	{

		map<string,int> errors;
		Mat_<float> samples, test_samples;
		Mat_<int> responses, test_responses;

		read_from_svmlight_formated_file( args.training_file.c_str(), samples, responses );
		if(args.test_file.length())
			read_from_svmlight_formated_file( args.test_file.c_str(), test_samples, test_responses );
		else
			split_data_set(samples, responses, test_samples, test_responses, 0.8);

		//first we need to assure test samples have same dimensions as the training samples
		if( samples.cols < test_samples.cols )
		{
			//truncate extra test columns
			test_samples = test_samples( Range(0,test_samples.rows), Range(0,samples.cols) ); 
		}else if(samples.cols > test_samples.cols)
		{
			//zero-pad missing test columns
			Mat_<float> expansion(test_samples.rows, samples.cols);
			expansion = 0;
			test_samples.copyTo( expansion( Rect(Point(0,0), test_samples.size())) );
			test_samples = expansion;
		}
		
		//check
		cout << "Training ..." << endl;
		assert( test_samples.cols == samples.cols );			
		
		WrapKNN knn( samples, responses, args.K );
		WrapSVM svm( samples, responses );
			
		//Ptr<> releases the mapper on every exit path; the raw new/delete pair
		//leaked if anything below threw before the delete.
		Ptr<DataMap> m;
		switch(args.map_algorithm)
		{
		case 1:
			m = new PolyMap( samples, responses, args );
			break;
		case 2:
			m = new LMNN( samples, responses, args);
			break;
		default:
			//algorithm 3 (Wormholing) is not implemented; fall back to PolyMap
			m = new PolyMap( samples, responses, args );
		}

			
		WrapKNN mknn( m->apply(samples), responses, args.K );
		WrapSVM msvm( m->apply(samples), responses );

		//Training error
		int knn_error = 0, mknn_error = 0, svm_error = 0, msvm_error = 0;
		int prediction_knn, prediction_mknn, prediction_svm, prediction_msvm;

		for(int i = 0 ; i < samples.rows ; ++i)
		{
			prediction_knn = knn( samples.row(i) );
			prediction_mknn = mknn( m->apply(samples.row(i)) );
			prediction_svm = svm( samples.row(i) );
			prediction_msvm = msvm( m->apply(samples.row(i)) );
				
			if( prediction_knn != responses(i, 0) )
				++knn_error;
		
			if( prediction_mknn != responses(i, 0) )
				++mknn_error;

			if( prediction_svm != responses(i, 0) )
				++svm_error;

			if( prediction_msvm != responses(i, 0) )
				++msvm_error;
		}
		
		errors.insert( make_pair("knn on training data", knn_error) );
		errors.insert( make_pair("mapped knn on training data", mknn_error) );
		errors.insert( make_pair("svm on training data", svm_error) );
		errors.insert( make_pair("mapped svm on training data", msvm_error) );
		//testing
			
		knn_error = mknn_error = svm_error = msvm_error = 0;
		for(int i = 0 ; i < test_samples.rows ; ++i)
		{
			prediction_knn = knn( test_samples.row(i) );
			prediction_mknn = mknn( m->apply(test_samples.row(i)) );
			prediction_svm = svm( test_samples.row(i) );
			prediction_msvm = msvm( m->apply(test_samples.row(i)) );

			if( prediction_knn != test_responses(i, 0) )
				++knn_error;
		
			if( prediction_mknn != test_responses(i, 0) )
				++mknn_error;

			if( prediction_svm != test_responses(i, 0) )
				++svm_error;

			if( prediction_msvm != test_responses(i, 0) )
				++msvm_error;
		}
		
		//m is released automatically by Ptr<DataMap>

		errors.insert(make_pair("knn on test data",knn_error));
		errors.insert(make_pair("mapped knn on test data",mknn_error));
		errors.insert(make_pair("svm on test data",svm_error));
		errors.insert(make_pair("mapped svm on test data",msvm_error));

		errors.insert(make_pair("number of training samples",responses.rows));
		errors.insert(make_pair("number of test samples",test_responses.rows));
		
		return errors;
	}

	//CLI entry point: parses the command line into TestParameters, runs
	//base_test and prints training/testing error tables. Returns 0 on success.
	int test_datamap_classification(int argc, char **argv)
	{

		CmdLine cmd("Test data mapping for classification algorithms.", ' ', "1.0");

		SwitchArg compute_triplets("c", "comp_triplets", "If present the program will"
														"compute the list of elements"
														" of the training data needed"
														" for cost computation", cmd, true);
		
		ValueArg<int> verbose("v", "verbose", "Verbose level", false, 1, "integer", cmd);
		ValueArg<float> margin("m", "margin", "Margin parameter of the cost function.",
								false, -1, "float", cmd);

		ValueArg<int> K("k", "K", "Number of neighbors in the KNN algorithm.",
							false, 3, "integer", cmd);

		ValueArg<int> max_dimensions("i", "max_dimensions", "If the feature vectors are of dimension greater"
										"than this, the dimension will be truncated using PCA.",
										false, -1, "integer", cmd);

		ValueArg<int> solver("s", "solver", "Which minimization algorithm to use: 3=DE;4=CMAES.",
								false, 4, "integer", cmd);

		ValueArg<int> poly_order("d","degree", "Degree of the mapping polynomial or rank of the LMNN map.",
									false, 3, "integer", cmd);

		ValueArg<int> population_size("p","population_size","Size of the random population.",
										false, 30, "integer", cmd);

		ValueArg<int> generations("g","generations","Number of optimization generations.",
									false, 90, "integer", cmd);

		ValueArg<string> triplets_file( "a", "active_triplets_file", "Self explanatory...",
										false, "active_elements.xml", "string", cmd);

		ValueArg<string> training_file("t", "training_file", "Training file in the svmlight format.",
										false,"C:\\Development\\datasets\\australian.txt", "string", cmd);

		ValueArg<string> test_file("e", "test_file", "Test file in the svmlight format."
													" If this is not provided a fraction"
													"of the training data will be used as"
													"test data.", false,"C:\\Development\\datasets\\australian.txt", "string", cmd);

		ValueArg<string> log_file("", "log_file", "Path to the output log file.", false,
			"", "string", cmd);

		ValueArg<int> map_algorithm("", "algorithm", "Which mapping algorithm to test:"
										"1 PolyMap, 2 LMNN, 3 Wormholing.", false, 1, "integer", cmd);

		SwitchArg normalize("", "normalize", "If present data normalization will be applied.", cmd, false);

		try
		{
			cmd.parse(argc,argv);
		}catch( ArgException &ae )
		{
			cout << "Argument parsing exception: " << ae.what() << endl;
			return -1;	//don't proceed with half-parsed arguments
		}

		TestParameters params;

		params.compute_triplets = compute_triplets.getValue();
		params.generations = generations.getValue();
		params.K = K.getValue();
		params.log_file = log_file.getValue();
		params.map_algorithm = map_algorithm.getValue();
		params.margin = margin.getValue();
		params.max_dimensions = max_dimensions.getValue();
		params.normalize = normalize.getValue();
		params.poly_order = poly_order.getValue();
		params.population_size = population_size.getValue();
		params.solver = solver.getValue();
		params.test_file = test_file.getValue();
		params.training_file = training_file.getValue();
		params.triplets_file = triplets_file.getValue();
		params.verbose = verbose.getValue();

		try
		{
			auto errors = base_test( params );
			
			cout << "TRAINING ERROR:---------------------------------------------------\n";			
			cout << "KNN error: " << errors["knn on training data"] << "/" << errors["number of training samples"] <<"( " <<
				float(errors["knn on training data"])/errors["number of training samples"] * 100 << "% )" << endl;
			cout << "PolyMap-KNN error: " << errors["mapped knn on training data"] << "/" << errors["number of training samples"] << "( " <<
				float(errors["mapped knn on training data"])/errors["number of training samples"] * 100 << "% )" << endl;
			cout << "SVM error: " << errors["svm on training data"] << "/" << errors["number of training samples"] <<"( " <<
				float(errors["svm on training data"])/errors["number of training samples"] * 100 << "% )" << endl;
			cout << "PolyMap-SVM error: " << errors["mapped svm on training data"] << "/" << errors["number of training samples"] <<"( " <<
				float(errors["mapped svm on training data"])/errors["number of training samples"] * 100 << "% )" << endl;
			cout << "------------------------------------------------------------------\n";

			cout << "TESTING ERROR: ---------------------------------------------------\n";
			cout << "KNN error: " << errors["knn on test data"] << "/" << errors["number of test samples"] <<"( " <<
				float(errors["knn on test data"])/errors["number of test samples"] * 100 << "% )" << endl;
			cout << "PolyMap-KNN error: " << errors["mapped knn on test data"] << "/" << errors["number of test samples"] << "( " <<
				float(errors["mapped knn on test data"])/errors["number of test samples"] * 100 << "% )" << endl;
			cout << "SVM error: " << errors["svm on test data"] << "/" << errors["number of test samples"] <<"( " <<
				float(errors["svm on test data"])/errors["number of test samples"] * 100 << "% )" << endl;
			//fixed: the count printed here used the plain-SVM key while the
			//percentage used the mapped-SVM key
			cout << "PolyMap-SVM error: " << errors["mapped svm on test data"] << "/" << errors["number of test samples"] <<"( " <<
				float(errors["mapped svm on test data"])/errors["number of test samples"] * 100 << "% )" << endl;
			cout << "------------------------------------------------------------------\n";
				
		}catch( exception &e )
		{
			cout << "Caught something: " << e.what() << endl;
		}

		return 0;
	}

	//Grid experiment: sweeps datasets x scaling x K x margin x population x
	//degree x generations, running base_test for each combination and logging
	//the error maps to the file given by --log_file.
	int grid_test(int argc, char **argv)
	{

		CmdLine cmd("Grid Experiment for Data mapping algorithms.",' ', "1.0");
		ValueArg<string> log_file("l", "log_file", "Logfile definition.",
									false, "grid_exp.txt", "string");

		ValueArg<int> algorithm("a","algorithm","Which algorithm to exercise:"
			          " 1 PolyMap, 2 LMNN, 3 Wormholing.", false, 1, "integer");
		
		cmd.add( log_file );
		cmd.add( algorithm );

		try
		{
			cmd.parse(argc,argv);
		}catch( ArgException &ae )
		{
			cout << "Argument parsing exception: " << ae.what() << endl;
		}

		ofstream log( log_file.getValue().c_str() );

		vector<string> datasets;

		datasets.push_back("C:\\Development\\datasets\\heart.txt");
		datasets.push_back("C:\\Development\\datasets\\liver-disorders.txt");

		datasets.push_back("C:\\Development\\datasets\\diabetes.txt");
		datasets.push_back("C:\\Development\\datasets\\australian.txt");
		
		datasets.push_back("C:\\Development\\datasets\\svmguide4b.txt");
		datasets.push_back("C:\\Development\\datasets\\splice.txt");

		vector<bool> scale(2,false);
		scale[0] = true;
		scale[1] = false;
		
		vector<int> ks(2,0);
		ks[0] = 3;
		ks[1] = 5;

		vector<float> margins(2,0);
		margins[0] = 0.0;
		//margins[1] = 0.1;
		//margins[2] = 1.0;
		margins[1] = 10.0;

		vector<int> populations(1,0);
		populations[0] = 20;
		//populations[1] = 110;

		vector<int> generations(5,0);
		generations[0] = 30;
		for(int i = 1 ; i < generations.size() ; ++i)
			generations[i] = 100*i;

		vector<int> ds(3,0);
		ds[0] = 2;
		ds[1] = 3;
		ds[2] = 4;

		TestParameters params;

		if( log.is_open() )
		{
			log << "Grid PolyMap Experiment\n";
			int counter = 0;
			for(int i = 0 ; i < datasets.size() ; ++i)
				for(int s = 0 ; s < 2 ; ++s)
					for(int j = 0 ; j < ks.size() ; ++j)
						for(int l = 0 ; l < margins.size() ; ++l)
							for(int m = 0 ; m < populations.size() ; ++m)
								for(int p = 0 ; p < ds.size() ; ++p)
									for(int n = 0 ; n < generations.size() ; ++n)
									{
										cout << "------------------------------------------------------------------------------------\n";
										log << "ITERATION[" << counter++ << "] : ";

										map<string,int> errors;

										params.generations = generations[n];
										params.K = ks[j];
										params.normalize = scale[s] ;
										params.training_file = datasets[i] ;
										params.population_size = populations[m] ;
										params.margin = margins[l];
										params.poly_order = ds[p];

										try
										{
											errors = base_test( params );
										}catch(exception &e)
										{
											log << "Exception."<< e.what() << datasets[i] <<
												" " << ks[j] << " " << margins[l] <<
												" " << populations[m] << " " << ds[p] << 
												" " << generations[n] << endl;
											continue;
										}

										log << datasets[i] << " : " << ks[j] << ", " << margins[l] <<
											", " << populations[m] << ", " << ds[p] << 
											", " << generations[n] << "=" ;
										
										cout << datasets[i] << " " << ks[j] << " " << margins[l] <<
											" " << populations[m] << " " << ds[p] << 
											" " << generations[n] << "=" ;

										//fixed: this loop incremented 'i' (the outer dataset
										//index) instead of 'it', never advancing and
										//corrupting the outer iteration
										for(auto it = errors.begin(); it != errors.end() ; ++it){
											log << it->first << ":"<< it->second << ", ";
											cout << it->first << ":"<< it->second << ", ";
										}

										log << endl;
										cout << endl;

									}
		}
		log.close();
		return 0;
	}
}