namespace il
{
	//WormHoling: a learned, localized displacement of the feature space.
	//Training points the base classifier gets wrong become "worm holes"; each
	//hole carries a traverse vector pointing toward the nearest correctly
	//classified point of its own class, and a radius (optimized by CMA-ES)
	//controlling how strongly nearby samples are pulled along that vector.
	class WormHoling
	{
	public:
		//samples/responses: training data (stored by Mat header copy, so they
		//share memory with the caller's matrices); classifier: an ALREADY
		//trained classifier, used to decide which training points are
		//misclassified. Negative k/generations/population select the defaults
		//3/100/100.
		WormHoling( Mat_<float> &samples, Mat_<int> &responses, IClassifier *classifier,//cl should be an ALREADY trained classifier
			int k=-1, int generations=-1 , int population = -1):cl(*classifier)
		{
			_k = k<0?3:k;
			_generations = generations < 0 ? 100: generations;
			_population = population < 0 ? 100 : population;
			_x = samples;
			_y = responses;
			
			//auxiliary KNN over the whole training set, used below to gather
			//each sample's neighborhood
			knn =  new KNearest( _x, _y, Mat(), false, _k ) ;

			define_worm_holes();
			
			define_traverses();
			define_radii();
		}
		//NOTE(review): explicitly clearing the containers is redundant — their
		//destructors do the same — but harmless.
		~WormHoling()
		{
			_wh.clear();
			_tv.clear();
			whereto_set.clear();
			targets.clear();
			impostors.clear();
		}
		//Applies the learned displacement, using the optimized radii _r, to
		//every row of x. Returns the displaced copy; x itself is not modified.
		Mat_<float> operator()(Mat_<float> &x)
		{
			return apply(x);
		}
		Mat_<float> apply(Mat_<float> &x)
		{
			return apply( _r, x );
		}

	protected:
		//////////////////////////////////////////////////
		//VARIABLES and TYPES
		//////////////////////////////////////////////////
		int _k, _generations,_population; //neighborhood size; CMA-ES generations and population
		Mat_<float> _x; //training samples, one row per sample
		Mat_<int> _y; //training labels, one row per sample

		Mat_<double> _r; //worm holes radii 
		vector<int> _wh; //worm holes centers (row indexes into _x)
		vector<Mat_<float>> _tv; //traverse vectors, one per worm hole

		vector<int>whereto_set;//set of possible worm hole destinations (correctly classified rows)
		vector<vector<Mat_<float>>> targets; //per worm hole: its same-class nearest neighbors
		vector<vector<Mat_<float>>> impostors; //per worm hole: its differently-labeled neighbors
		Ptr<KNearest> knn; //KNN trained on the full (_x,_y) set
		IClassifier &cl; //the externally trained base classifier
		//////////////////////////////////////////////////////////////////////////////
		//METHODS
		/////////////////////////////////////////////////////////////////////////////
		//Displaces each row of x: find its nearest worm hole, then add that
		//hole's traverse vector weighted by exp(-dist/sigma^2), where sigma is
		//the hole's radius (sigma holds one entry per worm hole).
		//NOTE(review): dist is whatever d2 returns — presumably the squared
		//euclidean distance; confirm against d2's definition.
		Mat_<float> apply( const Mat_<double> &sigma, const Mat_<float> &x)
		{
			Mat_<float> mx = x.clone();
			double dist,s2,temp;

			for(int i = 0 ; i < x.rows ; ++i)
			{
				//linear scan for the nearest worm hole to sample i
				dist = numeric_limits<double>::max();
				int idx = 0;
				for(int k = 0 ; k < _wh.size() ; ++k)
				{
					temp = d2(x.row(i),_x.row(_wh[k]));
					if(temp < dist)
					{
						idx = k;
						dist = temp;
					}
				}

				s2 = sigma(idx,0);
				s2 *= s2;
				//s2!=s2 is the classic NaN self-comparison test
				if(s2!=s2)
					cout << "s2 is NAN. idx="<<idx<< " sigma.rows:"<<sigma.rows << " sigma.cols"<< sigma.cols<< endl;
				if(s2<1e-15)
					continue; //radius ~0: this hole displaces nothing
				else
					mx.row(i) = mx.row(i) + exp(-dist/s2)*_tv[idx];
			}

			return mx;
		}

		//Simply, worm holes are the training points that are misclassified.
		//NOTE(review): despite the old comment saying "by knn", the
		//misclassification test below uses cl, the external classifier; knn
		//only supplies the neighborhood from which impostors are taken.
		//Side effects: fills _wh, impostors, targets and whereto_set.
		void define_worm_holes()
		{
			cout << "Defining worm holes...\n";
			vector<int> wormholes;
			vector<int> negative_wormholes; //NOTE(review): declared but never used
			for( int i = 0; i < _x.rows ; ++i )
			{
				vector<Mat_<float>> iimpostors;
				const float **neighbors = new const float*[_k];
				Mat_<float> neigh_resp = Mat_<float>(1,_k);
				
				//the k nearest training points of sample i and their labels
				knn->find_nearest( _x.row(i), _k, 0, neighbors, &neigh_resp);
				float response = cl(_x.row(i));

				if( _y(i,0)!= int(response) )
				{
					wormholes.push_back(i);
					//Identify impostors: neighbors carrying a different label.
					//The Mat header is aliased onto the neighbor's buffer and
					//immediately cloned so the stored copy owns its memory.
					for(int k = 0 ; k < _k ; ++k)
						if(_y(i,0)!=int(neigh_resp(0,k)))
						{
							Mat_<float> imp(1,_x.cols);
							imp.data = (uchar *)neighbors[k];
							iimpostors.push_back(imp.clone());
							assert(check_mat<float>(iimpostors.back()));
						}
					impostors.push_back(iimpostors);

					assert( neigh_resp.rows*neigh_resp.cols );
					assert( check_mat<float>(neigh_resp) );
				}
				else
					whereto_set.push_back(i);//this is gonna be used to compute the traverse vectors
				delete[] neighbors;
			}
			cout << "No of worm holes: "<< wormholes.size() << endl;
			
			cout << "Identify worm holes'targets\n";
			
			//one KNN per class, so queried neighbors are guaranteed to share
			//the worm hole's class
			vector<Ptr<KNearest>> knn_for_class;
			vector<int> classes;

			train_auxiliar_knns(_k,knn_for_class,classes,_x,_y);
			
			for( int k = 0 ; k < wormholes.size() ; ++k)
			{
				vector<Mat_<float>> itargets;

				//what is the class index c for the class of _x.row(i) (_y(i,0))?
				//NOTE(review): this scan assumes the label is present in
				//classes (true by construction); otherwise c would run past
				//the end of the vector.
				int c = 0;
				while(true)
				{
					if(	_y(wormholes[k],0)==classes[c] )
						break;
					++c;
				}

				//check
				assert(classes[c]==_y(wormholes[k],0));
				
				const float **neighbors = new const float*[_k];

				//the k nearest same-class points: this worm hole's targets
				float response = knn_for_class[c]->find_nearest( _x.row(wormholes[k]),
															_k, (Mat *)0, 
															neighbors );
				
				//check
				assert(abs(response-classes[c])<1e-8);

				//record 
				//NOTE(review): this inner k shadows the outer loop's k;
				//correct here, but fragile under maintenance.
				for(int k = 0 ; k < _k ; ++k)
				{
					Mat_<float> targ(1, _x.cols);
					targ.data = (uchar *)neighbors[k];
					itargets.push_back( targ.clone() );
					assert( check_mat<float>(itargets.back()) );
				}

				targets.push_back( itargets );
				
				delete[] neighbors;
			}
			knn_for_class.clear();
			classes.clear();
			_wh = wormholes;
		}

		//Finds the nearest correctly classified point from the same class 
		//for each wormhole and then defines a traverse vector toward it.
		//Fills _tv, parallel to _wh.
		void define_traverses()
		{
			cout << "Defining traverse vectors...\n";
			vector<Mat_<float>> traverse_vectors;
			
			int ex_k = 1; //only the single nearest destination is needed
			vector<int> classes;
			vector<Ptr<KNearest>> knn_for_class;

			//gather the correctly classified rows (the candidate destinations)
			//and their labels into contiguous matrices
			Mat_<float> whereto_matrix( whereto_set.size(), _x.cols);
			Mat_<int> whereto_responses( whereto_set.size(), 1);
			
			for(int i = 0 ; i < whereto_set.size(); ++i)
			{
				_x.row(whereto_set[i]).copyTo(whereto_matrix.row(i));
				whereto_responses(i,0) = _y(whereto_set[i],0);
			}

			train_auxiliar_knns( ex_k, knn_for_class,
								 classes, whereto_matrix,
								 whereto_responses );

			for(int k = 0 ; k < _wh.size() ; ++k)
			{
				//class index of this worm hole (same unbounded scan as in
				//define_worm_holes — see the NOTE there)
				int c = 0;
				while(true)
				{
					if(	_y(_wh[k],0)==classes[c] )
						break;
					++c;
				}

				//check
				assert( classes[c]==_y(_wh[k],0) );

				const float **neighbors = new const float*[ex_k];
				knn_for_class[c]->find_nearest( _x.row(_wh[k]), ex_k, (Mat*)0, neighbors );

				//non-owning Mat header over the neighbor's buffer
				Mat_<float> whereto( 1, _x.cols, (float *)neighbors[0], _x.cols*sizeof(float) );

				//traverse = destination - worm hole center (the subtraction
				//materializes a new Mat, so the alias above may be freed)
				traverse_vectors.push_back( Mat_<float>(whereto - _x.row(_wh[k])) );
				delete[] neighbors;
				
			}
			knn_for_class.clear();
			classes.clear();
			_tv = traverse_vectors;
		}

		//Optimizes one radius per worm hole with CMA-ES, starting from an
		//all-ones vector; the result is stored in _r.
		void define_radii()
		{
			Mat_<double> s( _wh.size(), 1 );
			s = 1;

			Optimizer opt( s.rows, _population, this);
			opt.solve( _generations, s);

			_r = opt.solution();
		}

		//LMNN-style energy evaluated by the CMA-ES solver on a trial radii
		//vector a: accumulate the (transformed) distance from each worm hole
		//to its targets (pull term), plus a hinge penalty
		//max(0, 1 + d2(target) - d2(impostor)) whenever an impostor is not at
		//least a unit margin further away than the target (push term). The
		//final 1/||a|| term discourages driving all radii to zero.
		double radii_energy(const Mat_<double> &a)
		{
			int k, j, l;
			float c = 0, z, w, v;
			Mat_<float> xk;
			for( k = 0 ; k < _wh.size() ; ++k )
			{
				//worm hole center after applying the trial transform
				apply(a,_x.row(_wh[k])).copyTo(xk);
				for( j = 0 ; j < targets[k].size() ; ++j ) 
				{
					w = d2( xk, apply(a,targets[k][j]));
					c += w; //pull same-class targets close
					
					for(l = 0 ; l < impostors[k].size() ; ++l)
					{
						v = d2( xk, apply( a, impostors[k][l]) );
						z = 1 + w -	v; //margin violation of impostor l
						assert(check_number<float>(z));
						if(z>0)
							c += z; //impostor inside the margin: penalize
					}
				}
			}
			double t = norm(a);
			t = t>0?1./t:0;
			return c+t;
		}
		
		//...
		//Define a CMA-ES optimizer for my energy function
		class Optimizer: public CMASolver
		{
		public:
			//callback is not owned; it must outlive the optimizer.
			Optimizer(int dim, int pop, WormHoling *wh):CMASolver(dim,pop), callback(wh){}
			//Called by CMASolver for every trial radii vector.
			double energyFunction(const Mat_<double> &trial)
			{
				return callback->radii_energy(trial);
			}
			//Per-generation hook; the debug print is intentionally disabled.
			void generationCompleted()
			{
				/*cout << "current raddii:\n";
				print_mat<double>(this->bestSolution.t());*/
			}
		protected:
			WormHoling *callback; //owning WormHoling instance (not owned here)
		};

		//Builds one KNearest per distinct class, each trained only on that
		//class's rows, so querying classifier c can only return class-c
		//neighbors. Outputs: knn_for_class (parallel to classes) and classes
		//(sorted distinct labels). The _k/_x/_y parameters intentionally
		//shadow the members so the helper can also run on other data sets
		//(see define_traverses).
		void train_auxiliar_knns(int _k, vector<Ptr<KNearest>> &knn_for_class,
								vector<int> &classes, Mat_<float> &_x, Mat_<int> &_y)
		{
			vector<vector<int>> indexes_for_class;
			vector<Mat_<float>> training_data_for_class;
			vector<Mat_<int>> responses_for_class;
			list<int> classlist;

			for(int i = 0 ; i < _y.rows ; ++i)
				classlist.push_back( _y(i,0) );

			//distinct labels, ascending
			classlist.sort();
			classlist.unique();

			//and just because is nice to be able to index in the following loop:
			classes.assign( classlist.begin(), classlist.end() ); 

			//collect the row indexes belonging to each class
			for(int c = 0 ; c < classlist.size() ; ++c)
			{
				vector<int> class_idx;

				for(int i = 0 ; i < _x.rows ; ++i)
					if( _y(i,0) == classes[c] )
						class_idx.push_back( i );

				indexes_for_class.push_back( class_idx );
			}

			//per-class training matrices and (constant-label) response vectors
			for(int c = 0 ; c < indexes_for_class.size() ; ++c)
			{
				training_data_for_class.push_back( 
								Mat_<float>(indexes_for_class[c].size(), _x.cols) );
				responses_for_class.push_back( 
								classes[c]*Mat_<int>::ones(indexes_for_class[c].size(), 1) );
			
				for(int i = 0 ; i < indexes_for_class[c].size() ; ++i)
					_x.row( indexes_for_class[c][i] ).copyTo(training_data_for_class[c].row(i) );
			}

			for(int c = 0 ; c < classes.size() ; ++c)
				knn_for_class.push_back( Ptr<KNearest>(new KNearest(training_data_for_class[c],
									responses_for_class[c],
									Mat(), false, _k)) );

			training_data_for_class.clear();
			responses_for_class.clear();
			indexes_for_class.clear();

			return;
		}
		
	};

	int test_wormholing(int argc, char **argv)
	{
		try
		{
			CmdLine cmd("This program compares standard knn with and without the use of wormholing.",' ', "0.1");
			ValueArg<int> classifier_code("","classifier_code","Which classifier should be combined with Worm Holing(1 SVM or 2 KNN)?",false,1,"integer");
			ValueArg<int> verbose("v","verbose","Verbose level",false,1,"integer");
			ValueArg<int> K("k","K","Number of neighbors in the KNN algorithm.",false,5,"integer");
			ValueArg<int> solver("s","solver","Which minimization algorithm to use: 3=DE;4=CMAES.",
				false,4,"integer");
			ValueArg<int> population_size("p","population_size","Size of the random population.",false,30,"integer");
			ValueArg<int> generations("g","generations","Number of optimization generations.",false,30,"integer");
			ValueArg<string> training_file("t", "training_file", "Training file in the svmlight format.", false,
				"C:\\Development\\datasets\\usps", "string");
			ValueArg<string> test_file("e", "test_file", "Test file in the svmlight format.", false,
				"C:\\Development\\datasets\\usps.t", "string");

			cmd.add(classifier_code);
			cmd.add(verbose);
			cmd.add(K);
			cmd.add(solver);
			cmd.add(population_size);
			cmd.add(generations);
			cmd.add(training_file);
			cmd.add(test_file);

			cmd.parse(argc,argv);

			Mat_<float> samples, val_samples, test_samples;
			Mat_<int> responses, val_responses, test_responses;

			read_from_svmlight_formated_file( training_file.getValue().c_str(), samples, responses );
			read_from_svmlight_formated_file( test_file.getValue().c_str(), test_samples, test_responses );
			
			//first we need to assure test samples have same dimensions as the training samples
			if(samples.cols < test_samples.cols)
			{
				test_samples = test_samples( Range(0,test_samples.rows), Range(0,samples.cols) ); 
			}else if(samples.cols > test_samples.cols)
			{
				Mat_<float> expansion(test_samples.rows, samples.cols);
				expansion = 0;
				test_samples.copyTo( expansion( Rect(Point(0,0), test_samples.size() ) ) );
				test_samples = expansion;
			}

			//check
			assert( test_samples.cols == samples.cols );

			split_data_set(test_samples,test_responses,val_samples,val_responses, 0.95);

			assert( check_mat<float>(test_samples) );
			assert( check_mat<int>(test_responses) );
			assert( check_mat<float>(val_samples) );
			assert( check_mat<int>(val_responses) );

			WrapSVM svm( samples, responses, 0.05 );
			WrapKNN knn( samples, responses, K.getValue() );
			
			IClassifier *cl = classifier_code.getValue()==1? (IClassifier *)&svm:(IClassifier *)&knn;
			
			cout << "Wormholing computation ...\n";
			WormHoling wh(	val_samples, val_responses, cl,
							K.getValue(), generations.getValue(),
							population_size.getValue()
							);			
			
			//Training error
	
			float tol = 0.1;
			int error = 0, standardknn_error = 0, svm_error = 0;
			for(int i = 0 ; i < samples.rows ; ++i)
			{
				float prediction = knn( wh(samples.row(i)) );
				float prediction_standardknn = knn( samples.row(i));
				float prediction_svm = svm.predict( samples.row(i) );

				if( abs( prediction - responses(i, 0) ) > tol )
					++error;
		
				if( abs( prediction_standardknn - responses(i, 0) ) > tol )
					++standardknn_error;

				if( abs( prediction_svm - responses(i, 0) ) > tol )
					++svm_error;
			}

			cout << "TRAINING ERROR:---------------------------------------------------\n";
			cout << "( WormHoling )Error: " << error << "/" << samples.rows << "( " <<
				float(error)/samples.rows * 100 << "% )" << endl;
	
			cout << "Standard KNN error: " << standardknn_error << "/" << samples.rows <<"( " <<
				float(standardknn_error)/samples.rows * 100 << "% )" << endl;

			cout << "SVM error: " << svm_error << "/" << samples.rows <<"( " <<
				float(svm_error)/samples.rows * 100 << "% )" << endl;
			cout << "------------------------------------------------------------------\n";
		
			//TEST
			cout << "Testing...\n";

			tol = 0.1;
			error = 0; standardknn_error = 0; svm_error = 0;

			for(int i = 0 ; i < test_samples.rows ; ++i)
			{
				float prediction = knn( wh(test_samples.row(i)));
				float prediction_standardknn = knn( test_samples.row(i));
				float prediction_svm = svm.predict( test_samples.row(i) );

				if( abs( prediction - test_responses(i, 0) ) > tol )
					++error;
		
				if( abs( prediction_standardknn - test_responses(i, 0) ) > tol )
					++standardknn_error;

				if( abs( prediction_svm - test_responses(i, 0) ) > tol )
					++svm_error;
			}

			cout << "TESTING ERROR: ---------------------------------------------------\n";
			cout << "( WormHoling )Error: " << error << "/" << test_samples.rows << "( " <<
				float(error)/test_samples.rows * 100 << "% )" << endl;
	
			cout << "Standard KNN error: " << standardknn_error << "/" << test_samples.rows <<"( " <<
				float(standardknn_error)/test_samples.rows * 100 << "% )" << endl;

			cout << "SVM error: " << svm_error << "/" << test_samples.rows <<"( " <<
				float(svm_error)/test_samples.rows * 100 << "% )" << endl;
			cout << "------------------------------------------------------------------\n";
			SVMParams svmparams = svm.get_params();
			cout << "Optimal svm params(through grid search): svm C = " << svmparams.C << "\n" << "svm gamma=" << svmparams.gamma <<endl;
			
		}catch( ArgException &ae)
		{
			cout << "Argument parsing exception: " << ae.what() << endl;
		}	
		return 0;
	}
}