#include "KNNGeneralizer.h"

#include <cmath>
#include <cstdio>
#include <cstring>

#include "MREAgent.h"
using namespace std; 

//debugging info: for detecting memory leaks
#include "Util.h"
#ifdef _DEBUG
   #define new MYDEBUG_NEW
#endif


#define USE_METRIC_LEARNING

KNNGeneralizer::~KNNGeneralizer(void)
{
#ifdef USE_METRIC_LEARNING
	delete metricLearner; 
#endif

	//free all per-(action,dimension) trees, point arrays and target arrays
	resetTrees(); 

	//these three are allocated in the constructor and nothing else frees them;
	//their deletes had been commented out, leaking them on every destruction
	delete[] nnIdx; 
	delete[] dists; 
	delete[] dataSize; 

	if (otf)
		delete otf; 
}


KNNGeneralizer::KNNGeneralizer(MREAgent *p, int an, int od, taskspec_t &spec)
:learners(an,od), dataPoints(an,od), targets(an,od), reducedDimensions(an,od), TFGenerator(p,an,od,spec)
, MatlabDrawer("KNNGeneralizer")
{
	//KNN parameters: neighborhood size, ANN search error bound, and the
	//(squared) bandwidth of the Gaussian kernel used for weighting
	K = 3; 
	epsilon = 0.05; 
	sigma2 = 0.5*0.5; 

	//plot layout: row 1 draws knownness, row 2 the raw points, row 3 the
	//transformed points; one column per output dimension (action is fixed)
	nRow = 3; 
	nCol = od; 

	otf = new OriginalTFGenerator(p,an,od,spec); 

#ifdef USE_METRIC_LEARNING
	metricLearner = new MetricLearner(this); 
#endif

	//per-action sample counts plus scratch buffers for K-NN query results
	dataSize = new int[action_number]; 
	memset(dataSize, 0, action_number*sizeof(int)); 
	dists = new ANNdist[K]; 
	nnIdx = new ANNidx[K]; 

	//every (action, output-dimension) learner slot starts empty; the reduced
	//dimensionality defaults to the full observation dimensionality
	for(int a=0; a< action_number; a++)
	{
		for(int d=0; d< od; d++)
		{
			learners(a,d) = 0; 
			dataPoints(a,d) = 0; 
			targets(a,d) = 0;
			reducedDimensions(a,d) = obs_dim; 
		}
	}
}

void KNNGeneralizer::resetTrees()
{
	//Frees every per-(action, output-dimension) kd-tree, point array and
	//target array, nulling each slot so that a rebuild — or a second call —
	//is safe.
	for(int i=0; i< action_number; i++)
		for(int j=0; j< obs_dim ; j++)
		{
			if (learners(i,j))
			{
				delete learners(i,j); 
				learners(i,j) = 0; 
			}

			if (dataPoints(i,j))
			{
				annDeallocPts(dataPoints(i,j)); 
				//this slot was previously left dangling, so a second call
				//(or any later "if (dataPoints(i,j))" check) would double-free
				dataPoints(i,j) = 0; 
			}

			if (targets(i,j))
			{
				delete[]  targets(i,j); 
				targets(i,j) = 0; 
			}
		}
}

void KNNGeneralizer::updatePointsIndividual(std::list<Transition>& l, bool* use,  ANNpointArray& data, Observation_type* & targets,  int action, int dim)
{
	int i = 0; 
	int pos = 0; 
	for(list<Transition>::iterator it=l.begin(); it != l.end(); it++, i++)
	{
		if(! use[i])
			continue; 

		int oAction = (*it).action; 

		if (oAction != action)
			continue; 

		Observation obs = (*it).start; 
		double* tmp = data[pos]; 
		memcpy(tmp, obs, obs_dim*sizeof(Observation_type)); 

		double target = (*it).end[dim]; 
		if (LEARN_DIFFERENCES)
			target -= (*it).start[dim]; 

		targets[pos] = target; 
		pos++; 
	}
}


void KNNGeneralizer::updatePoints(std::list<Transition>& l)
{
	//Rebuilds dataPoints/targets for every (action, output-dimension) pair
	//from the transition list, keeping at most 200 randomly selected samples.
	//NOTE(review): this overwrites dataPoints(i,j)/targets(i,j) without
	//freeing them — it appears to assume resetTrees() ran first (see the
	//commented-out call sequence in batchLearn). TODO confirm.
	memset(dataSize, 0, action_number*sizeof(int)); 

	//randomly select up to 200 transitions; use[i] marks the survivors,
	//indexed by position in the list
	bool* use = new bool[l.size()]; 
	eliminateBYShuffle(use, l.size(), 200); 


	//compute how many data for each action, so we can allocate space for them
	int i=0; 
	for(list<Transition>::iterator it=l.begin(); it != l.end(); it++, i++)
		if (use[i])
			dataSize[(*it).action]++; 

	//allocate space for data
	//NOTE(review): when dataSize[i]==0 the slots keep whatever pointer they
	//held before — stale if not previously nulled. TODO confirm callers reset.
	for(int i=0; i< action_number; i++)
		for(int j=0; j< obs_dim ; j++)	//set the parameters of each learner(each output)
		{
			if (dataSize[i]>0)
			{
				dataPoints(i,j) = annAllocPts(dataSize[i], obs_dim); 
				targets(i,j) = new Observation_type[dataSize[i]]; 
			}
		}

	//pos[a] = next free row in action a's point/target arrays
	int* pos = new int[action_number]; 
	memset(pos, 0, action_number*sizeof(int)); 

	int ctr = 0; 
	i = 0; 
	for(list<Transition>::iterator it=l.begin(); it != l.end(); it++, i++)
	{
		if(! use[i])
			continue; 

		 ctr++; 

		int action = (*it).action; 
		Observation obs = (*it).start; 
		//put each point in the corresponding places
		//(the same start observation is stored once per output dimension,
		//since each dimension has its own kd-tree / metric)
		for(int j=0; j< obs_dim; j++)
		{
			double* tmp = dataPoints(action,j)[pos[action]]; 
			memcpy(tmp, obs, obs_dim*sizeof(Observation_type)); 

			//target is the resulting value of dimension j, or the delta
			//when learning state differences
			double target = (*it).end[j]; 
			if (LEARN_DIFFERENCES)
				target -= (*it).start[j]; 

			targets(action, j)[pos[action]] = target; 
		}
		pos[action]++; 
	}

	delete[] pos; 
	delete[] use; 
}


void KNNGeneralizer::batchLearn(std::list<Transition>& history)
{
	//Rebuilds every (action, output-dimension) kd-tree from 'history'.
	//New data is prepared outside the mutex; only the final pointer swap is
	//performed under the lock so concurrent predict() calls stay consistent.

	ANNpointArray tmpPoints = 0;		//was uninitialized: used when tmpDataSize[i]==0
	Observation_type* tmpTargets = 0; 

	//compute new data size values into a temporary storage container. 
	int* tmpDataSize = new int[action_number]; 
	memset(tmpDataSize, 0, action_number*sizeof(int)); 

	bool* use = new bool[history.size()]; 

	//warning: the third argument is how many we select for learning. now we select everything 
	eliminateBYShuffle(use, history.size(), history.size()); 

	int i=0; 
	for(list<Transition>::iterator it=history.begin(); it != history.end(); it++, i++)
		if (use[i])
			tmpDataSize[(*it).action]++; 
	//-----------

	for(int i=0; i< action_number; i++)
		for(int j=0; j< obs_dim ; j++)	//set the parameters of each learner(each output)
		{
			if (tmpDataSize[i]<=0)
			{
				//no data for this action: the old code still built a kd-tree
				//from stale/uninitialized buffers here (undefined behavior);
				//instead we simply clear the learner slot.
				boost::mutex::scoped_lock lock(m_mutex);
				if (learners(i,j))
				{
					delete learners(i,j); 
					learners(i,j) = 0; 
				}
				dataSize[i] = 0; 
				continue; 
			}

			tmpPoints = annAllocPts(tmpDataSize[i], obs_dim); 
			tmpTargets = new Observation_type[tmpDataSize[i]]; 

			updatePointsIndividual (history, use, tmpPoints, tmpTargets, i,j);  
			int tmpReducedDimension = transformData(tmpPoints,  tmpTargets,tmpDataSize[i], i, j); 

			ANNkd_tree* tmpLearner = new ANNkd_tree(tmpPoints, tmpDataSize[i], tmpReducedDimension); 

			{
				//mutex in this part because we're transferring new data to our learners.
				//RAII scoped_lock (instead of the old new/delete pair) so the mutex is
				//released even if something below throws.
				boost::mutex::scoped_lock lock(m_mutex);
#ifdef USE_METRIC_LEARNING
				metricLearner->submitAPCChanges(i,j); 
#endif
				if (learners(i,j))
					delete learners(i,j); 
				learners(i,j) = tmpLearner; 
				dataSize[i] = tmpDataSize[i]; 

				reducedDimensions(i,j) = tmpReducedDimension; 

				if (dataPoints(i,j))
					annDeallocPts(dataPoints(i,j)); 
				dataPoints(i,j) = tmpPoints;

				if (targets(i,j))
					delete[]  targets(i,j); 
				targets(i,j) = tmpTargets; 
			}
			//mutex out 
		}

	delete[] use; 
	delete[] tmpDataSize; 

	draw(); 
}

int KNNGeneralizer::transformData(ANNpointArray& data,  Observation_type*& targets, int size, int action, int dim)
{
	//Normalizes every input coordinate into [0,1] using the task's per-dimension
	//ranges, and rescales the targets by the range of the regressed dimension
	//so the fixed metric-learning step size 'alpha' stays numerically well
	//behaved. Returns the (possibly reduced) dimensionality of the data.
	const double outRange = m_agent->taskSpec.double_observations[dim].max - m_agent->taskSpec.double_observations[dim].min; 

	for(int n=0; n< size; n++)	//for each stored observation
	{
		for(int d=0; d< obs_dim; d++)	//for each coordinate
		{
			const double lo = m_agent->taskSpec.double_observations[d].min; 
			const double hi = m_agent->taskSpec.double_observations[d].max; 
			data[n][d] = (data[n][d] - lo) / (hi - lo); 
		}
		targets[n] /= outRange; 
	}

#ifdef USE_METRIC_LEARNING
	//the metric learner may project the data to fewer dimensions
	return metricLearner->learnTransformation(data, targets, size, action, dim); 	
#else
	return obs_dim; 
#endif
}



void KNNGeneralizer::transformData()
{
	//Rescales and (under USE_METRIC_LEARNING) metric-transforms the stored
	//data of every (action, output-dimension) learner in place, recording the
	//resulting dimensionality for each slot.
	for(int a=0; a< action_number; a++)
	{
		for(int d=0; d< obs_dim ; d++)
		{
			reducedDimensions(a,d) = transformData(dataPoints(a,d), targets(a,d), dataSize[a], a, d); 
#ifdef USE_METRIC_LEARNING
			metricLearner->submitAPCChanges(a,d); 
#endif
		}
	}
}


void KNNGeneralizer::learn(const Transition* t)
{
	//Intentionally a no-op: this generalizer only learns in batch mode
	//(see batchLearn); per-transition online updates are not supported.
}


//this is the local version that predicts only one output dimension
double KNNGeneralizer::predict(const Observation st, Action a, int dim, int k,  Observation_type& result, double& distance)
{
	if (! learners(a,dim))
		return 0; 


	Observation newst = MREAgent::copyObservation(st); 
	for(int x=0; x< obs_dim; x++)	//for each dim in obs
	{
		newst[x] = (newst[x] - m_agent->taskSpec.double_observations[x].min) / (m_agent->taskSpec.double_observations[x].max - m_agent->taskSpec.double_observations[x].min); 
	}

#ifdef USE_METRIC_LEARNING
	metricLearner->transform(newst, a, dim); 
#endif

	try{
		learners(a,dim)->annkSearch(newst, k, nnIdx, dists, epsilon); 
	} catch(...)
	{
		delete[] newst; 
		return false; 
	}

	delete[] newst; 

	double* kernels= new double[k]; 
	computeKernels(dists, kernels); 
	
	result = 0; 
	double denom = 0; 
	for(int i=0; i< k ; i++)
	{
		result += targets(a,dim)[nnIdx[i]]*kernels[i]; 
		denom += kernels[i]; 
	}

	distance = denom; 
	result /= denom;
	delete[] kernels; 



	//the output was scaled to [0,1] before. rescaling back now
	result *= (m_agent->taskSpec.double_observations[dim].max - m_agent->taskSpec.double_observations[dim].min); 

	return distance; 
}

void KNNGeneralizer::computeKernels(double* dists, double* kernels)
{
	//Gaussian kernel weight for each of the K nearest neighbors.
	//NOTE(review): both arrays must hold at least K entries — this routine
	//always writes exactly K kernels, regardless of how many distances the
	//caller actually computed.
	for(int i=0; i< K ; i++)
	{
		//use double-precision exp: dists are doubles, and the previous expf
		//silently truncated them to float
		kernels[i] = exp(-dists[i]/ sigma2); 
	}
}

Observation KNNGeneralizer::predict(Observation st, Action a, double& confidence)
{
	//Predicts the full successor observation for (st, a), one output dimension
	//at a time. 'confidence' becomes the smallest per-dimension confidence;
	//if any dimension has no learner (confidence 0), the whole prediction is
	//aborted and 0 is returned. Caller owns the returned array.
	boost::mutex::scoped_lock lock (m_mutex);

	Observation prediction = new Observation_type[obs_dim]; 
	confidence = 1.0; 

	for(int d=0; d< obs_dim; d++)
	{
		double unusedDist; 
		const double c = predict(st, a, d, K, prediction[d], unusedDist); 
		if (c==0)
		{
			//no learner for this dimension: give up entirely
			confidence = 0; 
			delete[] prediction; 
			return 0; 
		}
		if (c < confidence)
			confidence = c; 

		//when learning state deltas, the prediction is relative to the input
		if (LEARN_DIFFERENCES)
			prediction[d] += st[d]; 
	}

	return prediction;
}


Observation KNNGeneralizer::predict(Observation st, Action a)
{
	//Convenience overload: same prediction, confidence value discarded.
	double ignoredConfidence; 
	return predict(st, a, ignoredConfidence); 
}


double KNNGeneralizer::getConfidence(Observation st, Action a)
{
	//placeholder: always reports full confidence, regardless of how much
	//data is actually available around (st, a)
	return 1.0; //warning: 
}





void KNNGeneralizer::draw()
{
	//Plots the "knownness" (kernel-weight sum) of each output dimension over a
	//2D grid of observations, then the raw and transformed data points.
	int action = 0; //we're fixing the action for now to avoid cluttered plots

	//draw knownness --------------------------------------
	const int resolution = 30; 
	MatlabMatrix<double>& vals = createMeshedDataStructure(resolution, m_agent->dimension,  m_agent); 

	int cntr = 0; 
	Observation tmp = new Observation_type[m_agent->dimension]; 

	for(int i=2; i< obs_dim; i++)		//set other dimension values to the middle (because we can only show 2 dimensions)
		tmp[i] = (m_agent->taskSpec.double_observations[i].max + m_agent->taskSpec.double_observations[i].min)/2.0; 

	for (int i=0; i< resolution; i++)
	{
		for(int j=0; j< resolution; j++, cntr++)
		{
			//the first two columns of 'vals' hold the grid coordinates
			tmp[0] = vals(cntr,0); 
			tmp[1] = vals(cntr,1); 

			for(int x=0; x< obs_dim; x++)
			{	
				double dists; 
				double tmpVal; 

				{
					//mutex in this part because batchLearn may be swapping the
					//learners. RAII scoped_lock (instead of the old new/delete
					//pair) releases the mutex even if predict throws.
					//NOTE(review): k=7 here exceeds the member buffers sized
					//K=3 inside the per-dimension predict — verify predict
					//allocates per-call buffers.
					boost::mutex::scoped_lock lock(m_mutex);
					predict(tmp, action, x, 7, tmpVal, dists);		
				}
				vals(cntr,2+x) = dists; 
			}
		}
	}

	MatlabDrawer::drawMeshedValues(vals, "2D",1); 
	delete[] tmp; 
	delete (&vals);		//createMeshedDataStructure returns a heap-allocated matrix

	//----------------------------------------------


	// draw points -----------------------------------
	drawScatteredPoints(); 


	// end draw points --------------------------------
}

//since the variables we want to draw are already in matlab, we use our own version of draw scattered points
void KNNGeneralizer::drawScatteredPoints()
{
#if defined(MATLAB_PLOT) && defined(USE_METRIC_LEARNING)
	//Scatter-plots the raw (row 2) and metric-transformed (row 3) data points
	//that the metric learner left in the Matlab workspace, then labels every
	//subplot. Row 1 (knownness) is drawn in draw().
	int action = 0; 

	//stack buffer instead of the old heap allocation; snprintf bounds every
	//write (the old sprintf could overflow the 200-byte buffer), producing
	//identical command strings for all realistic subplot counts
	char tmp[200]; 
	for (int i=0; i< obs_dim; i++)
	{
		//draw original points
		snprintf(tmp, sizeof(tmp), "subplot(%d,%d,%d);", nRow, nCol, obs_dim+i+1); 
		engEvalString(getEngine(), tmp); 
		
		snprintf(tmp, sizeof(tmp), "scatter(xbackup{1}{%d}(:,1), xbackup{1}{%d}(:,2),'.');",i+1, i+1);  
		engEvalString(getEngine(), tmp); 

		//draw transformed points
		snprintf(tmp, sizeof(tmp), "subplot(%d,%d,%d);", nRow, nCol, 2*obs_dim+i+1); 
		engEvalString(getEngine(), tmp); 
		
		//a 1-D projection gets plotted along the x axis only
		if (reducedDimensions(action, i)<2)
			snprintf(tmp, sizeof(tmp), "scatter(xp{1}{%d}(1,:), zeros(1,size(xp{1}{%d},2)),'+');", i+1,i+1); 
		else
			snprintf(tmp, sizeof(tmp), "scatter(xp{1}{%d}(1,:), xp{1}{%d}(2,:),'.');",i+1, i+1);  
		engEvalString(getEngine(), tmp); 
	}


	//-------let's set the labels so we know what we've drawn: 

	//the first row is for knownness
	snprintf(tmp, sizeof(tmp), "h=subplot(%d,%d,1); set(get(h,'YLabel'),'String','knownness');title('dim=1');", nRow, nCol); 
	engEvalString(getEngine(), tmp); 


	for(int i=2; i<=obs_dim; i++)
	{
		snprintf(tmp, sizeof(tmp), "subplot(%d,%d,%d); title('dim=%d'); ", nRow, nCol, i, i); 
		engEvalString(getEngine(), tmp); 
	}

	//the second row is for raw data
	snprintf(tmp, sizeof(tmp), "h=subplot(%d,%d,%d); set(get(h,'YLabel'),'String','Raw Data');", nRow, nCol, obs_dim+1); 
	engEvalString(getEngine(), tmp); 

	//the third row is for transformed data
	snprintf(tmp, sizeof(tmp), "h=subplot(%d,%d,%d); set(get(h,'YLabel'),'String','Transformed');title('new dim=%d');", nRow, nCol, 2*obs_dim+1, reducedDimensions(action, 0)); 
	engEvalString(getEngine(), tmp); 


	for(int i=2; i<=obs_dim; i++)
	{
		snprintf(tmp, sizeof(tmp), "subplot(%d,%d,%d); title('new dim=%d'); ", nRow, nCol,2*obs_dim+ i, reducedDimensions(action, i-1)); 
		engEvalString(getEngine(), tmp); 
	}

#endif
}