#include "MahalanobisMetricLearner.h"
#include "MetricLearner.h"
#include "FMLKRGeneralizer.h"
#include "MREAgent.h"
#include "Util.h"
#include "ParamReader.h"

MahalanobisMetricLearner::MahalanobisMetricLearner(FMLKRGeneralizer* p)
: SingleVariateMetricLearner(p){

	// Allocate the per-(action, observation-dimension) tables of
	// transformation matrices (A) and principal-component matrices (pc),
	// plus the thread-local scratch copies used while learning is in flight.
	const int actions = parent->action_number;
	const int dims = parent->obs_dim;

	A  = new MatlabMatrix< MatlabMatrix<double>* >(actions, dims);
	pc = new MatlabMatrix< MatlabMatrix<double>* >(actions, dims);
	threadA = new MatlabMatrix<double>(dims, dims);
	threadPc = 0;

	for(int a = 0; a < actions; a++)
	{
		for(int d = 0; d < dims; d++)
		{
			// A is always obs_dim x obs_dim, so it is allocated once here.
			(*A)(a, d) = new MatlabMatrix<double>(dims, dims);
			// The size of each pc differs from run to run, so it is
			// (re)allocated every time a transformation is learned.
			(*pc)(a, d) = 0;
		}
	}
}

MahalanobisMetricLearner::~MahalanobisMetricLearner(void)
{
	// Release every per-(action, dimension) matrix before dropping the
	// tables themselves. Note threadPc is not deleted here: its matrix is
	// handed over to pc by submitAPCChanges, which already owns it.
	for(int a = 0; a < parent->action_number; a++)
	{
		for(int d = 0; d < parent->obs_dim; d++)
		{
			if ((*A)(a, d))
				delete (*A)(a, d);
			if ((*pc)(a, d))
				delete (*pc)(a, d);
		}
	}

	delete A;
	delete threadA;
	delete pc;
}

void MahalanobisMetricLearner::submitAPCChanges(int action, int dim)
{
	// Commit the thread-local results of the last learnTransformation()
	// call into the shared per-(action, dimension) tables.
	// A(action,dim) keeps its fixed obs_dim x obs_dim allocation, so its
	// contents are copied; pc(action,dim) changes size between runs, so
	// ownership of the freshly allocated threadPc is transferred instead.
	*(A->operator()(action,dim)) = *threadA;
	if (pc->operator ()(action,dim))
		delete pc->operator ()(action,dim);
	pc->operator ()(action,dim) = threadPc;
	// Clear the thread-local reference: pc now owns the matrix. Leaving a
	// stale pointer here would store the same matrix twice if submit were
	// called again before the next learn, causing a double delete later.
	threadPc = 0;
}

// Learn a Mahalanobis transformation for one (action, dimension) pair by
// running MLKR regression in MATLAB, then replace `data` with the projected
// points. Results (A, pc) are stored in the thread-local threadA/threadPc;
// call submitAPCChanges() to commit them.
// Returns the reduced dimension of the projected data.
int MahalanobisMetricLearner::learnTransformation(ANNpointArray& data, Observation_type*& targets, int size, int action, int dim)
{
#ifdef USE_MATLAB
	//we have one KNN regression problem here to deal with: 
	double** x = data;  
	double*  y = targets;  
	int reducedDimension = parent->obs_dim; 

	//create variable x (note that since matlab stores column-wise and we have row-wise here, and it's more
	//efficient to transpose the matrix in matlab we send data as if it is obs_dim*dataSize while it's the
	//other way around. it is crucial that we transpose data in matlab before we do anything. 
	mxArray* xm = mxCreateDoubleMatrix(parent->obs_dim, size , mxREAL); 
	double* tmpPtr = (double*) mxGetPr(xm); 
	memcpy(tmpPtr, x[0], parent->obs_dim*size*sizeof(double)); 
	int res = engPutVariable(getEngine(), "x", xm); 

	mxArray* ym = mxCreateDoubleMatrix(size , 1,  mxREAL); 
	tmpPtr = (double*) mxGetPr(ym); 
	memcpy(tmpPtr, y, size*sizeof(double)); 
	res = engPutVariable(getEngine(), "y", ym); 

	//transpose x
	engEvalString(getEngine(),"x = x';");

	//run the dimension reduction and transpose again.
	//NOTE: a stack buffer replaces the previous new[]/delete (which was UB:
	//new[] memory must be released with delete[], not delete).
	char cmd[256]; 
	sprintf(cmd, "[xptmp, Atmp ,pctmp ] = MLKRRegression(x,y,0.60, 0.03,%d);xptmp=xptmp';xp{%d}{%d}=xptmp;pc{%d}{%d}=pctmp;A{%d}{%d} = Atmp;", ParamReader::MAHALANOBIS_USE_PCA,  action+1,dim+1,action+1,dim+1,action+1,dim+1); //matlab index starts at 1 
	engEvalString(getEngine(), cmd);

	//just for debugging: backup x and y in matlab
	sprintf(cmd, "xbackup{%d}{%d} = x; ybackup{%d}{%d} = y; ", action+1,dim+1, action+1,dim+1); 
	engEvalString(getEngine(), cmd); 

	engEvalString(getEngine(), "dim = size(xptmp,1);"); 
	mxArray* dimm = engGetVariable(getEngine(), "dim"); 
	mxArray* xpm = engGetVariable(getEngine(), "xptmp");

	double ddim = 0; 
	memcpy(&ddim, mxGetPr(dimm), sizeof(double)); 
	reducedDimension = (int) ddim; 

	//extract A and pc into their c++ corresponding variables 
	mxArray* Am = engGetVariable(getEngine(), "Atmp");
	mxArray* pcm = engGetVariable(getEngine(), "pctmp"); 
	double* APtr = threadA->getStorage(); 
	//create new pc (its size is only known after the MATLAB call)
	threadPc = new MatlabMatrix<double>(parent->obs_dim, reducedDimension); 
	double* pcPtr = threadPc->getStorage(); 
	memcpy(APtr, mxGetPr(Am), parent->obs_dim*parent->obs_dim*sizeof(double)); 
	memcpy(pcPtr, mxGetPr(pcm), parent->obs_dim*reducedDimension*sizeof(double)); 
	
	//reallocate data using new dimension
	if (data)
		annDeallocPts(data); 
	data = annAllocPts(size, reducedDimension); 

	//copy data
	memcpy(data[0], mxGetPr(xpm), size*reducedDimension*sizeof(double)); 

	//free all mxArrays owned on this side: both the ones we created and the
	//ones engGetVariable returned (it hands back a copy the caller owns).
	//These were previously leaked on every call.
	mxDestroyArray(xm);
	mxDestroyArray(ym);
	mxDestroyArray(dimm);
	mxDestroyArray(xpm);
	mxDestroyArray(Am);
	mxDestroyArray(pcm);

	return reducedDimension; 
#else //warning: what should we do here?
	printf("ERROR: using mahalanobis learner without defining USE_MATLAB\n\n\n\n"); 
	return 0; 
#endif
}


// Learn one Mahalanobis transformation per (action, observation-dimension)
// pair: each call projects the stored data points into their reduced space
// and commits the learned A/pc matrices via submitAPCChanges().
// (The obsolete pre-multithreading MATLAB_PLOT implementation that used to
// live here as a ~70-line comment has been removed; see version control.)
void MahalanobisMetricLearner::learnTransformation()
{
#ifdef USE_MATLAB
	for(int i=0; i< parent->action_number; i++)
		for(int j=0; j< parent->obs_dim ; j++)	
		{
			parent->reducedDimensions(i,j) = learnTransformation(parent->dataPoints(i,j), parent->targets(i,j), parent->dataSize[i], i,j); 
			submitAPCChanges(i,j); 
		}
#endif
}


// this function is similar to transform function but does the job in matlab. 
// another difference is that this one can take multiple observations at a time. 
// this function is similar to transform function but does the job in matlab. 
// another difference is that this one can take multiple observations at a time. 
void MahalanobisMetricLearner::transformInMatlab(ANNpointArray& data, int size, Action a, int dim)
{
#ifdef USE_MATLAB
	//this does exactly as the transform function except that it does it in matlab
	mxArray* xm = mxCreateDoubleMatrix(parent->obs_dim, size , mxREAL); 
	double* tmpPtr = (double*) mxGetPr(xm); 
	memcpy(tmpPtr, data[0] , parent->obs_dim*size*sizeof(double));

	int res = engPutVariable(getEngine(), "tmpObs", xm); 

	//stack buffer instead of new[]/delete[] (no allocation, no leak on early exit)
	char cmd[96]; 
	sprintf(cmd, "tmpObs = (( A{%d}{%d}*tmpObs)'*pc{%d}{%d})';", a+1, dim+1, a+1, dim+1); 
	engEvalString(getEngine(), cmd);

	mxArray* xpm = engGetVariable(getEngine(), "tmpObs");

	//reallocate data using new dimension
	if (data)
		annDeallocPts(data); 
	data = annAllocPts(size, parent->reducedDimensions(a,dim)); 

	memcpy(data[0], mxGetPr(xpm), size*parent->reducedDimensions(a,dim)*sizeof(double)); 

	//free the mxArrays: xm was created here and xpm is a caller-owned copy
	//returned by engGetVariable. Both were previously leaked on every call.
	mxDestroyArray(xm);
	mxDestroyArray(xpm);
#endif
}


// Project a single observation in place using the learned transformation:
// newst[0..rdim) = pc' * (A * newst) — the C++ equivalent of the MATLAB
// expression ((A*obs)' * pc)' used in transformInMatlab().
void MahalanobisMetricLearner::transform(Observation newst, Action a, int dim )
{
#ifdef USE_MATLAB
	int rdim = parent->reducedDimensions(a,dim); 
	int obs_dim = parent->obs_dim; 
	MatlabMatrix<double>* pcStart = pc->operator ()(a,dim); 
	MatlabMatrix<double>* AStart = A->operator ()(a,dim); 

	//value-initialize (zero) the scratch buffer. The previous
	//memset(..., obs_dim*sizeof(double)) was only correct when
	//Observation_type happens to be exactly double.
	Observation_type* tobs = new Observation_type[obs_dim](); 

	//tobs = A * newst
	for(int i=0; i< obs_dim; i++)
	{
		for(int j=0; j< obs_dim; j++)
			tobs[j] += AStart->operator ()(j,i)*newst[i]; 
	}

	//overwrite the first rdim entries of newst with pc' * tobs
	for(int i=0; i< rdim; i++)
	{
		newst[i] = 0; 
		for (int j=0; j< obs_dim; j++)
			newst[i] += tobs[j]*pcStart->operator()(j,i); 
	}

	delete[] tobs; 
#endif
}