// Nearest Ranked Neighbors
// Jonas Almeida Dec 4, 2012


var NRN={ // Nearest Ranked Neighbors: rank-based nearest-neighbor feature selection and prediction
	
sum:function(x){ // sum of the elements of a numeric array; returns 0 for an empty array
		return x.reduce(function(a,b){return a+b},0); // seed with 0 so [] does not throw
},

msg:function(msg){ // log a message to the console; always returns false
	console.log(msg);
	return false;
},

interp1:function(X,Y,XI){ // 1D linear interpolation of (X,Y) at the points XI; X must be sorted ascending
	var n = X.length;
	var YI = XI.map(function(XIi){
		// i = number of X entries strictly below XIi (i.e. the insertion index)
		var i=NRN.sum(X.map(function(Xi){if (Xi<XIi){return 1}else{return 0}})); // fixed: was jmat.sum, which is not defined in this file
		if (i==0){return Y[0]} // at/below lower bound: clamp to first Y
		else if (i==n){return Y[n-1]} // above upper bound: clamp to last Y
		else{return (Y[i-1]+(XIi-X[i-1])*(Y[i]-Y[i-1])/(X[i]-X[i-1]))} // interpolate within segment [i-1,i]
	});
	return YI;
},

transpose:function (x){ // transposes a 2D array; a 1D vector becomes an n x 1 column
	var y=[],n=x.length,m=x[0].length;
	if(typeof(m)=="undefined"){ // x is a vector
		for(var i = 0;i<n;i++){
			y[i]=[x[i]];
		}
	}
	else{ // x is a matrix
		for(var j=0;j<m;j++){
			y[j]=[];
			for(var i=0;i<n;i++){
				y[j][i]=x[i][j];
			}
		}
	}
	return y;
},

rank:function(x){ // rank of each element of x (0 = smallest); ties keep their original relative order
	x=x.map(function(xi,i){return [xi,i]}); // pair each value with its original index
	x.sort(function(a,b){return a[0]-b[0]});
	var I = NRN.transpose(x)[1]; // original indexes in sorted order
	var y = [];
	I.map(function(Ii,i){y[Ii]=i}); // invert the permutation to obtain ranks
	return y;
},

minInd:function(x){ // index of the minimum value of x (ties resolve to the last minimum)
	return x.map(function(xi,i){return [xi,i]}).reduce(function(a,b){if(a[0]<b[0]){return a}else{return b}})[1];
},

rankN:function(x,y){ // for each row of x: absolute rank difference between its outcome and that of its closest ranked neighbor
	var T=NRN.transpose, R=NRN.rank; // just to make it prettier :-)
	var ry = R(y); // rank outcomes
	var n = x.length;
	if(y.length!==x.length){throw("dimensions of values and outcomes parameters don't match: "+x.length+", "+y.length+", respectively")}
	var xT = T(x);
	xT = xT.map(function(xi){return R(xi)}); // rank each column independently
	var rx = T(xT); // fixed: was an implicit global
	var z = []; // store results here
	x.map(function(xi,i){ // for each row find the closest neighbor and record the absolute difference between the ranks of the corresponding outcomes
		var rxx = rx.slice(0); // remember .slice(0) clones the Array
		rxx.splice(i,1); // removes the ith row (no self-matching)
		z[i]=NRN.minInd(NRN.euclid(rxx,rx[i]));
		if(z[i]>=i){z[i]+=1} // to compensate for the excluded ith row
		// z[i] now indexes the closest neighbor; replace it with the difference between the ranks of the outcomes
		z[i] = Math.abs(ry[i]-ry[z[i]]);
	});
	return z;
},

boost:function(x,y,fun){ // greedy forward feature selection by boosting; returns a bst object with the results
	var max = 30; // set maximum number of parameters to boost
	if(!fun){fun = NRN.Nindex} // using NRN.Nindex as the default objective function
	var T=NRN.transpose, C = NRN.xCols, bst = {}, m = x[0].length ;
	var parms = []; for(var i = 0 ; i<x[0].length ; i++){parms[i]=i} // parms left to boost, start with all
	// start with one parameter at a time and grow from there
	bst.x = x.slice(0);
	bst.y = y.slice(0);
	bst.nn = []; // Neighborhood index, or whatever fun was defined, kept here
	bst.nn[0]=[]; // starting with univariate selection
	if(!!C.Tx){delete C.Tx} // clear caching if it exists
	parms.map(function(i){
		bst.nn[0][i]=fun(C(x,[i],true),y); // note the caching
	});
	bst.ind = [NRN.minInd(bst.nn[0])]; // extract the index of the smallest value
	bst.min=[bst.nn[0][bst.ind[0]]]; // keep the lowest value here in case one wants to toss bst.nn in the end
	bst.parms = [];
	bst.parms[0]=parms.slice(0); // keep list of parameters tried
	bst.ind[0]=parms[bst.ind[0]]; // convert to the original indexes (identity at this first step since parms==[0..m-1])
	parms.splice(bst.ind[0],1); // remove the selected parameter index from the list
	// now extract the remainder
	for(var j = 1;j<m;j++){
		if(j>=max){break} // break if maximum number of iterations exceeded
		bst.nn[j]=[];
		bst.parms[j]=parms.slice(0); // keep list of parameters to be tried
		parms.map(function(p,i){ // try each of the remaining parameters, parms
			var ind = bst.ind.slice(0);
			ind.push(p); // index of the columns being tried: current selection plus candidate p
			bst.nn[j][i]=fun(C(x,ind,true),y); // note caching being used
		});
		var mind = NRN.minInd(bst.nn[j]);
		bst.min.push(bst.nn[j][mind]); // record the min value
		bst.ind[j]=parms[mind]; // record selection in the original index numbering
		parms.splice(mind,1); // remove the selected parameter index from the list
		if(bst.min[j]>bst.min[0]){break} // break if cost became higher than univariate classification
		// delete bst.nn[j]  // uncomment this if you want to save (lose) memory along the way
	}
	// delete bst.nn  // to lose all memory of neighborhood searching
	return bst;
},

predict:function(x,bst,ind,fun){ // predicts outcomes for new values of the independent variables using a boosted model bst
	var newX;
	if(!Array.isArray(x[0])){newX = [x]}else{newX = x.slice(0)} // accept a single row or a set of rows
	var oldX = bst.x;
	if(newX[0].length!=oldX[0].length){throw("Number of parameters doesn't match: "+oldX[0].length+" for the model and "+newX[0].length+" for the new data :-(")}
	if(!ind){ind = NRN.minInd(bst.min)+1} // if not provided use the number of ordered indexes that minimizes fun
	if(typeof(ind)=='number'){ind = bst.ind.slice(0,ind)} // if ind is the number of ordered indexes to be used
	// get only the columns that matter
	var C = NRN.xCols, R = NRN.rank, T=NRN.transpose;
	if(!fun){fun = NRN.Nindex} // using NRN.Nindex as the default objective function
	newX = C(newX,ind); // fixed: removed redeclarations of newX/oldX within the same scope
	oldX = C(oldX,ind);
	var oldXT = T(oldX);
	var RoldXT = oldXT.map(function(c){return R(c)}); // ranked each column in model x, transposed
	var RoldX = T(RoldXT); // ranked indexes where the values were in x
	// find rank of newX by borrowing oldX's
	var RnewX = []; // borrowed ranks go here (fixed: was an implicit global)
	newX.map(function(r,i){ // for each row
		RnewX[i]=[];
		r.map(function(c,j){ // for each column (each cell)
			var ci = NRN.minInd(oldXT[j].map(function(cx){
						// find absolute difference to reference column
						return Math.abs(cx-c);
					})
			);
			RnewX[i][j]=RoldX[ci][j]; // borrowing rank of closest value in oldX
		});
	});
	// RnewX has the ranks of the new values in the old ranking order; find closest neighbor
	var RnewY = [], newY = [];
	RnewX.map(function(r,i){ // for each row
		RnewY[i]=NRN.minInd(NRN.euclid(RoldX,r));
		newY[i]=bst.y[RnewY[i]]; // borrow outcome of closest related (and ranked) neighbor
	});
	return newY;
},

xCols:function(x,ind,cache){ // extracts the columns of x listed in ind, with the option of caching the transposed x
	var Tx;
	if(cache){
		if(!NRN.xCols.Tx){NRN.xCols.Tx = NRN.transpose(x)} // caching transposed matrix
		Tx = NRN.xCols.Tx;
	}
	else{
		Tx = NRN.transpose(x);
		if(NRN.xCols.Tx){delete NRN.xCols.Tx} // fixed: condition was inverted, so a stale cache was never cleared
	}
	var y = [];
	ind.map(function(i,j){y[j]=Tx[i]});
	return NRN.transpose(y);
},

euclid:function(a,b){ // Euclidean distance; a may be a set of arrays, b a matching set or a single vector
	if(Array.isArray(a[0])){
		if(Array.isArray(b[0])){ // comparing two sets of arrays, pairwise
			return a.map(function(ai,i){return NRN.euclid(ai,b[i])});
		}
		else{ // comparing a set of arrays, a, with a single array, b
			return a.map(function(ai){return NRN.euclid(ai,b)});
		}
	}
	else{ // both are plain vectors
		return Math.sqrt(NRN.sum(a.map(function(ai,i){return Math.pow((ai-b[i]),2)})));
	}
},

Nindex:function(x,y){ // Neighborhood index: sum of outcome rank differences between each observation and its closest ranked neighbor
	return(NRN.sum(NRN.rankN(x,y))/(Math.pow(y.length,2)/3)); // note the division by n*n/3 such that random neighborhood has index ~1
},

costRank:function(r){ // total absolute step cost of a rank ordering; not used here, just for reference
	var c = 0;
	for(var i=0;i<r.length-1;i++){
		c += Math.abs(r[i+1]-r[i]);
	}
	return c;
}

}