package mth.weka.cltree;

public class AttrSplit {
	/*
	 * Computes the best split point of a single numeric attribute for a
	 * CLTree-style cluster tree (splitting according to [Liu et al., 2000],
	 * "Clustering Through Decision Tree Construction"). The real points carry
	 * class Y; nPoints artificial "non-existing" points (class N) are assumed
	 * uniformly distributed over the attribute's value range.
	 * 
	 * @params
	 * 	splitcriterium - the split criterion (infogain, gini, etc.) --> alpha-version only with infogain
	 * 	binary - only binary splits?
	 * 	lookahead - should the lookahead technique be used
	 * 
	 * @members
	 * 	splitVal - the attribute value where the split is set
	 * 	rdens - the relative density of the partition with the lowest relative density after the split at splitVal
	 * 	infogain - despite not required for CLTree it will be logged
	 * 	cut_count - number of look-ahead cuts calculated so far (at most 3)
	 * 	cut_list - list of splitVals where the cuts are computed
	 * 	cut_rd_val_list - relative density of the sparser partition at each cut
	 * 	LOEsplit - true for a "less or equal" split, false for "greater or equal"
	 * 	recursiveBacktracker - recursion-depth marker used by the look-ahead consolidation
	 * 	noNextSplit - set once no further (look-ahead) split can improve the result
	 * */
	
	private String splitcriterium; // informational only in the alpha version ("infogain")
	private boolean binary;        // reserved for non-binary splits; currently unused
	private boolean lookahead;
	
	private double splitVal;
	private double rdens;
	private double infogain;
	private int cut_count;
	private double[] cut_list;
	private double[] cut_rd_val_list;
	private boolean LOEsplit; //less or equal split
	private int recursiveBacktracker;
	private boolean noNextSplit;
	
	/**
	 * Creates a split calculator.
	 *
	 * @param splitcrit the split criterion; only "infogain" is implemented in the alpha version
	 * @param lah       whether the look-ahead technique (up to 3 cuts) should be used
	 * @param bin       whether only binary splits are allowed (currently unused)
	 */
	public AttrSplit(String splitcrit, boolean lah, boolean bin) {
		splitcriterium = splitcrit;
		lookahead = lah;
		binary = bin;
		infogain = 0.0;
		cut_count = 0;
		cut_list = new double[3];
		cut_rd_val_list = new double[3];
		recursiveBacktracker = 0;
		noNextSplit = false;
	}
	
	/**
	 * Two-class Shannon entropy (in bits) of the distribution
	 * (nPointProb, yPointProb); the two probabilities are expected to sum to 1.
	 * Returns 0 for a pure node and 1 for the uniform distribution.
	 */
	private double getEntropy(double nPointProb, double yPointProb) {
		//for logging purposes:
		//System.out.println("ProbSum: "+(nPointProb+yPointProb)+"\n");
		
		if (nPointProb == 1 || yPointProb == 1) {
			return 0;
		} else {
			// equal probabilities short-circuit to exactly 1 bit; otherwise -sum(p * log2(p))
			return (nPointProb==yPointProb)?1:-((nPointProb*(Math.log(nPointProb)/Math.log(2)))+
				(yPointProb*(Math.log(yPointProb)/Math.log(2))));
		}
	}
	
	/**
	 * Weighted entropy of cutting the attribute at its a'th value.
	 * The nPoints artificial N-points are assumed uniformly distributed over
	 * [minVal, maxVal], so their share left of the cut is proportional to the
	 * covered interval length.
	 *
	 * @param a   the a'th element of attVal[] (1-based)
	 * @param loe true for a "less or equal" cut (the a'th point belongs to the
	 *            left partition), false for "greater or equal"
	 * @return the size-weighted sum of the entropies of the two partitions
	 */
	private double calcSplitEntropy(int nPoints, int yPoints, int a, double maxVal, double minVal, double[] attVal, boolean loe) {
		double nPointPos, nPointProb, bProb;
		
		//TODO: important: continue: catch 0 values in parameters
		// NOTE(review): at the boundary cuts several of the ratios below become
		// 0/0 = NaN (e.g. a GOE cut at a==1); the NaN then propagates into the
		// comparison in calcBestAttribSplit — confirm this is the intended way
		// of skipping those cuts.
		
		//get entropy from left subset: expected number of N-points left of the cut
		nPointPos = (double)nPoints*(attVal[a-1]-minVal) / (maxVal-minVal); 
		
		a = loe?a:a-1; //important to select LOE or GOE split
		
		nPointProb = nPointPos/(nPointPos+a);
		bProb = a/(nPointPos+a);
		
		double tempEntropyLow;
		if (bProb==0) { //TODO: what is about nPointProb? 
			tempEntropyLow = 0;
		} else {
			tempEntropyLow = getEntropy(nPointProb, bProb);
		}
		
		//get entropy from right subset
		nPointProb = ((double)nPoints-nPointPos) / ((double)nPoints-nPointPos+(double)yPoints-(double)a);
		bProb = (yPoints-a) / (nPoints-nPointPos+yPoints-a);
		double tempEntropyHigh;
		if (a==attVal.length) {//attVal.length-1 if int a=0 in for loop in calcBestAttribSplit method
			tempEntropyHigh = 0;
		} else {
			tempEntropyHigh = getEntropy(nPointProb, bProb);
		}
		
		//combine to weighted entropy of split 
		return (nPointPos+a)/(nPoints+yPoints) * tempEntropyLow + 
				(nPoints-nPointPos+yPoints-a)/(nPoints+yPoints) * tempEntropyHigh;
	}
	
	
	/**
	 * Finds the best binary split of this attribute by information gain,
	 * splitting according to [Liu et al., 2000]; the best overall cut is the
	 * cut with the lowest relative density in the sparser partition L_i.
	 * Results are exposed via the getters (splitVal, infogain, cut list).
	 * With look-ahead enabled the method recurses into the sparser partition
	 * for up to three cuts.
	 *
	 * @param attVal  the sorted values of the attribute (ascending); null or
	 *                empty input is ignored
	 * @param nPoints number of artificial N-points assumed uniform over the range
	 * @param yPoints number of real Y-points (equals attVal.length for the top call)
	 */
	public void calcBestAttribSplit(double[] attVal, int nPoints, int yPoints) {
		// BUGFIX: was the non-short-circuit "|", which evaluated attVal.length
		// even when attVal == null and threw a NullPointerException.
		if (attVal == null || attVal.length == 0)
			return;
		
		int splitPos=0;
		double minVal = attVal[0];
		double maxVal = attVal[attVal.length-1];
		double baseEntropy = getEntropy((double)nPoints/((double)nPoints+(double)yPoints), (double)yPoints/((double)nPoints+(double)yPoints));
		
		for (int a=1; a<=attVal.length; a++) {
			double tempEntropyLOE, tempEntropyGOE, tempEntropyGlobal;
			boolean tempLOEsplit;
			tempEntropyLOE = calcSplitEntropy(nPoints, yPoints, a, maxVal, minVal, attVal, true);
			tempEntropyGOE = calcSplitEntropy(nPoints, yPoints, a, maxVal, minVal, attVal, false);
			
			// NOTE(review): when tempEntropyGOE is NaN (0/0 boundary cut) this
			// comparison is false and the NaN GOE branch is taken, silently
			// discarding a possibly valid LOE cut — confirm against the paper.
			if (tempEntropyLOE<=tempEntropyGOE) { //preferring the LOE split over the GOE split //TODO: WHY?
				tempEntropyGlobal = tempEntropyLOE;
				tempLOEsplit = true;
			} else {
				tempEntropyGlobal = tempEntropyGOE;
				tempLOEsplit = false;
			}
			
			if (baseEntropy-tempEntropyGlobal > infogain) {
				splitVal = attVal[a-1];
				infogain = baseEntropy-tempEntropyGlobal;
				splitPos = a;
				LOEsplit = tempLOEsplit;
			}
		}
		
		if (splitPos == 0 || splitPos == attVal.length || (splitPos == attVal.length-1 && !LOEsplit)) {
			if (Double.isNaN(splitVal)) {
				infogain = 0;
			}
			noNextSplit = true;
			return; //no need to check for other Split if splitPos == 0 or == attVal.length: no better split can be found
		} else {
			cut_count++;
			cut_list[cut_count-1]=splitVal;
			
			if (lookahead) {
				//prepare data in two sets and call calcBestAttribSplit for the one with lower density
				//first step: get partition with lower relative density
				//second step: calculate new split for this partition
				//third step: check if finished or 3rd split necessary (first step on iteration 2)
				
				if (!noNextSplit && cut_count < 3) {
					//construct new instance space; a GOE split moves the cut point itself
					//into the high partition, hence the -1 corrector
					int loeSplitCorrector = 0;
					if (!LOEsplit) {
						loeSplitCorrector--;
					}
					double[] attValLow = new double[splitPos+loeSplitCorrector];
					double[] attValHigh = new double[attVal.length-splitPos+loeSplitCorrector];
					for (int a=0; a<attVal.length; a++) {
						if (a<attValLow.length) {
							attValLow[a] = attVal[a];
						} else {
							attValHigh[a-attValLow.length] = attVal[a];
						}
					}
					
					//expected N-points in each partition, proportional to the interval covered
					int nPointsLow = (int)Math.round(nPoints*(attVal[splitPos-1]-minVal) / (maxVal-minVal));
					double rdLow = (double)attValLow.length/(double)nPointsLow; //wrong calculation + too difficult: calcRd(attValLow, minVal, maxVal, nPointsLow, cut_list[cut_count-1]);
					
					int nPointsHigh = nPoints-nPointsLow; //to get back on the correct amount of nPoints
					double rdHigh = (double)attValHigh.length/(double)nPointsHigh; //wrong calculation + too difficult: calcRd(attValHigh, minVal, maxVal, nPointsHigh, cut_list[cut_count-1]);
					
					//TODO: not refreshing the nPoints in the L_i region, using the "existing" nPoints. i had to assume this, because 
					//i didn't find a matching passage in literature
					//watch for this - possibly a way for configuring a new feature --> take on list for testing
					
					if (rdLow<rdHigh) { //TODO: reassess the following part
						cut_rd_val_list[cut_count-1] = rdLow;
						if (cut_count == 2) {
							if (attValLow[0] == cut_list[0] || attValLow[attValLow.length-1] == cut_list[0]) {
								recursiveBacktracker--;
								noNextSplit = true;
								return; //no 3rd cut necessary
							}
						}
						recursiveBacktracker++;
						calcBestAttribSplit(attValLow, nPointsLow, attValLow.length);
					} else {
						cut_rd_val_list[cut_count-1] = rdHigh;
						if (cut_count == 2) {
							// NOTE(review): checks attValLow even though the high partition
							// was selected — presumably intentional (the boundary towards
							// cut 1 lies in attValLow), but verify.
							if (attValLow[0] == cut_list[0] || attValLow[attValLow.length-1] == cut_list[0]) {
								recursiveBacktracker--;
								noNextSplit = true;
								return; //no 3rd cut needed
							}
						}
						recursiveBacktracker++;
						calcBestAttribSplit(attValHigh, nPointsHigh, attValHigh.length);
					}
					
				} else if (recursiveBacktracker == 1 && noNextSplit) {
					recursiveBacktracker--; //to prevent the recursively called children from double spending the consolidation
					return;
				} else if ((cut_count == 3 || (noNextSplit && cut_count == 2)) && recursiveBacktracker == 0) {
					for (int a=0; a<cut_count; a++) {
						//TODO: continue - consolidate the collected cuts here
						
					}
				} else if ((cut_count == 3 || (noNextSplit && cut_count==2)) && recursiveBacktracker !=0 ) {
					//TODO: catch - inconsistent backtracker state; placeholder branch (dead code removed)
				}
			}
		}
	}

	/** @return the attribute value at which the best split was placed */
	public double getSplitVal() {
		return splitVal;
	}
	
	/** @return the relative density of the sparser partition (rdens; not yet written by calcBestAttribSplit) */
	public double getRd() {
		return rdens;
	}
	
	/** @return the information gain of the best split (0.0 if no split was found) */
	public double getInfogain() {
		return infogain;
	}
	
	/** @return the number of look-ahead cuts computed so far (at most 3) */
	public int getCutNum() {
		return cut_count;
	}
	
	/** @return the split values of the computed cuts; NOTE: exposes the internal array */
	public double[] getCutList() {
		return cut_list;
	}
}
