/**
 * 
 */
package edu.umd.clip.lm.model.training.metrics;

import java.util.Arrays;
import java.util.Map;
import java.util.Random;

import org.apache.commons.math.MathException;
import org.apache.commons.math.distribution.*;
import org.w3c.dom.Element;

import edu.umd.clip.lm.model.training.ContextVariable;
import edu.umd.clip.lm.model.training.ContextVariableStats;
import edu.umd.clip.lm.util.ConstCountDistribution;
import edu.umd.clip.lm.util.CountDistribution;
import edu.umd.clip.lm.util.Long2IntMap;
import edu.umd.clip.lm.util.ProbMath;
import edu.umd.clip.lm.util.RandomUtil;

/**
 * @author Denis Filimonov <den@cs.umd.edu>
 *
 * Based on Matlab implementation by Giuseppe Cardillo 
 * http://www.mathworks.com/matlabcentral/fileexchange/26883
 * 
 */
public class FisherExactTest extends ContextVariableMetrics implements ContextVariableStoppingRule {
	/** Significance cut-off; a variable passes {@link #isGood} when its estimated p-value is below this. */
	private double maxPvalue;
	/** Samples with more than this many observations are not tested (assumed good enough). */
	private static final long MAX_TEST_COUNT = 1000;
	/** Minimum number of Monte-Carlo permutations before the stopping criterion is evaluated. */
	private static final int MIN_RUNS = 20;
	/** Scales the confidence half-width of the p-value estimate: delta = DELTA_COEFF / sqrt(runs). */
	private final double DELTA_COEFF;
	
	private static final double ALPHA = 0.01; // estimated p-value is within DELTA units of the true value   
	private static final double MIN_DELTA = 0.01; // with confidence (1-ALPHA)*100%
	
	private static final int FACTORIAL_TABLE_SIZE = 1000;
	// pre-computed ln(N!) values: logFactorialTable[n] == ln(n!)
	private static final double logFactorialTable[] = new double[FACTORIAL_TABLE_SIZE];
	
	/** XML attribute name under which the p-value cut-off is persisted. */
	public static final String P_VAL_ATTR = "p-value";
	
	static {
		// logFactorialTable[0] is left at 0 == ln(0!)
		double lastNum = 0;
		for(int i=1; i<FACTORIAL_TABLE_SIZE; ++i) {
			lastNum += Math.log(i);
			logFactorialTable[i] = lastNum;
		}
	}
	
	/**
	 * Restores the metric from its XML representation.
	 * 
	 * @param elem element previously written by {@link #saveXML(Element)};
	 *             must carry the {@value #P_VAL_ATTR} attribute
	 */
	public FisherExactTest(Element elem) {
		super(elem);
		maxPvalue = Double.parseDouble(elem.getAttribute(P_VAL_ATTR));
		DELTA_COEFF = computeDeltaCoefficient();
	}

	/**
	 * @param maxPValue significance cut-off used by {@link #isGood}
	 */
	public FisherExactTest(double maxPValue) {
		super("Fisher's Exact Test");
		this.maxPvalue = maxPValue;
		DELTA_COEFF = computeDeltaCoefficient();
	}

	/**
	 * Computes the coefficient for the Monte-Carlo confidence half-width:
	 * the normal quantile at (1 - ALPHA/2) of N(0, 0.5), divided by 2.
	 * Shared by both constructors (previously duplicated in each).
	 */
	private static double computeDeltaCoefficient() {
		NormalDistribution gauss = new NormalDistributionImpl(0, Math.sqrt(0.5));
		try {
			return gauss.inverseCumulativeProbability(1.0 - ALPHA/2) / 2;
		} catch (MathException e) {
			// chain the cause instead of printStackTrace() + bare Error
			throw new Error("math error", e);
		}
	}

	/**
	 * Estimates the p-value of Fisher's exact test on the contingency table
	 * (context-variable values x words) by Monte-Carlo permutation sampling:
	 * the column labels are repeatedly shuffled (which preserves the table's
	 * margins, i.e. samples from the null hypothesis) and the fraction of
	 * permutations at least as extreme as the observed table is returned.
	 * Sampling stops once the estimate is confidently on one side of
	 * {@code maxPvalue}, or once the confidence half-width drops below
	 * {@code MIN_DELTA}.
	 * 
	 * @return the estimated p-value; 0.0 for samples larger than
	 *         {@code MAX_TEST_COUNT} (not tested), 1.0 for an empty sample
	 */
	@Override
	public double computeScore(ContextVariable ctxVar, ContextVariableStats stat) {
		if (stat.contextCounts.getTotalCount() > MAX_TEST_COUNT) {
			// do not test large samples, chances are they are good enough
			return 0.0;
		}
		
		int N = (int) stat.contextCounts.getTotalCount();
		if (N == 0) {
			// empty sample: nothing to test, treat as not significant
			// (previously crashed with ArrayIndexOutOfBoundsException)
			return 1.0;
		}
		
		// enumerate context-variable values and words -> dense table indices
		Long2IntMap varIndices = new Long2IntMap(stat.contextCounts.size());
		for(long key : stat.contextCounts.keys()) {
			varIndices.put(key, varIndices.size());
		}
		
		Long2IntMap wordIndices = new Long2IntMap(stat.wordCounts.size());
		for(long key : stat.wordCounts.keys() ) {
			wordIndices.put(key, wordIndices.size());
		}
		
		// flatten the table into N (row, column) observations; observations of
		// the same row stay contiguous, which computeMatrixProb() relies on
		short rows[] = new short[N];
		short columns[] = new short[N];
		
		int pos = 0;
		for(long key : stat.x2wDistributions.keys()) {
			final short varIdx = (short) varIndices.get(key);
			final ConstCountDistribution dist = stat.x2wDistributions.get(key);
			
			long words[] = dist.keys();
			long counts[] = dist.values();
			
			for(int i=0; i<words.length; ++i) {
				short wordIdx = (short) wordIndices.get(words[i]);
				long count = counts[i];
				for(long j=0; j<count; ++j) {
					rows[pos] = varIdx;
					columns[pos] = wordIdx;
					++pos;
				}
			}
		}
		
		// constant factor KF = ln(\prod{R!}*\prod{C!}/N!);
		// identical for every permutation since the margins are fixed
		double KF = 0;
		for(long r : stat.contextCounts.values() ) {
			KF += factorialLog((int) r);
		}
		for(long c : stat.wordCounts.values() ) {
			KF += factorialLog((int) c);
		}
		KF -= factorialLog(N);
		
		// number of distinct words == number of table columns
		final int numColumns = wordIndices.size();
		
		double observedProbability = computeMatrixProb(KF, rows, columns, numColumns);
		
		int MCC = 0; // permutations at least as extreme as the observed table
		final Random rnd = new Random();
		int nrRuns = 0;
		
		while(true) {
			// generate a new sample under the null hypothesis (margins preserved)
			RandomUtil.shuffleArray(columns, rnd);
			
			double prob = computeMatrixProb(KF, rows, columns, numColumns);
			if (prob < observedProbability || ProbMath.approxEqual(prob, observedProbability, 0.001)) {
				++MCC;
			}

			++nrRuns;
			if (nrRuns > MIN_RUNS) {
				double delta = DELTA_COEFF / Math.sqrt(nrRuns);
				double pValue = (double)MCC / nrRuns;
				// if the estimated pValue is already too far from the cut-off, we don't need a more precise estimate
				// but make sure we don't go on indefinitely
				if (Math.abs(pValue - maxPvalue) > delta || delta < MIN_DELTA) {
					return pValue;
				}
			}
		}
	}

	/**
	 * Computes the log hypergeometric probability of a contingency table
	 * given as a flat list of (row, column) observations.
	 * 
	 * @param KF constant factor KF = ln(\prod{R!}*\prod{C!}/N!)
	 * @param rows row index of each observation; equal rows MUST be contiguous
	 *             (but not necessarily sorted)
	 * @param columns column index of each observation
	 * @param numColumns number of distinct columns (words)
	 * @return the log probability of the observed matrix under the null hypothesis
	 */
	private double computeMatrixProb(final double KF, final short rows[], final short columns[], final int numColumns) {
		double prob = KF;
		
		// per-column cell counts of the row currently being accumulated
		final short cellCounts[] = new short[numColumns];
		short lastRow = rows[0];
		
		// int counter: a short counter (as originally written) would overflow
		// and loop forever for arrays longer than Short.MAX_VALUE
		for(int i = 0; i < rows.length; ++i) {
			short r = rows[i];
			short c = columns[i];

			if (r != lastRow) {
				// row finished: subtract ln(cell!) for each of its cells
				lastRow = r;
				for(short cell : cellCounts) {
					if (cell > 1) { // ln(0!) == ln(1!) == 0, skip
						prob -= factorialLog(cell);
					}
				}
				Arrays.fill(cellCounts, (short)0);
			}
			++cellCounts[c];
		}
		// flush the final row
		for(short cell : cellCounts) {
			if (cell > 1) {
				prob -= factorialLog(cell);
			}
		}
		return prob;
	}
	
	/**
	 * Returns ln(num!), from the pre-computed table when {@code num} is small
	 * enough, otherwise extended incrementally from the last tabulated value.
	 * 
	 * @param num a non-negative integer
	 */
	public static final double factorialLog(int num) {
		if (num < FACTORIAL_TABLE_SIZE) {
			return logFactorialTable[num];
		}
		
		double result = logFactorialTable[FACTORIAL_TABLE_SIZE-1];
		for(int i=FACTORIAL_TABLE_SIZE; i<=num; ++i) {
			result += Math.log(i);
		}
		return result;
	}
	
	/** The test needs per-context-variable counts (table row margins). */
	@Override
	public boolean needContextVarCounts() {
		return true;
	}

	/** The test needs the variable-to-word distributions (table cells). */
	@Override
	public boolean needVarToWordCounts() {
		return true;
	}

	/** The test needs per-word counts (table column margins). */
	@Override
	public boolean needWordCounts() {
		return true;
	}

	/** Word-to-variable counts are not used by this test. */
	@Override
	public boolean needWordToVarCounts() {
		return false;
	}

	/**
	 * @return true iff the table has at least one degree of freedom and the
	 *         estimated p-value is below the configured cut-off
	 */
	@Override
	public boolean isGood(ContextVariable ctxVar, ContextVariableStats stat) {
		int degreesOfFreedom = (stat.wordCounts.size() - 1) * (stat.contextCounts.size() - 1);
		if (degreesOfFreedom <= 0) {
			// degenerate table (a single row or column) can never be significant
			return false;
		}
		
		double pValue = computeScore(ctxVar, stat);
		
		return pValue < maxPvalue;
	}
	
	/** Persists the p-value cut-off; the counterpart of {@link #FisherExactTest(Element)}. */
	@Override
	public void saveXML(Element elem) {
		super.saveXML(elem);
		elem.setAttribute(P_VAL_ATTR, Double.toString(maxPvalue));
	}

	@Override
	public String toString() {
		return "FisherExactTest [MIN_DELTA=" + MIN_DELTA + ", maxPvalue="
				+ maxPvalue + "]";
	}
	
}
