/** Evaluation Framework for various NN models
 * 
 * 
 * @author Yong Boon, Lim (yongboon.lim@gmail.com)
 */

package eval;

import java.io.File;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadMXBean;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import util.DocUtils;
import util.FileFinder;
import util.HammingDist;
import core.Core;
import core.NN;
import core.NNLSH;
import core.NNMultiHash;
import core.NNZDDBasic;
import core.NNZDDHash;
import core.NNZDDIdf;
import core.NNZDDIdfApprox;
import core.NNZDDLSH;
import core.NNZDDMinHash;
import core.NNZDDMultiHash;
import core.NNZDDTfIdf;
import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
// Evaluation Framework
public class Evaluator {
	/** When true, prints per-document mismatches and periodic progress lines. */
	public static boolean DEBUG = false;

	/**
	 * Evaluates each NN model in {@code nnList} against the test documents found
	 * under {@code testDataDir}, printing one CSV result row per model with
	 * accuracy, build/query timings, node size, and per-document latency stats.
	 *
	 * @param testDataDir directory containing the test documents
	 * @param nnList      models to evaluate; each is init'd, built, queried, then cleared
	 * @param fileSize    maximum number of test files to process per model (0 = no limit)
	 */
	public static void doEval(String testDataDir, List<Core> nnList, int fileSize) {
		ArrayList<File> testFiles = FileFinder.GetAllFiles(testDataDir, "", true);

		System.out.println("Method, Total Files, Correct, BuildTime, QueryTime, NodeSize, MeanQueryTimePerDoc,StdDevQueryTimePerDoc,StdErrQueryTimePerDoc");
		for (Core c : nnList) {
			int numCorrect = 0;
			int numFileProcessed = 0;
			SummaryStatistics summaryQueryTime = new SummaryStatistics();

			// Build phase: wall-clock time covering init + build.
			long startTimeB = System.currentTimeMillis();
			c.init();
			c.build();
			long endTimeB = System.currentTimeMillis();

			// Query phase: each test file is used as its own query; a result is
			// "correct" when the returned document has Hamming distance 0 to it.
			long startTimeQ = System.currentTimeMillis();
			for (File f : testFiles) {
				String actFile = f.getPath();
				long startTimeQPerDoc = System.currentTimeMillis();
				String estFile = c.query(actFile);
				long endTimeQPerDoc = System.currentTimeMillis();

				// BUG FIX: was `estFile != ""` — a reference comparison that is
				// almost always true in Java. Compare content, and guard against
				// null in case the model returns no match.
				if (estFile != null && !estFile.isEmpty()) {
					Map<Object, Double> actFileWordCount = DocUtils.ConvertToFeatureMapFilter(DocUtils.ReadFile(new File(actFile)));
					Map<Object, Double> estFileWordCount = DocUtils.ConvertToFeatureMapFilter(DocUtils.ReadFile(new File(estFile)));

					if (HammingDist.calcG(actFileWordCount.keySet(), estFileWordCount.keySet()) == 0) {
						numCorrect++;
					} else if (DEBUG) {
						System.out.println("Actual: " + actFile + ", Est: " + estFile);
					}
				}
				numFileProcessed++;
				// Record per-document latency BEFORE the early-exit check so the
				// final processed file is included in the statistics (the original
				// broke out before addValue, dropping one sample).
				summaryQueryTime.addValue(endTimeQPerDoc - startTimeQPerDoc);
				if (numFileProcessed % 200 == 0 && DEBUG) {
					System.out.println("Processed: " + numFileProcessed + ", Correct: " + numCorrect);
				}
				// BUG FIX: was `numFileProcessed > fileSize`, which processed
				// fileSize + 1 documents; stop once exactly fileSize are done.
				if (fileSize != 0 && numFileProcessed >= fileSize) {
					break;
				}
			}
			long endTimeQ = System.currentTimeMillis();

			long durationB = endTimeB - startTimeB;
			long durationQ = endTimeQ - startTimeQ;
			double meanDurationQPerDoc = summaryQueryTime.getMean();
			double stdDevDurationQPerDoc = summaryQueryTime.getStandardDeviation();
			// Standard error of the mean = stddev / sqrt(n); -1 sentinel when no samples.
			double stdErrDurationQPerDoc = summaryQueryTime.getN() == 0 ? -1 : stdDevDurationQPerDoc / Math.sqrt(summaryQueryTime.getN());
			System.out.println(c.getDescShort() + ", " + numFileProcessed + ", " + numCorrect + ", " + durationB + ", " + durationQ + ", " + c.size() + ", " + meanDurationQPerDoc + ", " + stdDevDurationQPerDoc + ", " + stdErrDurationQPerDoc);
			c.clear();
		}
	}

	/**
	 * Evaluates the models on every file in the test data directory (no file limit).
	 *
	 * @param testDataDir directory containing the test documents
	 * @param nnList      models to evaluate
	 */
	public static void doEval(String testDataDir, List<Core> nnList) {
		doEval(testDataDir, nnList, 0);
	}

	/** Entry point; currently a no-op placeholder. */
	public static void main(String[] args) {
	}

}

