package DEEPERsource.DEEPERsource.source.machinelearning.evaluation;

import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import deeper.Interaction;
import deeper.InteractionType;

import machinelearning.SVM;
import machinelearning.utils.ClassificationResult;
import machinelearning.utils.EvaluationEntry;
import machinelearning.wekawrapper.Dataset;
import net.sf.javaml.classification.evaluation.PerformanceMeasure;
import weka.classifiers.Classifier;
import weka.core.Instance;

/**
 * Class contains two evaluation strategies - train-test and folds cv.
 * 
 * @author tfayruzo
 *
 */
/**
 * Evaluation strategies for classifiers: single train-test split, n-fold
 * stratified cross-validation, cross-validation on predefined folds, and an
 * SVM evaluation that renders an LLL-challenge submission string.
 *
 * <p>Not thread-safe: the {@link #prediction} list accumulates state across
 * calls until {@link #clearPrediction()} is invoked.
 *
 * @author tfayruzo
 */
public class EvaluationProcedure {

	// Threshold separating the negative class: an instance is predicted negative
	// when the summed probability mass of all positive classes falls below this
	// value. Not used on the SVM path (for SVM see ClassificationResult).
	// NOTE(review): the name keeps the original typo ("treshold") because this
	// field is public API and renaming would break callers.
	public double treshold = 0.5;

	// One "<positiveProbabilitySum>\t<predictedClassIndex>" line per classified
	// test instance, accumulated across evaluation calls until clearPrediction().
	private List<String> prediction = new ArrayList<String>();

	/**
	 * Train-test evaluation. The peculiarity of this method is that it can
	 * evaluate several positive classes against one negative class; confusions
	 * between different positive classes are counted as false positives.
	 *
	 * @param train training dataset
	 * @param test test dataset
	 * @param classifier classifier to build on {@code train} and evaluate on {@code test}
	 * @param negativeClassValue index of the negative class
	 * @param positiveClassValues indices of the positive classes
	 * @return confusion counts over the test set; counts may be zero or partial
	 *         if an exception occurred (it is printed, not rethrown), or if the
	 *         classifier is an SVM without a precomputed kernel
	 */
	public PerformanceMeasure trainTestEvaluation(Dataset train, Dataset test, Classifier classifier, int negativeClassValue, int... positiveClassValues) {
		PerformanceMeasure result = null;
		try {
			result = new PerformanceMeasure();
			if (classifier instanceof SVM) {
				if (train.isPrecomputed()) {
					// NOTE(review): hard-coded absolute path; consider making it configurable.
					String outFile = "/home/tfayruzo/deeper/libsvm-2.85/result.svm";
					((SVM) classifier).buildClassifier(train.getPrecomputedKernel());
					((SVM) classifier).testOnPrecomputedKernel(test.getPrecomputedKernel(), outFile);
					ClassificationResult cr = new ClassificationResult();
					List<String> foldPrediction = cr.getClassification4SVM(outFile, test.getClassValues(), negativeClassValue, positiveClassValues);
					prediction.addAll(foldPrediction);
					result = cr.getPerformance4SVM(test.getClassValues(), foldPrediction, negativeClassValue);
				}
				// NOTE(review): an SVM without a precomputed kernel silently yields an
				// empty PerformanceMeasure here — confirm this is intended.
			} else {
				classifier.buildClassifier(train.getDataset());
				// Classify each test instance against the trained model.
				for (Enumeration<Instance> e = test.getDataset().enumerateInstances(); e.hasMoreElements();) {
					Instance inst = e.nextElement();
					double[] dist = classifier.distributionForInstance(inst);
					double sum = 0;
					int maxInd = 0;
					double maxElem = 0;
					// Find the most probable positive class (even though the negative
					// class could carry the highest value) and accumulate the total
					// probability of a positive classification.
					for (int i : positiveClassValues) {
						sum += dist[i];
						if (maxElem < dist[i]) {
							maxElem = dist[i];
							maxInd = i;
						}
					}
					int pred = maxInd;
					// Below the threshold the instance is taken to be negative.
					if (sum < treshold)
						pred = negativeClassValue;
					prediction.add(Double.toString(sum) + "\t" + Integer.toString(pred));
					double real = inst.classValue();
					if (real == negativeClassValue) {
						if (real == pred)
							result.trueNegatives++;
						else
							result.falsePositives++;
					} else {
						if (real == pred)
							result.truePositives++;
						else
							// A positive instance predicted negative is a false negative;
							// a wrong positive class is counted as a false positive.
							if (pred == negativeClassValue)
								result.falseNegatives++;
							else
								result.falsePositives++;
					}
				}
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
		return result;
	}

	/**
	 * Simple stratified cross-validation evaluation.
	 *
	 * @param data dataset to stratify and split into folds
	 * @param classifier classifier rebuilt on each fold
	 * @param numFolds number of folds
	 * @param negativeClassValue index of the negative class
	 * @param positiveClassValues indices of the positive classes
	 * @return confusion counts summed over all folds
	 */
	public PerformanceMeasure crossValidation(Dataset data, Classifier classifier, int numFolds, int negativeClassValue, int... positiveClassValues) {
		PerformanceMeasure[] foldPerformance = new PerformanceMeasure[numFolds];
		Dataset cvData = new Dataset(data);
		cvData.stratify(numFolds);
		for (int i = 0; i < numFolds; i++) {
			Dataset test = cvData.getTestCVFold(numFolds, i);
			Dataset train = cvData.getTrainCVFold(numFolds, i);
			foldPerformance[i] = trainTestEvaluation(train, test, classifier, negativeClassValue, positiveClassValues);
		}
		return sumFoldPerformance(foldPerformance);
	}

	/**
	 * Cross-validation on predefined folds.
	 *
	 * @param classifier classifier rebuilt on each fold
	 * @param trainFolds training datasets, one per fold
	 * @param testFolds test datasets, parallel to {@code trainFolds}
	 * @param negativeClassValue index of the negative class
	 * @param positiveClassValues indices of the positive classes
	 * @return confusion counts summed over all folds
	 */
	public PerformanceMeasure crossValidation(Classifier classifier, Dataset[] trainFolds, Dataset[] testFolds, int negativeClassValue, int... positiveClassValues) {
		PerformanceMeasure[] foldPerformance = new PerformanceMeasure[trainFolds.length];
		for (int i = 0; i < trainFolds.length; i++) {
			foldPerformance[i] = trainTestEvaluation(trainFolds[i], testFolds[i], classifier, negativeClassValue, positiveClassValues);
		}
		return sumFoldPerformance(foldPerformance);
	}

	/**
	 * Sums the confusion-matrix counts of per-fold performance measures into a
	 * single overall measure. Extracted from the two crossValidation overloads,
	 * which previously duplicated this loop.
	 */
	private static PerformanceMeasure sumFoldPerformance(PerformanceMeasure[] foldPerformance) {
		int tp = 0, tn = 0, fp = 0, fn = 0;
		for (PerformanceMeasure pm : foldPerformance) {
			tp += pm.truePositives;
			tn += pm.trueNegatives;
			fp += pm.falsePositives;
			fn += pm.falseNegatives;
		}
		return new PerformanceMeasure(tp, tn, fp, fn);
	}

	/**
	 * Evaluates an SVM with a precomputed kernel on the test set and renders the
	 * per-sentence interactions as an LLL-challenge submission string.
	 *
	 * @param train training dataset (must provide a precomputed kernel)
	 * @param test test dataset (must provide a precomputed kernel and interactions)
	 * @param classifier SVM to build and test
	 * @param negativeClassValue index of the negative class ("no interaction")
	 * @param positiveClassValues indices of the positive classes
	 * @return the submission file content, header included
	 */
	public String SVMEvaluationForLLL(Dataset train, Dataset test, SVM classifier, int negativeClassValue, int... positiveClassValues) {
		// LinkedHashMap keeps sentence order deterministic in the rendered output
		// (a plain HashMap produced hash-ordered, run-dependent output).
		Map<String, EvaluationEntry> result = new LinkedHashMap<String, EvaluationEntry>();
		List<Interaction> lInter = test.getInteractions();
		String outFile = "resource/svm/result.svm";
		// Build the classifier and classify the test kernel.
		classifier.buildClassifier(train.getPrecomputedKernel());
		classifier.testOnPrecomputedKernel(test.getPrecomputedKernel(), outFile);
		ClassificationResult cr = new ClassificationResult();
		// BUGFIX: read predictions from the local fold list. The previous code
		// indexed into the shared accumulating 'prediction' list from 0, which
		// returned stale entries whenever an earlier evaluation had already
		// populated it.
		List<String> foldPrediction = cr.getClassification4SVM(outFile, test.getClassValues(), negativeClassValue, positiveClassValues);
		prediction.addAll(foldPrediction);

		for (int k = 0, size = lInter.size(); k < size; k++) {
			Interaction i = lInter.get(k);
			String id = i.origSentId;
			// Prediction lines have the form "<probability>\t<classIndex>".
			int predClass = Integer.parseInt(foldPrediction.get(k).split("\t")[1]);

			EvaluationEntry ee = result.get(id);
			if (ee == null) {
				ee = new EvaluationEntry();
				ee.sentenceId = id;
			}
			if (predClass == negativeClassValue) {
				// No interaction predicted for this pair — record nothing.
			} else if (predClass == InteractionType.REALINVERSE.ordinal()) {
				// Inverse interaction: the second protein acts on the first.
				ee.agents.add(i.p2.name);
				ee.targets.add(i.p1.name);
				ee.interactions.add(new String[] { i.p2.name, i.p1.name });
			} else {
				ee.agents.add(i.p1.name);
				ee.targets.add(i.p2.name);
				ee.interactions.add(new String[] { i.p1.name, i.p2.name });
			}
			result.put(id, ee);
		}
		// StringBuilder: no synchronization needed, preferred over StringBuffer.
		StringBuilder sb = new StringBuilder("% Participant name: Timur Fayruzov\n" +
				"% Participant institution: UGent\n" +
				"% Participant email address: timur.fayruzov@ugent.be\n" +
				"% Format checked: YES\n" +
				"% Basic data: YES\n" +
				"% Coreference distinction: WITH COREFERENCE and WITHOUT COREFERENCE\n");
		for (EvaluationEntry ee : result.values()) {
			sb.append(ee.toString());
		}
		return sb.toString();
	}

	/**
	 * Writes the accumulated prediction lines, one per line, to the writer.
	 * The writer is not flushed or closed here.
	 */
	public void writePrediction(PrintWriter writer) {
		for (String p : prediction)
			writer.println(p);
	}

	/** @return the live (mutable) list of accumulated prediction lines */
	public List<String> getPrediction() {
		return prediction;
	}

	/** Discards all accumulated prediction lines. */
	public void clearPrediction() {
		prediction.clear();
	}

}
