package matching;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;

import data.RTEData;
import data.RTENode;
import data.RTEPreprocessedData;
import data.RTESentence;

/**
 * Base class for RTE (recognizing textual entailment) matchers.
 *
 * <p>Holds per-run judgement results and shared, statically initialized
 * inverse-document-frequency tables: each table maps a token/lemma to
 * {@code 1 / occurrence-count} over all text and hypothesis material.
 */
public abstract class Matcher {

	// Maps pair id -> "YES"/"NO" entailment judgement for this run.
	private HashMap<Integer, String> judgementData;
	protected int entailments;
	protected float correctness;
	protected int numberOfTextCases;
	protected double threshold;
	String task;
	// Token -> 1/count over the raw (space-split) text and hypothesis words.
	protected static HashMap<String, Float> idfNormal;
	// Lemma -> 1/count over the preprocessed sentences.
	protected static HashMap<String, Float> idfPreProcessed;
	// Ids of pairs this matcher judged incorrectly.
	public ArrayList<Integer> errors;

	// NOTE(review): this static initializer reads RTEData.RTEDataList and
	// RTEPreprocessedData.rtePreprocessedDataList; both lists must already be
	// populated before this class is first loaded — confirm the load order.
	static {
		calculateInverseDocumentFrequency();
		calculateInverseDocumentFrequencyPreProcessed();
	}

	/**
	 * @param task      label used in {@link #toString()} output
	 * @param threshold decision threshold applied by subclasses in {@link #match()}
	 */
	public Matcher(String task, double threshold) {
		judgementData = new HashMap<Integer, String>();
		this.task = task;
		entailments = 0;
		correctness = 0;
		this.threshold = threshold;
		errors = new ArrayList<Integer>();
	}

	/** Runs the matcher over the data set, filling judgements and statistics. */
	public abstract void match();

	/** Adds one occurrence of {@code token} to {@code counts} (single map lookup). */
	private static void increment(HashMap<String, Float> counts, String token) {
		Float current = counts.get(token);
		counts.put(token, current == null ? 1f : current + 1f);
	}

	/**
	 * Replaces every count c with 1/c in place, turning raw occurrence counts
	 * into the inverse-frequency weight used by the matchers.
	 */
	private static void invertCounts(HashMap<String, Float> counts) {
		for (Map.Entry<String, Float> entry : counts.entrySet()) {
			entry.setValue(1 / entry.getValue());
		}
	}

	/** Builds {@link #idfNormal} from the raw, whitespace-split pair texts. */
	private static void calculateInverseDocumentFrequency() {
		idfNormal = new HashMap<String, Float>();

		for (RTEData rteData : RTEData.RTEDataList) {
			for (String word : rteData.getText().split(" ")) {
				increment(idfNormal, word);
			}
			for (String word : rteData.getHypothesis().split(" ")) {
				increment(idfNormal, word);
			}
		}

		invertCounts(idfNormal);
	}

	/** Builds {@link #idfPreProcessed} from the parsed sentences' lemmas. */
	private static void calculateInverseDocumentFrequencyPreProcessed() {
		idfPreProcessed = new HashMap<String, Float>();
		tallyLemmasInto(idfPreProcessed, false);
	}

	/**
	 * Rebuilds {@link #idfPreProcessed} with apostrophes stripped from every
	 * lemma. As in the original implementation, the stripped lemma is also
	 * written back onto each node via {@code setLemma}.
	 */
	public static void calculateIDFStripped() {
		idfPreProcessed = new HashMap<String, Float>();
		tallyLemmasInto(idfPreProcessed, true);
	}

	/**
	 * Counts every lemma of every text and hypothesis sentence into
	 * {@code counts}, then inverts the counts (shared body of the two
	 * preprocessed-IDF builders above).
	 *
	 * @param stripApostrophes when true, removes {@code '} from each lemma and
	 *                         stores the stripped form back on the node
	 */
	private static void tallyLemmasInto(HashMap<String, Float> counts, boolean stripApostrophes) {
		// Iterate over all pairs.
		for (RTEPreprocessedData rteData : RTEPreprocessedData.rtePreprocessedDataList) {
			countSentenceLemmas(counts, rteData.getTextSentences(), stripApostrophes);
			countSentenceLemmas(counts, rteData.getHypothesisSentences(), stripApostrophes);
		}
		invertCounts(counts);
	}

	/** Counts the lemma of every node of every sentence into {@code counts}. */
	private static void countSentenceLemmas(HashMap<String, Float> counts,
			ArrayList<RTESentence> sentences, boolean stripApostrophes) {
		for (RTESentence sentence : sentences) {
			for (RTENode node : sentence.getNodes()) {
				String lemma = node.getLemma();
				if (stripApostrophes) {
					lemma = lemma.replaceAll("'", "");
					// Side effect preserved from the original stripped variant.
					node.setLemma(lemma);
				}
				increment(counts, lemma);
			}
		}
	}

	/** Records the judgement for pair {@code id} as "YES" (entailed) or "NO". */
	public void addJudgement(int id, boolean val) {
		judgementData.put(id, val ? "YES" : "NO");
	}

	/** @return the mutable pair-id -> "YES"/"NO" judgement map */
	public HashMap<Integer, String> getJudgementData() {
		return this.judgementData;
	}

	@Override
	public String toString() {
		return task + ": #entailment: " + entailments + ", correctness: " + correctness + ", cases: " + numberOfTextCases + ". Threshold: " + threshold;
	}
}
