package edu.cornell.cs4740.postag;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import edu.cornell.cs4740.postag.hmm.HMM_viterbi;
import edu.cornell.cs4740.postag.ngrams.Bigram;
import edu.cornell.cs4740.postag.ngrams.Trigram;
import edu.cornell.cs4740.postag.parsing.ParseFile;
import edu.cornell.cs4740.postag.parsing.Sentence;
import edu.cornell.cs4740.postag.parsing.WordContainer;
import edu.cornell.cs4740.postag.utils.Utilities;

/**
 * Driver for a part-of-speech tagger: trains emission and tag-transition
 * models on a labeled corpus, then evaluates an HMM/Viterbi tagger and a
 * baseline tagger on a held-out validation split.
 */
public class PosTagging {

	/**
	 * Trains on the file named by {@code args[0]}, holds out 30% of the
	 * training sentences (after shuffling) as a validation set, estimates
	 * emission probabilities plus bigram/trigram tag-transition
	 * probabilities, and prints the validation accuracy of the HMM tagger
	 * followed by the accuracy of the baseline tagger.
	 *
	 * @param args args[0] = training-file path, args[1] = test-file path
	 * @throws Exception if parsing or file I/O fails
	 */
	public static void main(String[] args) throws Exception {
		String trainingPath = args[0];
		String testPath = args[1];
		List<Sentence> allTraining = ParseFile.parseTrainingFile(trainingPath);
		// NOTE(review): the parsed test set is currently unused below; kept
		// because parsing validates the file — confirm whether test-set
		// tagging/output was meant to be wired in here.
		List<Sentence> test = ParseFile.parseTestFile(testPath);

		int sizeOfEvaluationSet = (int) (0.3 * allTraining.size());

		// Random train/validation split: shuffle, then hold out the first 30%.
		Collections.shuffle(allTraining);
		List<Sentence> validation = allTraining.subList(0, sizeOfEvaluationSet);
		List<Sentence> partialTraining = allTraining.subList(
				sizeOfEvaluationSet, allTraining.size());

		// Collect every adjacent (previous, current) word pair in the
		// training portion.
		// NOTE(review): prevWc is never reset between sentences, so bigrams
		// also span sentence boundaries — confirm this is intended.
		List<Bigram<WordContainer>> bigramList = new ArrayList<Bigram<WordContainer>>();
		WordContainer prevWc = null;
		for (Sentence sentence : partialTraining) {
			for (WordContainer wc : sentence.getAllWords()) {
				if (prevWc == null) {
					prevWc = wc;
				} else {
					bigramList.add(new Bigram<WordContainer>(prevWc, wc));
					prevWc = wc;
				}
			}
		}

		// Collect every consecutive word triple, with the same
		// cross-sentence caveat as the bigram pass above.
		List<Trigram<WordContainer>> trigramList = new ArrayList<Trigram<WordContainer>>();
		WordContainer prevPrevWcTri = null;
		WordContainer prevWcTri = null;
		for (Sentence sentence : partialTraining) {
			for (WordContainer wc : sentence.getAllWords()) {
				if (prevWcTri == null) {
					prevWcTri = wc;
				} else if (prevPrevWcTri == null) {
					prevPrevWcTri = prevWcTri;
					prevWcTri = wc;
				} else {
					trigramList.add(new Trigram<WordContainer>(prevPrevWcTri,
							prevWcTri, wc));
					prevPrevWcTri = prevWcTri;
					prevWcTri = wc;
				}
			}
		}

		// Build the tag set and the vocabulary from the training portion.
		Set<String> partsOfSpeeches = new HashSet<String>();
		Set<String> words = new HashSet<String>();
		for (Sentence sentence : partialTraining) {
			for (WordContainer wc : sentence.getAllWords()) {
				partsOfSpeeches.add(wc.getPos());
				words.add(wc.getWord());
			}
		}

		// Register a dense index for each POS tag with Utilities.
		int posIndex = 0;
		for (String pos : partsOfSpeeches) {
			Utilities.addIndexPOSPair(posIndex, pos);
			posIndex++;
		}

		// Register a dense index for each word with Utilities.
		int wordIndex = 0;
		for (String word : words) {
			Utilities.add_word_index_pair(word, wordIndex);
			wordIndex++;
		}

		// ===========================================================================================

		// Emission counts: counts[word][tag] and the per-tag totals.
		int[][] counts = new int[Utilities.getNumWords()][Utilities.getNumPos()];
		int[] posCounts = new int[Utilities.getNumPos()];
		for (Sentence s : partialTraining) {
			for (WordContainer wc : s.getAllWords()) {
				counts[Utilities.word_to_index(wc.getWord())][Utilities
						.getIndexForPos(wc.getPos())]++;
				posCounts[Utilities.getIndexForPos(wc.getPos())]++;
			}
		}

		// Emission model: wordPosProbs[w][t] = count(w, t) / count(t).
		// Every tag in the lexicon occurs at least once, so posCounts[j] > 0.
		double[][] wordPosProbs = new double[Utilities.getNumWords()][Utilities
				.getNumPos()];
		for (int i = 0; i < Utilities.getNumWords(); i++) {
			for (int j = 0; j < Utilities.getNumPos(); j++) {
				wordPosProbs[i][j] = counts[i][j] * 1.0 / posCounts[j];
			}
		}

		// =======================================================================================

		// Bigram transitions: bi_counts[current][previous], with
		// precursorCount[previous] as the normalizer.
		int[][] bi_counts = new int[Utilities.getNumPos()][Utilities
				.getNumPos()];
		int[] precursorCount = new int[Utilities.getNumPos()];
		for (Bigram<WordContainer> bg : bigramList) {
			bi_counts[Utilities.getIndexForPos(bg.getItem2().getPos())][Utilities
					.getIndexForPos(bg.getItem1().getPos())]++;
			precursorCount[Utilities.getIndexForPos(bg.getItem1().getPos())]++;
		}

		// bigramProbs[i][j] = P(tag i | previous tag j).
		// NOTE(review): a tag that never occurs as a precursor yields 0/0 =
		// NaN here — confirm the HMM tolerates that.
		double[][] bigramProbs = new double[Utilities.getNumPos()][Utilities
				.getNumPos()];
		for (int i = 0; i < bi_counts.length; i++) {
			for (int j = 0; j < bi_counts[i].length; j++) {
				bigramProbs[i][j] = bi_counts[i][j] * 1.0 / precursorCount[j];
			}
		}

		// Trigram transitions: tri_counts[third][first][second], normalized
		// by the (first, second) context count.
		int[][][] tri_counts = new int[Utilities.getNumPos()][Utilities
				.getNumPos()][Utilities.getNumPos()];
		int[][] tri_precursorCount = new int[Utilities.getNumPos()][Utilities
				.getNumPos()];
		for (Trigram<WordContainer> tg : trigramList) {
			tri_counts[Utilities.getIndexForPos(tg.getItem3().getPos())][Utilities
					.getIndexForPos(tg.getItem1().getPos())][Utilities
					.getIndexForPos(tg.getItem2().getPos())]++;
			tri_precursorCount[Utilities.getIndexForPos(tg.getItem1().getPos())][Utilities
					.getIndexForPos(tg.getItem2().getPos())]++;
		}

		// trigramProbs[k][i][j] = P(tag k | tags i, j); same NaN caveat as
		// the bigram table when a context never occurs.
		double[][][] trigramProbs = new double[Utilities.getNumPos()][Utilities
				.getNumPos()][Utilities.getNumPos()];
		for (int k = 0; k < tri_counts.length; k++) {
			for (int i = 0; i < tri_counts[k].length; i++) {
				for (int j = 0; j < tri_counts[k][i].length; j++) {
					trigramProbs[k][i][j] = tri_counts[k][i][j] * 1.0
							/ tri_precursorCount[i][j];
				}
			}
		}

		// =======================================================================================

		// Distribution over the tag of the FIRST word of a sentence.
		double[] firstPosProbs = new double[Utilities.getNumPos()];

		Map<String, Long> firstPosCounts = new HashMap<String, Long>();

		for (Sentence s : partialTraining) {
			String firstPos = s.getWord(0).getPos();
			Long previous = firstPosCounts.get(firstPos);
			long firstPosCount = (previous == null) ? 1L
					: previous.longValue() + 1L;
			// Long.valueOf instead of the deprecated Long constructor.
			firstPosCounts.put(firstPos, Long.valueOf(firstPosCount));
		}

		for (int i = 0; i < firstPosProbs.length; i++) {
			String pos = Utilities.index_to_pos(i);
			long count = (firstPosCounts.containsKey(pos) ? firstPosCounts.get(
					pos).longValue() : 0);
			firstPosProbs[i] = ((double) count)
					/ ((double) partialTraining.size());
		}

		// Distribution over the SECOND word's tag given the first word's tag:
		// spp_counts[tag(word 1)][tag(word 0)].
		// NOTE(review): normalized by ALL training sentences, including
		// length-1 ones that contribute no count — confirm this denominator
		// is intended.
		double[][] secondPosProbs = new double[Utilities.getNumPos()][Utilities
				.getNumPos()];
		int[][] spp_counts = new int[Utilities.getNumPos()][Utilities
				.getNumPos()];
		int i_pos1, i_pos2;
		for (Sentence s : partialTraining) {
			if (s.getLength() > 1) {
				i_pos1 = Utilities.getIndexForPos(s.getWord(1).getPos());
				i_pos2 = Utilities.getIndexForPos(s.getWord(0).getPos());
				spp_counts[i_pos1][i_pos2]++;
			}
		}
		for (int i = 0; i < spp_counts.length; i++) {
			for (int j = 0; j < spp_counts[i].length; j++) {
				secondPosProbs[i][j] = spp_counts[i][j] * 1.0
						/ partialTraining.size();
			}
		}

		// ====================================================================================

		HMM_viterbi hmm = new HMM_viterbi(Utilities.getNumPos(), wordPosProbs,
				bigramProbs, trigramProbs, firstPosProbs, secondPosProbs);

		// Deep-copy the validation sentences so the HMM's predicted tags do
		// not overwrite the gold tags we score against.
		List<Sentence> toValidateByHmm = new ArrayList<Sentence>();
		for (Sentence s : validation) {
			List<WordContainer> allWords = new ArrayList<WordContainer>();
			for (WordContainer wc : s.getAllWords()) {
				allWords.add(new WordContainer(wc.getWord(), wc.getPos()));
			}
			toValidateByHmm.add(new Sentence(allWords));
		}

		// Tag each copied sentence with the Viterbi decoder.
		for (Sentence s : toValidateByHmm) {
			hmm.setSentence(s);
			List<String> poses = hmm.findMostProbablePOSs(true);
			for (int i = 0; i < poses.size(); i++) {
				s.getWord(i).setPos(poses.get(i));
			}
		}

		double hmmAccuracy = Utilities.computeAccuracy(toValidateByHmm,
				validation);
		System.out.println("Computing accuracy of Hidden Markov Model: ");
		System.out.println(hmmAccuracy);

		// Baseline tagger for comparison.
		Baseline base = new Baseline(partialTraining);
		List<Sentence> predictions = base.getPredictions(validation);
		double accuracy = Utilities.computeAccuracy(predictions, validation);
		System.out.println(accuracy);
	}

	/**
	 * Writes tagged sentences to {@code filename}: each sentence is preceded
	 * by a literal "&lt;s&gt; &lt;s&gt;" marker line, followed by one
	 * "POS word" pair per line.
	 *
	 * @param filename    path of the output file (overwritten if it exists)
	 * @param predictions tagged sentences to write
	 * @throws Exception if the file cannot be created or written
	 */
	public static void outputFile(String filename, List<Sentence> predictions)
			throws Exception {
		// Close (and thereby flush) the writer in all cases; the original
		// leaked it, which could leave the output file truncated or empty.
		BufferedWriter bw = null;
		try {
			bw = new BufferedWriter(new FileWriter(filename));
			for (int i = 0; i < predictions.size(); i++) {
				Sentence sentence = predictions.get(i);
				bw.append("<s> <s>");
				bw.newLine();

				for (int j = 0; j < sentence.getLength(); j++) {
					WordContainer wc = sentence.getWord(j);
					String line = wc.getPos() + " " + wc.getWord();
					bw.append(line);
					bw.newLine();
				}
			}
		} finally {
			if (bw != null) {
				bw.close();
			}
		}
	}
}
