package nlp;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;

import edu.stanford.nlp.tagger.maxent.MaxentTagger;

public class ProcessCorpus {

	/**
	 * Normalizes raw corpus text for downstream processing: "==" and "!"
	 * become sentence-ending periods, while dashes, pipes, "@" and commas
	 * are stripped entirely.
	 *
	 * @param content raw utterance text
	 * @return the cleaned text
	 */
	public static String clean(String content) {
		return content.replaceAll("==", ".").replaceAll("-", "").replaceAll(
				"\\|", "").replaceAll("@", "").replaceAll("!", ".").replaceAll(
				",", "");
	}

	/**
	 * Reads the tagged corpus and writes one raw-text file per tag under
	 * {@code trainingFolder/(general|all)/raw/<tag>}, each line holding one
	 * cleaned utterance. Tags in the exclusion list produce no file.
	 *
	 * @param trainingFolder folder containing the corpus and output subfolders
	 * @param corpusFile     name of the tagged corpus file inside the folder
	 * @param general        if {@code true}, index each utterance only under
	 *                       its first tag (written to "general"); otherwise
	 *                       under every tag (written to "all")
	 * @throws IOException if the corpus cannot be read or an output file
	 *                     cannot be written
	 */
	public static void createTagFiles(String trainingFolder, String corpusFile,
			boolean general) throws IOException {

		Map<String, ArrayList<String>> tagMap = new HashMap<String, ArrayList<String>>();

		// Tags whose utterances should not produce an output file.
		List<String> excludedTags = new ArrayList<String>();
		Collections.addAll(excludedTags, "m", "rt", "x", "z", "2");

		BufferedReader br = new BufferedReader(new FileReader(trainingFolder
				+ File.separatorChar + corpusFile));
		try {
			String line;
			while ((line = br.readLine()) != null) {
				TaggedUtterance tu = new TaggedUtterance(line);
				if (general) {
					// Only index under the first (primary) tag, if any.
					if (!tu.tags.isEmpty())
						addUtterance(tagMap, tu.tags.get(0), tu.textContent);
				} else {
					for (String tag : tu.tags)
						addUtterance(tagMap, tag, tu.textContent);
				}
			}
		} finally {
			br.close(); // reader was leaked in the original version
		}

		for (Map.Entry<String, ArrayList<String>> entry : tagMap.entrySet()) {
			String key = entry.getKey();
			if (excludedTags.contains(key))
				continue;

			PrintWriter out = new PrintWriter(new FileWriter(trainingFolder
					+ File.separatorChar + (general ? "general" : "all")
					+ File.separatorChar + "raw" + File.separatorChar + key));
			try {
				for (String utterance : entry.getValue())
					out.println(utterance);
			} finally {
				out.close();
			}
		}

	}

	/** Appends the cleaned utterance text to the bucket for {@code tag}. */
	private static void addUtterance(Map<String, ArrayList<String>> tagMap,
			String tag, String textContent) {
		ArrayList<String> bucket = tagMap.get(tag);
		if (bucket == null) {
			bucket = new ArrayList<String>();
			tagMap.put(tag, bucket);
		}
		bucket.add(clean(textContent));
	}

	/**
	 * Pipeline step: tagged corpus -&gt; raw (cleaned) corpus -&gt; stemmed corpus.
	 * Writes {@code corpusFile + "_raw"} next to the corpus, then runs the
	 * stemmer over it. I/O errors are reported to stdout, not rethrown.
	 *
	 * @param trainingFolder    folder containing the corpus
	 * @param corpusFile        name of the tagged corpus file
	 * @param stemmedCorpusFile name of the stemmed output file
	 */
	public static void stemCorpus(String trainingFolder, String corpusFile,
			String stemmedCorpusFile) {
		String rawFile = trainingFolder + File.separatorChar + corpusFile
				+ "_raw";
		try {
			BufferedReader br = new BufferedReader(new FileReader(
					trainingFolder + File.separatorChar + corpusFile));
			try {
				// create raw corpus
				PrintWriter out = new PrintWriter(new FileWriter(rawFile));
				try {
					String line;
					while ((line = br.readLine()) != null) {
						TaggedUtterance tu = new TaggedUtterance(line);
						out.println(clean(tu.textContent));
					}
				} finally {
					out.close();
				}
			} finally {
				br.close(); // reader was leaked in the original version
			}

			Stemmer.fileStemming(rawFile, trainingFolder + File.separatorChar
					+ stemmedCorpusFile);

		} catch (IOException e) {
			System.out.println("IO Exception...");
			e.printStackTrace();
		}
	}

	/**
	 * Counts word occurrences in the stemmed corpus and writes them to
	 * {@code frequencyFile} as tab-separated "word\tcount" lines, sorted by
	 * descending frequency. Words are split on space, '.', '?' and ','.
	 * I/O errors are reported to stdout, not rethrown.
	 *
	 * @param corpusFolder  folder containing the stemmed corpus
	 * @param stemmedFile   name of the stemmed corpus file
	 * @param frequencyFile name of the frequency output file
	 */
	public static void computeAllWordFrequency(String corpusFolder,
			String stemmedFile, String frequencyFile) {
		Map<String, Integer> allWordFrequency = new HashMap<String, Integer>();

		try {
			BufferedReader br = new BufferedReader(new FileReader(corpusFolder
					+ File.separatorChar + stemmedFile));
			try {
				String line;
				while ((line = br.readLine()) != null) {
					for (String s : line.split("\\ |\\.|\\?|,")) {
						if (s.length() > 0) {
							Integer count = allWordFrequency.get(s);
							allWordFrequency.put(s, count == null ? 1
									: count + 1);
						}
					}
				}
			} finally {
				br.close(); // reader was leaked in the original version
			}

			// Sort entries by descending count before writing.
			List<Map.Entry<String, Integer>> wordFr = new ArrayList<Map.Entry<String, Integer>>(
					allWordFrequency.entrySet());
			Collections.sort(wordFr,
					new Comparator<Map.Entry<String, Integer>>() {

						@Override
						public int compare(Entry<String, Integer> a,
								Entry<String, Integer> b) {
							return b.getValue().compareTo(a.getValue());
						}

					});

			PrintWriter out = new PrintWriter(new FileWriter(corpusFolder
					+ File.separatorChar + frequencyFile));
			try {
				for (Map.Entry<String, Integer> entry : wordFr)
					out.println(entry.getKey() + "\t" + entry.getValue());
			} finally {
				out.close();
			}

		} catch (FileNotFoundException e) {
			System.out.println("File not found exception...");
			e.printStackTrace();
		} catch (IOException e) {
			System.out.println("IO exception...");
			e.printStackTrace();
		}
	}

	/**
	 * Not yet implemented.
	 *
	 * @param tagfileName tag file to extract keywords from
	 */
	public static void computeKeywords(String tagfileName) {
		// TODO: implement keyword extraction per tag file.
	}

	/**
	 * Runs the full preprocessing pipeline over the "test_training" folder:
	 * per-tag files (general and all), corpus stemming, word frequencies,
	 * tag-file stemming, and POS tagging.
	 *
	 * @param args args[0] is the path to the Maxent tagger model file
	 */
	public static void main(String[] args) throws Exception {

		boolean general = true;

		ProcessCorpus.createTagFiles("test_training", "corpus", general);
		ProcessCorpus.createTagFiles("test_training", "corpus", !general);

		ProcessCorpus.stemCorpus("test_training", "corpus", "corpus_stemmed");

		ProcessCorpus.computeAllWordFrequency("test_training",
				"corpus_stemmed", "corpus_freq");

		Stemmer.stemRawTagFiles("test_training");

		MaxentTagger tagger = new MaxentTagger(args[0]);
		PosTagger posTagger = new PosTagger(tagger);

		posTagger.computeFrequentWords("test_training/all");
		PosTagger.posTagTrainingFolder(posTagger, "test_training");

	}
}
