package exp.eval;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.TreeMap;
import java.util.Vector;

import opennlp.tools.tokenize.Tokenizer;
import tools.FileReaderEnhance;
import tools.FileWriterEnhance;
import tools.nlp.TokenizerSimple;
import tools.similarity.Cosine;
import tools.twitter.TweetTidy;
import edu.hit.irlab.nlp.stopwords.StopWords;

public class ConvertL2R {

	// Topic name; selects both the input tweet file and the output file name.
	static String topic = "Oscar 2013";

	/**
	 * Converts raw learning-to-rank scores into a near-duplicate-filtered tweet
	 * list for {@link #topic}.
	 *
	 * Pipeline: read one double score per line from the .result file and one
	 * tweet per line from the plain-text file, group tweet line numbers by
	 * score, then walk the scores in ascending key order (TreeMap natural
	 * order) emitting each tweet whose cosine similarity to every
	 * already-emitted tweet is at most 0.8. Collection stops once 1001 tweets
	 * have been gathered; the result is written to
	 * data/_newData/L2R_result/&lt;topic&gt;.
	 */
	public static void main(String[] args) {

		// Redirect stdout to a log file; failure to open the log is non-fatal.
		try {
			System.setOut(new PrintStream(new File("./logs/EvalL2R.out.log"), "UTF-8"));
			// System.setErr(new PrintStream(new File("./logs/fe.err.log"),
			// "UTF-8"));
		} catch (FileNotFoundException | UnsupportedEncodingException e) {
			e.printStackTrace();
		}

		// Load stopwords into a map used as a set for O(1) membership tests
		// (Cosine.getCosine expects this HashMap<String, Boolean> shape).
		Vector<String> sw = (new StopWords("data/stopwords.dat")).getStopwords();
		HashMap<String, Boolean> stopwords = new HashMap<String, Boolean>();
		for (String stopword : sw) {
			stopwords.put(stopword, true);
		}
		TokenizerSimple ts = new TokenizerSimple();
		Tokenizer tokenizer = ts.getTokenizer();

		// NOTE(review): scores are split on CRLF while tweets are split on LF
		// — assumes the .result file was written with Windows line endings;
		// confirm, otherwise parseDouble will fail on trailing '\r'.
		String[] resultText = FileReaderEnhance.readToString("data/_newData/experiment/1/1.result", "UTF-8").split(
				"\r\n");
		String[] tweetText = FileReaderEnhance.readToString("data/_newData/plainText_filtered/" + topic, "UTF-8")
				.split("\n");
		Double[] result = new Double[resultText.length];
		for (int i = 0; i < resultText.length; i++) {
			result[i] = Double.parseDouble(resultText[i]);
		}

		// Group tweet line numbers by score; TreeMap keeps keys sorted
		// ascending. computeIfAbsent replaces the containsKey/get/put dance.
		TreeMap<Double, ArrayList<Integer>> sorted = new TreeMap<>();
		for (int i = 0; i < result.length; i++) {
			sorted.computeIfAbsent(result[i], k -> new ArrayList<>()).add(i);
		}

		ArrayList<String> output = new ArrayList<>();
		// NOTE(review): ascending key order means the LOWEST-scored tweets are
		// emitted first. For a learning-to-rank result this looks inverted —
		// confirm whether sorted.descendingKeySet() was intended.
		collect: for (Double key : sorted.keySet()) {
			ArrayList<Integer> resultlist = sorted.get(key);
			System.out.println(key + "\t" + resultlist);
			for (int lineNum : resultlist) {
				// Hoist the tidy call: the original recomputed it up to three
				// times per tweet (similarity check, log line, output add).
				String tidied = TweetTidy.doTidyAll(tweetText[lineNum]);
				// Skip tweets too similar (> 0.8 cosine) to anything emitted.
				// The original set a dead 'flag' then continued the outer loop
				// from inside the scan; a simple break + check is equivalent.
				boolean nearDuplicate = false;
				for (String o : output) {
					if (Cosine.getCosine(tokenizer.tokenize(tidied), tokenizer.tokenize(o), stopwords) > 0.8) {
						nearDuplicate = true;
						break;
					}
				}
				if (nearDuplicate) {
					continue;
				}
				System.out.println("\t" + tidied + "\t" + tweetText[lineNum]);
				output.add(tidied);
				// Cap at 1001 entries (break fires only once size exceeds 1000,
				// matching the original off-by-one behavior).
				if (output.size() > 1000) {
					break collect;
				}
			}
		}

		// Write the surviving tweets, one per line, to the topic's result file.
		StringBuilder sb = new StringBuilder();
		for (String o : output) {
			sb.append(o);
			sb.append('\n');
		}
		FileWriterEnhance fwe = new FileWriterEnhance("data/_newData/L2R_result/" + topic, "UTF-8");
		fwe.WriteToFile(sb.toString());
	}
}
