package exp.ranking;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.TreeMap;
import java.util.Vector;

import opennlp.tools.tokenize.Tokenizer;

import tools.FileReaderEnhance;
import tools.FileWriterEnhance;
import tools.ObjectTools;
import tools.nlp.TokenizerSimple;
import tools.similarity.Cosine;
import tools.twitter.TweetTidy;

import edu.hit.irlab.nlp.stopwords.StopWords;
import exp.URLContent1;

/**
 * Outputs the top-N most similar tweets for each article highlight.
 * The similarity metric is cosine similarity over tokenized text with
 * stopwords removed.
 *
 * @author lzhou
 */
public class Ex5 {

	// Stopword list loaded once at class-load time.
	static Vector<String> sw = (new StopWords("data/stopwords.dat")).getStopwords();
	// Cosine.getCosine expects a HashMap<String, Boolean> lookup table, so the
	// list is converted to a map here (keep HashMap — the project API requires it).
	static HashMap<String, Boolean> stopwords = new HashMap<String, Boolean>();
	static {
		for (String stopword : sw) {
			stopwords.put(stopword, true);
		}
	}
	static TokenizerSimple ts = new TokenizerSimple();
	static Tokenizer tokenizer = ts.getTokenizer();

	/**
	 * For every serialized highlight file under {@code hcPath}, ranks the tweets
	 * in the same-named file under {@code tPath} against each highlight by cosine
	 * similarity and writes the top results to a same-named file under
	 * {@code outputPath}.
	 *
	 * @param tPath      directory of plain-text tweet files, one tweet per line
	 *                   (paths are built by simple concatenation, so it must end with '/')
	 * @param hcPath     directory of serialized {@code ArrayList<URLContent1>} objects
	 * @param outputPath output directory (must end with '/')
	 * @param topN       number of distinct similarity scores to emit per highlight;
	 *                   note that tweets with identical scores are concatenated
	 *                   under one score, so more than topN tweets may be printed
	 * @throws IllegalArgumentException if {@code hcPath} is not a readable directory
	 */
	public void run(String tPath, String hcPath, String outputPath, int topN) {
		File[] hcFiles = new File(hcPath).listFiles();
		if (hcFiles == null) {
			// listFiles() returns null when the path does not exist or is not a
			// directory; fail fast with a clear message instead of an opaque NPE.
			throw new IllegalArgumentException("Not a readable directory: " + hcPath);
		}
		for (File file : hcFiles) {
			// NOTE(review): split on "\n" leaves a trailing '\r' per line if the
			// input files are CRLF-encoded — confirm the corpus is LF-only.
			String[] tweets = FileReaderEnhance.readToString(tPath + file.getName(), "UTF-8").split("\n");

			@SuppressWarnings("unchecked")
			ArrayList<URLContent1> ucs = (ArrayList<URLContent1>) ObjectTools.readFromFile(file);

			StringBuilder sb = new StringBuilder();
			for (URLContent1 uc : ucs) {
				sb.append("\n-----------------------------------------------------------------------------\n");
				sb.append(uc.url);
				sb.append("\n-----------------------------------------------------------------------------\n");
				for (int i = 0; i < uc.sortedContent.size(); i++) {
					sb.append("\nHighlights: ");
					sb.append(uc.highlights.get(i));
					sb.append('\n');
					// score -> tweet(s); TreeMap keeps scores ordered so we can
					// iterate from the highest score downwards.
					TreeMap<Double, String> sorted = new TreeMap<>();
					// Tweets already scored for this highlight (after tidying),
					// so exact duplicates are ranked only once.
					HashSet<String> doneTweets = new HashSet<>();
					String[] words = tokenizer.tokenize(uc.highlights.get(i));

					for (String tweet : tweets) {
						String tweet1 = TweetTidy.doTidyAll(tweet);
						// add() returns false for duplicates — skip them.
						if (!doneTweets.add(tweet1)) {
							continue;
						}
						Double f = Cosine.getCosine(words, tokenizer.tokenize(tweet1), stopwords);
						// Tweets with an identical score share one map entry,
						// joined by the "\t,,\t" separator.
						if (sorted.containsKey(f)) {
							sorted.put(f, sorted.get(f) + "\t,,\t" + tweet1);
						} else {
							sorted.put(f, tweet1);
						}
					}
					sb.append("=================================================\n");
					int count1 = 0;
					for (Double f : sorted.descendingKeySet()) {
						// Cosine similarity is non-negative; once we hit zero,
						// every remaining score is zero too — stop early.
						if (f <= 0.0) {
							break;
						}
						sb.append(f);
						sb.append('\t');
						sb.append(sorted.get(f));
						sb.append('\n');
						count1++;
						if (count1 >= topN) {
							break;
						}
					}
					sb.append("\n=================================================\n");
				}
			}
			// NOTE(review): FileWriterEnhance is never explicitly closed here —
			// confirm WriteToFile flushes and closes the underlying stream.
			FileWriterEnhance fwe = new FileWriterEnhance(outputPath + file.getName(), "UTF-8");
			fwe.WriteToFile(sb.toString());
		}
	}

	/**
	 * Entry point: redirects stdout to a log file, then ranks tweets against
	 * highlights with the hard-coded experiment paths below.
	 */
	public static void main(String[] args) {

		// Ensure the log directory exists; otherwise the PrintStream constructor
		// fails with FileNotFoundException on a fresh checkout.
		new File("./logs").mkdirs();
		try {
			System.setOut(new PrintStream(new File("./logs/Ex5.out.log"), "UTF-8"));
		} catch (FileNotFoundException | UnsupportedEncodingException e) {
			e.printStackTrace();
		}

		// Serialized highlight/content objects (one file per article).
		String hcPath = "data/_newData/obj/highlights_contents_ROUGE/";

		// Plain-text tweets, one per line, same file names as hcPath.
		String tPath = "data/_newData/plainText_filtered/";

		// Where the ranked output is written.
		String output = "data/_newData/plainText_filtered_ranked_1/";

		/**
		 * Top-N ranked tweets for each highlight
		 */
		int topN = 3;

		Ex5 ex5 = new Ex5();
		ex5.run(tPath, hcPath, output, topN);
	}
}
