package exp.ranking;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.TreeMap;
import java.util.Vector;

import opennlp.tools.tokenize.Tokenizer;
import tools.FileReaderEnhance;
import tools.FileWriterEnhance;
import tools.ObjectTools;
import tools.nlp.TokenizerSimple;
import tools.similarity.Cosine;
import tools.twitter.TweetTidy;
import edu.hit.irlab.nlp.stopwords.StopWords;
import exp.URLContent1;

/**
 * Generates the gold standard for each topic. <br>
 * The gold standard is a ranking of tweets, one ranking per highlight sentence. <br>
 * Cosine similarity is used to score each tweet against the sentence.
 * 
 * @author Lanjun
 * 
 */
public class Ex4_2 {

	/** Directory of serialized highlight/content objects, one file per topic. */
	static final String hcPath = "data/objs/highlights_contents_ROUGE/";

	/** Directory of raw tweet files; file names mirror those under {@link #hcPath}. */
	static final String tPath = "data/test2/";

	/**
	 * Top-N ranked tweets kept for each highlight sentence
	 */
	static final int topN1 = 3;

	static Vector<String> sw = (new StopWords("data/stopwords.dat")).getStopwords();
	static HashMap<String, Boolean> stopwords = new HashMap<String, Boolean>();
	static {
		// Index the stopword list as a map for O(1) lookups inside the cosine call.
		for (String stopword : sw) {
			stopwords.put(stopword, true);
		}
	}
	static TokenizerSimple ts = new TokenizerSimple();
	static Tokenizer tokenizer = ts.getTokenizer();

	public static void main(String[] args) {

		try {
			System.setOut(new PrintStream(new File("./logs/Ex4_2.out.log")));
		} catch (FileNotFoundException e) {
			e.printStackTrace();
		}

		// FIX: listFiles() returns null when hcPath is missing or not a directory;
		// the original code would have thrown a NullPointerException here.
		File[] topicFiles = new File(hcPath).listFiles();
		if (topicFiles == null) {
			System.err.println("Cannot list directory: " + hcPath);
			return;
		}

		for (File file : topicFiles) {
			File tempFile = new File(tPath + file.getName());
			if (!tempFile.exists()) {
				// No tweet file for this topic; skip it.
				continue;
			}
			ArrayList<String> tweets = loadTweets(tempFile);

			@SuppressWarnings("unchecked")
			ArrayList<URLContent1> ucs = (ArrayList<URLContent1>) ObjectTools.readFromFile(file);

			StringBuilder sb = new StringBuilder();
			for (URLContent1 uc : ucs) {
				appendRankedHighlights(sb, uc, tweets);
			}
			// NOTE(review): FileWriterEnhance exposes no close() here — verify it
			// flushes/closes internally, otherwise output may be truncated.
			FileWriterEnhance fwe = new FileWriterEnhance("data/ranked_3/" + file.getName() + ".hl");
			fwe.WriteToFile(sb.toString());
		}
	}

	/**
	 * Reads one tweet file (UTF-8) and returns the tweet text — the first
	 * tab-;;-tab separated field — of every non-empty line.
	 * 
	 * @param tempFile tweet file for a single topic
	 * @return tweet texts in file order
	 */
	private static ArrayList<String> loadTweets(File tempFile) {
		// FIX: split on "\r?\n" so Windows line endings do not leave a trailing
		// '\r' on every tweet (the original split only on '\n').
		String[] tweetsTemp = FileReaderEnhance.readToString(tempFile, "UTF-8").split("\r?\n");
		ArrayList<String> tweets = new ArrayList<>();
		for (String l : tweetsTemp) {
			if (l == null || l.length() == 0) {
				continue;
			}
			String[] elements = l.split("\t;;\t");
			tweets.add(elements[0]);
		}
		return tweets;
	}

	/**
	 * For each highlight sentence of {@code uc}, ranks all tweets (deduplicated
	 * after tidying) by cosine similarity against the sentence tokens and appends
	 * the top {@link #topN1} entries with non-zero similarity to {@code sb}.
	 * 
	 * @param sb output buffer the report section is appended to
	 * @param uc one article: its URL plus highlight sentences
	 * @param tweets candidate tweets for this topic
	 */
	private static void appendRankedHighlights(StringBuilder sb, URLContent1 uc, ArrayList<String> tweets) {
		sb.append("\n-----------------------------------------------------------------------------\n");
		sb.append(uc.url);
		sb.append("\n-----------------------------------------------------------------------------\n");
		for (int i = 0; i < uc.highlights.size(); i++) {
			String content = uc.highlights.get(i);
			sb.append("\nHighlights: ");
			sb.append(content);
			sb.append('\n');
			String[] words = tokenizer.tokenize(content);

			// similarity -> tweet(s); tweets with equal scores are concatenated
			// with a "\t,,\t" separator so the unique-key map drops none of them.
			TreeMap<Double, String> sorted = new TreeMap<>();
			HashMap<String, Boolean> doneTweets = new HashMap<>();
			for (String tweet : tweets) {
				String tweet1 = TweetTidy.doTidyAll(tweet);
				if (doneTweets.containsKey(tweet1)) {
					// Duplicate after tidying — already scored once.
					continue;
				}
				doneTweets.put(tweet1, true);
				Double f = Cosine.getCosine(words, tokenizer.tokenize(tweet1), stopwords);
				if (sorted.containsKey(f)) {
					sorted.put(f, sorted.get(f) + "\t,,\t" + tweet);
				} else {
					sorted.put(f, tweet);
				}
			}
			sb.append("=================================================\nSentence: ");
			sb.append(content);
			sb.append("\n=================================================\n");
			int count1 = 0;
			for (Double f : sorted.descendingKeySet()) {
				if (f == 0.0) {
					// Descending order: everything from here on has zero overlap.
					break;
				}
				sb.append(f);
				sb.append('\t');
				sb.append(sorted.get(f));
				sb.append('\n');
				count1++;
				if (count1 >= topN1) {
					break;
				}
			}
		}
	}
}
