package exp.gold;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
import java.util.Vector;

import opennlp.tools.tokenize.Tokenizer;
import tools.FileReaderEnhance;
import tools.MapTools;
import tools.ObjectTools;
import tools.nlp.TokenizerSimple;
import tools.similarity.Cosine;
import tools.twitter.TweetTidy;
import dragon.nlp.tool.lemmatiser.EngLemmatiser;
import edu.hit.irlab.nlp.stopwords.StopWords;
import exp.URLContent1;

/**
 * Generates a rank of all highlights under a given topic.
 * <br /> The rank of each highlight depends on the number of similar tweets it corresponds to.
 * @author ljzhou
 *
 */
public class GenerateGold {
	static Vector<String> sw = (new StopWords("data/stopwords.dat")).getStopwords();
	// Stopword lookup table; only key membership matters, the Boolean value is never read.
	static HashMap<String, Boolean> stopwords = new HashMap<String, Boolean>();
	static {
		for (String stopword : sw) {
			stopwords.put(stopword, true);
		}
	}
	static TokenizerSimple ts = new TokenizerSimple();
	static Tokenizer tokenizer = ts.getTokenizer();
	static EngLemmatiser el = new EngLemmatiser("data/_DragonTool/nlpdata/lemmatiser", true, false);

	/**
	 * Tokenizes and lemmatizes {@code text}, then counts each non-stopword term.
	 * Stopword filtering is applied to the lemmatized form (lemmatize first, then
	 * filter), matching the counting logic used for both tweets and highlights.
	 *
	 * @param text the text to turn into a bag of words (caller tidies it first if needed)
	 * @return map from lemmatized word to its occurrence count in {@code text}
	 */
	private static Map<String, Integer> toWordCounts(String text) {
		String[] words = tokenizer.tokenize(text);
		Map<String, Integer> counts = new HashMap<>();
		for (String word : words) {
			String lemma = el.lemmatize(word);
			if (!stopwords.containsKey(lemma)) {
				counts.merge(lemma, 1, Integer::sum); // insert 1 or increment
			}
		}
		return counts;
	}

	/**
	 * For every serialized highlight file under {@code hcPath}, counts how many tweets
	 * (read from the same-named file under {@code tPath}) are cosine-similar to each
	 * highlight, prints the resulting rank to stdout, and serializes the sorted rank
	 * to ./data/_newData/gold/&lt;file name&gt;.
	 *
	 * @param tPath directory (with trailing slash) of plain-text tweet files, one tweet per line
	 * @param hcPath directory of serialized {@code ArrayList<URLContent1>} files
	 * @param simThreshold minimum cosine similarity for a tweet to count as "similar"
	 */
	public void run(String tPath, String hcPath, double simThreshold) {
		File[] files = new File(hcPath).listFiles();
		if (files == null) {
			// listFiles() returns null when hcPath is missing or not a directory;
			// without this check the for-each below throws a NullPointerException.
			System.err.println("Cannot list directory: " + hcPath);
			return;
		}
		for (File file : files) {
			String[] tweets = FileReaderEnhance.readToString(tPath + file.getName(), "UTF-8").split("\n");

			@SuppressWarnings("unchecked")
			ArrayList<URLContent1> ucs = (ArrayList<URLContent1>) ObjectTools.readFromFile(file);

			// Convert every tweet to a word-count bag (tweets are tidied first).
			ArrayList<Map<String, Integer>> allTweets = new ArrayList<>();
			for (String tweet : tweets) {
				allTweets.add(toWordCounts(TweetTidy.doTidyAll(tweet)));
			}

			// Convert every highlight to a word-count bag, remembering its raw text
			// at the same index so counts can be keyed by text later.
			ArrayList<Map<String, Integer>> allHLs = new ArrayList<>();
			ArrayList<String> allHLText = new ArrayList<>();
			for (URLContent1 uc : ucs) {
				for (String hl : uc.highlights) {
					allHLText.add(hl);
					allHLs.add(toWordCounts(hl));
				}
			}

			// For each highlight, count the tweets whose cosine similarity meets the threshold.
			Map<String, Integer> htCount = new HashMap<>();
			for (int i = 0; i < allHLText.size(); i++) {
				int count = 0;
				for (int j = 0; j < allTweets.size(); j++) {
					if (Cosine.getCosine(i, j, allHLs, allTweets) >= simThreshold) {
						count++;
					}
				}
				htCount.put(allHLText.get(i), count);
			}

			// Print highlights grouped by similar-tweet count, highest count first.
			TreeMap<Integer, ArrayList<String>> sorted = MapTools.sortMap(htCount, true);
			for (Integer count : sorted.descendingKeySet()) {
				System.out.println(count);
				for (String hl : sorted.get(count)) {
					System.out.println("\t" + hl);
				}
			}

			ObjectTools.writeToFile(sorted, "./data/_newData/gold/" + file.getName());
		}
	}

	/**
	 * Redirects stdout to a UTF-8 log file, then ranks highlights for every topic
	 * using a cosine-similarity threshold of 0.6.
	 */
	public static void main(String[] args) {
		try {
			System.setOut(new PrintStream(new File("./logs/GenerateGold.out.log"), "UTF-8"));
		} catch (FileNotFoundException | UnsupportedEncodingException e) {
			// Best-effort log redirection: fall back to the default stdout on failure.
			e.printStackTrace();
		}

		String hcPath = "data/_newData/obj/highlights_contents_ROUGE/";
		String tPath = "data/_newData/plainText_filtered/";
		double threshold = 0.6;

		GenerateGold gg = new GenerateGold();
		gg.run(tPath, hcPath, threshold);
	}
}
