package exp;

import java.io.File;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Map;
import java.util.Vector;

import opennlp.tools.tokenize.Tokenizer;
import tools.FileReaderEnhance;
import tools.nlp.TokenizerSimple;
import tools.similarity.Cosine;
import tools.twitter.TweetTidy;
import tools.twitter.URLTools;
import dragon.nlp.tool.lemmatiser.EngLemmatiser;
import edu.hit.irlab.nlp.stopwords.StopWords;

public class Statistics {
	// Directory holding one plain-text file per collection; each line of a
	// file is a single tweet.
	static String path = "data/_newData/plainText_filtered/";
	// Cosine-similarity threshold above which two tweets are treated as
	// near-duplicates of each other.
	static Double similarity = 0.7;
	static Vector<String> sw = (new StopWords("data/stopwords.dat")).getStopwords();
	// Stopword lookup table: HashMap gives O(1) membership tests instead of
	// linear scans over the Vector returned by StopWords.
	static HashMap<String, Boolean> stopwords = new HashMap<>();
	static EngLemmatiser el = new EngLemmatiser("data/_DragonTool/nlpdata/lemmatiser", true, false);

	static {
		// Index the stopword list once for constant-time containsKey checks.
		for (String stopword : sw) {
			stopwords.put(stopword, true);
		}
	}

	/**
	 * For every file under {@link #path}, prints two per-file statistics to
	 * stdout: the proportion of tweets containing at least one URL, and the
	 * proportion of tweets surviving exact/near-duplicate removal, together
	 * with the de-duplication time in milliseconds.
	 */
	public static void main(String[] args) {
		TokenizerSimple ts = new TokenizerSimple();
		Tokenizer tokenizer = ts.getTokenizer();

		for (File file : (new File(path)).listFiles()) {
			String[] lines = FileReaderEnhance.readToString(file, "UTF-8").split("\n");

			// The proportion of tweets containing URLs.
			int urlCount = countTweetsWithUrls(lines);
			System.out.println(file.getName() + "\t" + urlCount + "/" + lines.length + "\t"
					+ (double) urlCount / lines.length);

			// The proportion of tweets that are not repetitive.
			for (int i = 0; i < lines.length; i++) {
				lines[i] = TweetTidy.doTidyAll(lines[i]).toLowerCase();
			}
			long startMillis = System.currentTimeMillis();

			ArrayList<Map<String, Integer>> allTweets = buildTermVectors(lines, tokenizer);
			// BUG FIX: the original loop only counted near-duplicates removed by
			// the cosine test and silently skipped counting exact duplicates, so
			// the reported "not repetitive" proportion was inflated whenever
			// identical tweets occurred. Both kinds are counted now.
			int removed = removeDuplicates(lines, allTweets);
			long elapsedMillis = System.currentTimeMillis() - startMillis;

			System.out.println(file.getName() + "\t" + (lines.length - removed) + "/" + lines.length + "\t"
					+ (double) (lines.length - removed) / lines.length + "\t" + elapsedMillis);
		}
	}

	/**
	 * Counts how many lines contain at least one URL after HTML tidying.
	 *
	 * @param lines raw tweet lines
	 * @return number of lines with one or more URLs
	 */
	private static int countTweetsWithUrls(String[] lines) {
		int urlCount = 0;
		for (String line : lines) {
			line = TweetTidy.doTidyHTML(line);
			ArrayList<String> urls = URLTools.getURLs(line);
			if (!urls.isEmpty()) {
				urlCount++;
			}
		}
		return urlCount;
	}

	/**
	 * Builds a lemmatized, stopword-filtered term-frequency map for each tweet;
	 * index i of the result corresponds to lines[i] (required by
	 * {@code Cosine.getCosine}, which addresses tweets by line index).
	 *
	 * @param lines     tidied, lower-cased tweet lines
	 * @param tokenizer tokenizer used to split each tweet into words
	 * @return one word-to-count map per input line, in line order
	 */
	private static ArrayList<Map<String, Integer>> buildTermVectors(String[] lines, Tokenizer tokenizer) {
		ArrayList<Map<String, Integer>> allTweets = new ArrayList<>(lines.length);
		for (String line : lines) {
			Map<String, Integer> tWords = new HashMap<>();
			for (String token : tokenizer.tokenize(line)) {
				String word = el.lemmatize(token);
				if (stopwords.containsKey(word)) {
					continue;
				}
				tWords.merge(word, 1, Integer::sum); // increment-or-insert in one call
			}
			allTweets.add(tWords);
		}
		return allTweets;
	}

	/**
	 * Nulls out every tweet that is an exact duplicate, or a near-duplicate
	 * (cosine similarity above {@link #similarity}), of an earlier tweet.
	 *
	 * @param lines     tweet lines; duplicates are overwritten with null in place
	 * @param allTweets per-line term vectors aligned with {@code lines}
	 * @return the number of lines removed
	 */
	private static int removeDuplicates(String[] lines, ArrayList<Map<String, Integer>> allTweets) {
		int removed = 0;
		for (int i = 0; i < lines.length; i++) {
			if (lines[i] == null || lines[i].isEmpty()) {
				continue;
			}
			for (int j = i + 1; j < lines.length; j++) {
				if (lines[j] == null || lines[j].isEmpty()) {
					continue;
				}
				// Short-circuits: the cheap equals check runs before the
				// cosine computation, matching the original evaluation order.
				if (lines[i].equals(lines[j]) || Cosine.getCosine(i, j, allTweets) > similarity) {
					lines[j] = null;
					removed++;
				}
			}
		}
		return removed;
	}
}
