package thesis;

import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.List;
import java.util.StringTokenizer;
import java.util.regex.Pattern;

import com.aliasi.classify.JointClassification;
import com.aliasi.classify.NaiveBayesClassifier;
import com.mongodb.DBObject;

import experiment.Dimension;

/**
 * Scores tweets for a quality-based filtering pipeline.
 *
 * A tweet receives an information score (length, URL, geo and authority
 * features) via {@link #computeInfo}, per-concept relevance scores via
 * {@link #computeRelevances}, and a combined quality via
 * {@link #computeQuality}. Relevance is estimated either with trained
 * Naive Bayes classifiers (one per concept) or with a simple keyword/phrase
 * matcher, depending on which constructor was used.
 */
public class QualityCalculator {
	// Semicolon-separated list of news-organization names, one or more per line.
	private static final String NEWSFILE = "newsorganizations.txt";

	// Weights mixing the general score and the situational-awareness (SA)
	// score into the final tweet score. SA is currently disabled (weight 0).
	private static final double GENSCOREWEIGHT = 1;
	private static final double SASCOREWEIGHT = 0;

	// Feature weights for the general tweet score; geo-coordinates are
	// currently disabled (weight 0), the remaining weights sum to ~1.
	private static final double LENGTHWEIGHT = 0.334;
	private static final double GEOSWEIGHT = 0;
	private static final double AUTHORITYWEIGHT = 0.333;
	private static final double URLWEIGHT = 0.333;

	// Heuristic street-address pattern ("... at 42 Main Street ...") used to
	// detect situational-awareness content. Compiled once here instead of
	// re-compiling on every computeInfo() call via String.matches().
	private static final Pattern ADDRESS_PATTERN = Pattern
			.compile(".*((in)|(at)|(on)|(In)|(At)|(On))+(\\s[0-9]*)*\\s(([A-Z][a-z]* )|([0-9]+[a-z]* ))+((St)|(Street)|(Road)|(Rd)|(Boulevard)|(Blvd)|(Plaza)|(Plz)|(Alley)|(Aly)|(Gateway)|(Gtwy)|(Parkway)|(Pkwy)|(Avenue)|(Ave)|(Hwy)|(Highway))(( .*)|(\\z))");

	private List<Concept> concepts;
	// Concept id -> (tweet id -> relevance score); populated by
	// computeRelevances() and read back for coverage computation.
	private Dimension conceptDictionary = new Dimension();
	// One trained classifier per concept id; only filled when the
	// classifier-based constructor is used.
	private Hashtable<String, NaiveBayesClassifier> classifiers = new Hashtable<String, NaiveBayesClassifier>();

	private List<String> newsOrganizations = new ArrayList<String>();
	// When true, getRelProb() uses keyword matching instead of classifiers.
	private boolean useSimpleClassifier = false;

	// CONSTRUCTOR

	/**
	 * Builds a calculator that scores concept relevance with Naive Bayes
	 * classifiers trained on the BBC news dataset, one per concept.
	 *
	 * @param fAlg feature-filtering algorithm used when training classifiers
	 */
	public QualityCalculator(FilteringAlgorithm fAlg) {
		concepts = FSModule.getConcepts();

		BBCNewsDatasetClassifierFactory classifierFactory = new BBCNewsDatasetClassifierFactory(
				fAlg);
		for (Concept concept : concepts) {
			String conceptId = concept.getId();
			NaiveBayesClassifier classifier = classifierFactory
					.getTrainedClassifier(conceptId);
			classifiers.put(conceptId, classifier);
		}

		getNewsOrganizations();
	}

	/**
	 * Builds a calculator that scores concept relevance with a simple
	 * keyword/phrase matcher instead of trained classifiers.
	 */
	public QualityCalculator() {
		concepts = FSModule.getConcepts();
		getNewsOrganizations();
		this.useSimpleClassifier = true;
	}

	// COMPUTE QUALITY

	/**
	 * Computes the information score of a tweet as a weighted sum of length,
	 * geo, URL and authority features, plus a situational-awareness boost
	 * (currently weighted 0) for news-related tweets containing an address.
	 *
	 * @param tweet            raw tweet document
	 * @param preprocessedText preprocessed tweet text
	 * @return the combined tweet score
	 */
	public double computeInfo(DBObject tweet, String preprocessedText) {
		InputTweetInfoExtractor infoExtr = new InputTweetInfoExtractor(tweet);

		// Length feature: fraction of the 140-char tweet limit, capped at 1.
		double lengthScore = Math.min(preprocessedText.length() / 140.0, 1);

		// Geo feature: 1 if the tweet carries coordinates.
		double geoScore = infoExtr.hasCoordinates() ? 1 : 0;

		// URL feature: 1 if the tweet contains at least one link.
		double urlScore = infoExtr.containUrls() ? 1 : 0;

		// Authority feature: user's listed count scaled by 1000, capped at 1.
		double authorityScore = Math.min(
				infoExtr.getUserListedCount() / 1000.0, 1);

		double generalTweetScore = (lengthScore * LENGTHWEIGHT)
				+ (geoScore * GEOSWEIGHT) + (urlScore * URLWEIGHT)
				+ (authorityScore * AUTHORITYWEIGHT);

		// Situational-awareness score: 1 only for tweets that mention a news
		// organization AND contain a street-address-like phrase.
		double saTweetScore = 0;

		boolean newsTweet = false;
		for (String newsWord : newsOrganizations) {
			// Surrounding spaces enforce whole-word matching.
			if (preprocessedText.contains(" " + newsWord + " ")) {
				newsTweet = true;
				break; // one match is enough
			}
		}

		if (newsTweet && ADDRESS_PATTERN.matcher(infoExtr.getText()).matches()) {
			saTweetScore = 1;
		}

		// Computation of total tweet score
		return (GENSCOREWEIGHT * generalTweetScore)
				+ (SASCOREWEIGHT * saTweetScore);
	}

	/**
	 * Estimates the probability that the tweet is relevant to the concept.
	 * In simple mode every matched keyword phrase multiplies the
	 * "irrelevant" probability by 0.4, i.e. score = 1 - 0.4^matches;
	 * otherwise the concept's trained classifier supplies
	 * P("positive" | text).
	 */
	private double getRelProb(Concept conc, String text,
			InputTweetInfoExtractor infoExtr) {
		if (this.useSimpleClassifier) {
			final double base = 0.4;
			List<String> words = tokenize(infoExtr.getText().toLowerCase());
			int count = 0;
			for (String key : conc.getKeywords()) {
				if (containsPhrase(words, tokenize(key))) {
					count++;
				}
			}
			return 1 - Math.pow(base, count);
		}
		NaiveBayesClassifier classifier = classifiers.get(conc.getId());
		JointClassification jc = classifier.classify(text);
		return jc.conditionalProbability("positive");
	}

	// Splits text on spaces into a token list.
	private static List<String> tokenize(String text) {
		List<String> tokens = new ArrayList<String>();
		StringTokenizer st = new StringTokenizer(text, " ");
		while (st.hasMoreTokens()) {
			tokens.add(st.nextToken());
		}
		return tokens;
	}

	// True if the keyword token sequence occurs contiguously (case-insensitive)
	// within the word list.
	private static boolean containsPhrase(List<String> words,
			List<String> keywords) {
		for (int i = 0; i <= words.size() - keywords.size(); i++) {
			int j = 0;
			while (j < keywords.size()
					&& words.get(i + j).equalsIgnoreCase(keywords.get(j))) {
				j++;
			}
			if (j == keywords.size()) {
				return true;
			}
		}
		return false;
	}

	/**
	 * Computes the relevance of a tweet to every known concept, recording
	 * each positive score in the concept dictionary for later coverage
	 * computation.
	 *
	 * @return one Relevance entry per concept with score greater than 0
	 */
	public List<Relevance> computeRelevances(DBObject tweet,
			String preprocessedText) {
		InputTweetInfoExtractor infoExtr = new InputTweetInfoExtractor(tweet);
		long tweetId = infoExtr.getId();

		List<Relevance> relevances = new ArrayList<Relevance>();

		for (Concept conc : concepts) {
			double relevanceScore = getRelProb(conc, preprocessedText, infoExtr);
			if (relevanceScore > 0) {
				relevances.add(new Relevance(conc.getId(), relevanceScore));

				// Record tweetId -> score under this concept, creating the
				// per-concept map on first use.
				HashMap<Long, Double> tweetsRelatedToConc = conceptDictionary
						.value2obj.get(conc.getId());
				if (tweetsRelatedToConc == null) {
					tweetsRelatedToConc = new HashMap<Long, Double>();
					conceptDictionary.value2obj.put(conc.getId(),
							tweetsRelatedToConc);
				}
				tweetsRelatedToConc.put(tweetId, relevanceScore);
			}
		}

		return relevances;
	}

	/**
	 * Combines the information score with the concept-relevance scores.
	 * Relevances are merged with a noisy-OR: 1 - prod(1 - r_i).
	 *
	 * @param info       information score from {@link #computeInfo}
	 * @param relevances per-concept relevances from {@link #computeRelevances}
	 * @return info plus the combined relevance score
	 */
	public double computeQuality(double info, List<Relevance> relevances) {
		double relevancesProduct = 1;
		for (Relevance rel : relevances) {
			relevancesProduct *= (1 - rel.getRelevance());
		}
		double relevanceScore = 1 - relevancesProduct;

		// BUG FIX: this previously returned only `info`, discarding the
		// computed relevance contribution.
		return info + relevanceScore;
	}

	// DICTIONARY FOR COVERAGE
	public Dimension getConceptDictionary() {
		return conceptDictionary;
	}

	// FILE READER

	/**
	 * Loads news-organization names from {@link #NEWSFILE} (semicolon-
	 * separated tokens, multiple per line). Exits the JVM on any I/O error,
	 * matching the original fail-fast behavior. Try-with-resources now
	 * guarantees the reader is closed even if reading fails mid-file.
	 */
	private void getNewsOrganizations() {
		try (BufferedReader br = new BufferedReader(new FileReader(NEWSFILE))) {
			String line;
			while ((line = br.readLine()) != null) {
				StringTokenizer tk = new StringTokenizer(line, ";");
				while (tk.hasMoreTokens()) {
					newsOrganizations.add(tk.nextToken());
				}
			}
		} catch (IOException e) { // FileNotFoundException is an IOException
			e.printStackTrace();
			System.exit(-1);
		}
	}
}
