package thesis;

import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.StringTokenizer;


import org.apache.commons.math3.distribution.NormalDistribution;

import UI.ConceptSelectionDialog;
import UI.ParameterSelectionDialog;
import UI.VisualizationFrame;
import clustering.Cluster;
import clustering.ClusterItem;
import clustering.ClusteringAlgorithm;

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.mongodb.Mongo;

import experiment.Dimension;

/**
 * Central module of the summarization pipeline: reads ingested tweets from
 * MongoDB, filters/scores/clusters them, and writes summaries and statistics
 * back to MongoDB. All state lives in static fields, so the class acts as a
 * procedural module rather than an instantiable object.
 */
public class FSModule {
	// tuning constants; semantics are defined by their consumers elsewhere
	public static final double relThresh = 0;
	public static final boolean DEBUG = false;
	public static final int maxInfoSize = 2;

	// remote MongoDB endpoint (used only by connectToRemoteMongoDB)
	private static final String MONGODBHOST = "sensorium-66.ics.uci.edu";
	private static final int MONGODBPORT = 27017;

	// input database/collection holding the ingested tweets;
	// the commented alternates are previously used datasets
	// private static final String INPUTDB = "sandy";
	// private static final String INPUTCOLL = "first";
	// private static final String INPUTDB = "sandy";
	// private static final String INPUTCOLL = "original";
	private static final String INPUTDB = "inputDB";
	private static final String INPUTCOLL = "inputCollection";
	// private static final String INPUTDB = "Test";
	// private static final String INPUTCOLL = "OneDayNews";

	// output database: per-tweet info and cluster assignments
	private static final String OUTPUTDB = "FSModule";
	private static final String TWEETSCOLL = "TweetsInfo";
	private static final String CLUSTCOLL = "Clusters";

	// results database: per-run statistics plus one "Summary_<id>" collection per run
	private static final String SAVRESDB = "Results";
	private static final String SUMMCOLL = "Summary_";
	private static final String STATSCOLL = "Statistics";

	// concepts file (one "conceptId,keyword" pair per line) and classifier samples DB
	// private static final String CONCFILE = "conceptsSA.txt";
	private static final String CONCFILE = "conceptsTopic.txt";
	// private static final String CONCFILE = "conceptsNews.txt";
	private static final String CLASSIFIERDB = "Classifier";

	// concepts loaded from CONCFILE (populated by readConceptsFile)
	private static List<Concept> concepts = null;

	// M = summary dimension, both chosen in the parameter dialog
	private static int M;
	private static int numberOfPages;

	// scoring weights and query window, set from the parameter dialog
	private static double qualityWeight;
	private static double coverageWeight;
	private static double diversityWeight;
	private static GregorianCalendar begin_date;
	private static GregorianCalendar end_date;
	private static int tweetsNumberLimit;

	private static LocationBounds areaOfInterest;

	@SuppressWarnings("unused")
	private static Mongo remoteMongo;
	private static Mongo localMongo;

	// counters filled while reading input; reported in writeResults
	private static int numberOfTweets = 0;
	private static int numberOfTweetsPrefiltering = 0;
	private static long executionTime = 0;
	private static String summarizerType;
	/**
	 * Interactive entry point of the module: asks the user for concepts and
	 * algorithm parameters, reads the ingested tweets from MongoDB, filters,
	 * preprocesses and scores them, balances the dataset across concepts,
	 * runs the clustering step and fills the output parameters used by the
	 * summarization algorithm.
	 *
	 * @param memoryTweets out-parameter: filled with the balanced set of
	 *                     tweets in internal format, keyed by tweet id
	 * @param dimensions   out-parameter: filled with the concept, location,
	 *                     cluster and time dimensions
	 * @param weights      out-parameter: filled with one weight (1.0) per
	 *                     dimension
	 */
	public static void getDataFromDB(HashMap<Long, DataObject> memoryTweets,
			List<Dimension> dimensions,
			ArrayList<Double> weights) {
		Set<ClusterItem> clusterItems = new HashSet<ClusterItem>();

		connectToLocalMongoDB();
		// connectToRemoteMongoDB();

		dropOutputDatabase(localMongo);
		// dropOutputDatabase(remoteMongo);

		// CHOOSING CONCEPTS OF INTEREST (modal dialog blocks until closed)
		readConceptsFile();
		ConceptSelectionDialog concDialog = new ConceptSelectionDialog(concepts);
		concDialog.setModal(true);
		concDialog.setVisible(true);

		// CHOOSING PARAMETERS (modal dialog blocks until closed)
		ParameterSelectionDialog paramDialog = new ParameterSelectionDialog();
		paramDialog.setModal(true);
		paramDialog.setVisible(true);
		AlgorithmParameters algParameters = paramDialog
				.getAlgorithmParameters();
		begin_date = algParameters.getBeginDate();
		end_date = algParameters.getEndDate();
		tweetsNumberLimit = algParameters.getLimitOfTweets();
		M = algParameters.getSummaryDim();
		numberOfPages = algParameters.getNumberOfPages();
		areaOfInterest = algParameters.getAreaOfInterest();
		qualityWeight = algParameters.getQualityWeight();
		coverageWeight = algParameters.getCoverageWeight();
		diversityWeight = algParameters.getDiversityWeight();

		// READING INPUT: by time window when an end date was given,
		// otherwise by tweet-count limit from the begin date onwards
		DBCursor inputTweets = null;
		long begin_timestamp = begin_date.getTimeInMillis();
		if (algParameters.hasEndDate()) {
			long end_timestamp = end_date.getTimeInMillis();
			inputTweets = readIngestedData(localMongo, begin_timestamp,
					end_timestamp);
		} else {
			inputTweets = readIngestedData(localMongo, begin_timestamp,
					tweetsNumberLimit);
		}

		LocationManager lMng = new LocationManager(areaOfInterest);
		FilteringAlgorithm fAlg = new FilteringAlgorithm(lMng);
		// QualityCalculator qCalc = new QualityCalculator(fAlg);
		QualityCalculator qCalc = new QualityCalculator();
		TimeManager tMng = new TimeManager(begin_date, end_date);

		// tweets in internal format, to be input to summarization algorithm
		while (inputTweets.hasNext()) {
			DBObject tweet = inputTweets.next();
			numberOfTweetsPrefiltering++;
			if (numberOfTweetsPrefiltering % 1000 == 0) {
				System.out.println("read " + numberOfTweetsPrefiltering
						+ " tweets from DB");
			}
			InputTweetInfoExtractor infoExtr = new InputTweetInfoExtractor(
					tweet);
			long tweetId = infoExtr.getId();

			// FILTERING & PREPROCESSING
			if (fAlg.mustBeFiltered(tweet)) {
				continue;
			}
			String preprocText = fAlg.getPreprocessedText(infoExtr.getText());
			numberOfTweets++;

			// CREATING MEMORY TWEET
			DataObject memoryT = new DataObject(tweetId);
			memoryT.setDate(new Date(infoExtr.getTimestamp()));
			memoryT.setText(infoExtr.getText());
			memoryTweets.put(tweetId, memoryT);

			// CREATING THE CLUSTERING SET
			ClusterItem clusterItem = new ClusterItem(tweetId, preprocText);
			clusterItems.add(clusterItem);

			// COMPUTING QUALITY
			double info = qCalc.computeInfo(tweet, preprocText);
			List<Relevance> relevances = qCalc.computeRelevances(tweet,
					preprocText);
			double quality = qCalc.computeQuality(info, relevances);
			memoryT.setQuality(quality);

			// DISCRETE LOCATION & TIME CATEGORIES
			String locationCategory = lMng.categorize(tweet);
			String timeCategory = tMng.categorize(tweet);

			writeTweetInfo(tweet, quality, info, relevances, preprocText,
					locationCategory, timeCategory);
		}

		// BALANCING: downsample over-represented concepts so every concept
		// keeps about twice the size of the smallest one, in expectation
		Dimension conceptDict = qCalc
				.getConceptDictionary();
		double min = Integer.MAX_VALUE;
		for (String key : conceptDict.value2obj.keySet()) {
			int size = conceptDict.value2obj.get(key).size();
			if (min > size) {
				min = size;
			}
		}
		Random random = new Random(); // PERF: one RNG instead of one per tweet
		HashMap<Long, DataObject> tmpTweets = new HashMap<Long, DataObject>();
		for (String key : conceptDict.value2obj.keySet()) {
			HashMap<Long, Double> tweets = conceptDict.value2obj.get(key);
			double thresh = min * 2 / tweets.size();
			HashMap<Long, Double> newTweets = new HashMap<Long, Double>();
			for (Long id : tweets.keySet()) {
				if (random.nextDouble() < thresh) {
					newTweets.put(id, tweets.get(id));
				}
			}
			conceptDict.value2obj.put(key, newTweets);
			for (Long id : newTweets.keySet()) {
				tmpTweets.put(id, memoryTweets.get(id));
			}
		}
		// BUGFIX: the original did "memoryTweets = tmpTweets", which only
		// rebinds the local reference — the caller kept the unbalanced map.
		// Mutate the caller's map in place instead.
		// NOTE(review): clusterItems still holds ALL pre-balancing tweets,
		// so clustering below runs on the unbalanced set — confirm intent.
		memoryTweets.clear();
		memoryTweets.putAll(tmpTweets);
		// --------------------------------------------------------------------
		System.out.println("number of tweets after filtering: "
				+ memoryTweets.size());
		if (memoryTweets.isEmpty())
			return;

		// CLUSTERING
		ClusteringAlgorithm cAlgorithm = new ClusteringAlgorithm(clusterItems);
		Set<Cluster> clusters = cAlgorithm.cluster();

		writeClusters(clusters);

		// ----------- organize attributes ---------------------------
		Dimension conceptDictionary = qCalc
				.getConceptDictionary();
		Dimension locationDictionary = lMng
				.getLocationDictionary();
		Dimension clusterDictionary = cAlgorithm
				.getClusterDictionary();
		Dimension timeDictionary = tMng
				.getClusterDictionary();
		dimensions.add(conceptDictionary);
		dimensions.add(locationDictionary);
		dimensions.add(clusterDictionary);
		dimensions.add(timeDictionary);

		// every dimension starts with the same weight
		for (int i = 0; i < dimensions.size(); i++) {
			weights.add(1.0);
		}
	}

	/**
	 * Builds one synthetic attribute table per requested size (value keys are
	 * named "a_&lt;attr&gt;_&lt;value&gt;") and fills them with generated
	 * probabilities via generateSyntheticProb.
	 *
	 * @param sizes        number of values for each synthetic attribute
	 * @param memoryTweets tweets to generate probabilities for
	 * @return the populated attribute tables, one per entry in sizes
	 */
	private static List<Hashtable<String, HashMap<Long, Double>>> generateSystheticAttr(
			double[] sizes, HashMap<Long, DataObject> memoryTweets) {
		List<Hashtable<String, HashMap<Long, Double>>> result = new ArrayList<Hashtable<String, HashMap<Long, Double>>>();
		for (int attrIdx = 0; attrIdx < sizes.length; attrIdx++) {
			Hashtable<String, HashMap<Long, Double>> table = new Hashtable<String, HashMap<Long, Double>>();
			for (int valIdx = 0; valIdx < sizes[attrIdx]; valIdx++) {
				table.put("a_" + attrIdx + "_" + valIdx,
						new HashMap<Long, Double>());
			}
			result.add(table);
		}
		generateSyntheticProb(result, memoryTweets);
		return result;
	}

	/**
	 * Persists the given summary (its in-memory state is flushed to the DB,
	 * then the result is written to the Results database) and opens the
	 * visualization window for it.
	 *
	 * @param summary the finished summary to store and display
	 */
	public static void visualize(Summary summary) {
		summary.memory2DB();
		writeResults(localMongo, summary);
		VisualizationFrame visFrame = new VisualizationFrame(summary);
		visFrame.setVisible(true);
	}

	/**
	 * Fills each synthetic attribute table with a per-tweet probability
	 * distribution: a random center is picked from a shared N(0,5) vocabulary,
	 * a Gaussian density around that center is evaluated at each attribute
	 * value, normalized, and scaled by a per-tweet random factor.
	 *
	 * @param attributes   attribute tables to (re)populate; existing values
	 *                     are cleared first
	 * @param memoryTweets tweets to generate probabilities for
	 */
	private static void generateSyntheticProb(
			List<Hashtable<String, HashMap<Long, Double>>> attributes,
			HashMap<Long, DataObject> memoryTweets) {
		// start from a clean slate
		for (Hashtable<String, HashMap<Long, Double>> attribute : attributes) {
			for (HashMap<Long, Double> values : attribute.values()) {
				values.clear();
			}
		}
		// PERF: reuse one RNG and one sampling distribution instead of
		// constructing fresh objects on every loop iteration
		Random random = new Random();
		NormalDistribution vocDistribution = new NormalDistribution(0, 5);
		double[] randomVoc = new double[memoryTweets.size()];
		for (int i = 0; i < randomVoc.length; i++) {
			randomVoc[i] = vocDistribution.sample();
		}
		for (DataObject t : memoryTweets.values()) {
			double scale = random.nextDouble();
			for (Hashtable<String, HashMap<Long, Double>> attribute : attributes) {
				// center of the density, clamped to a valid value index
				int ind = (int) Math.min(attribute.size() - 1, Math.max(0,
						randomVoc[random.nextInt(randomVoc.length)]));
				// NOTE(review): the spread uses attributes.size() (number of
				// attributes), not attribute.size() (number of values) —
				// looks suspicious but is kept as-is; confirm intent.
				NormalDistribution density = new NormalDistribution(ind,
						attributes.size() / 100.0);
				double[] rds = new double[attribute.size()];
				double totalRd = 0;
				for (int i = 0; i < attribute.size(); i++) {
					rds[i] = density.density(i);
					totalRd += rds[i];
				}
				int count = 0;
				for (HashMap<Long, Double> values : attribute.values()) {
					values.put(t.getDbId(), rds[count++] / totalRd * scale);
				}
			}
		}
	}

	// CONNECT TO MONGODB
	/**
	 * Opens a connection to the MongoDB instance on localhost (driver default
	 * host/port) and stores it in {@link #localMongo}. Exits the JVM if the
	 * host cannot be resolved.
	 */
	public static void connectToLocalMongoDB() {
		try {
			localMongo = new Mongo();
		} catch (UnknownHostException e) {
			e.printStackTrace();
			System.exit(-1);
		}
	}

	/**
	 * Opens a connection to the remote MongoDB at MONGODBHOST:MONGODBPORT and
	 * stores it in {@link #remoteMongo}. Exits the JVM if the host cannot be
	 * resolved. Currently unused (all call sites are commented out).
	 */
	@SuppressWarnings("unused")
	private static void connectToRemoteMongoDB() {
		try {
			remoteMongo = new Mongo(MONGODBHOST, MONGODBPORT);
		} catch (UnknownHostException e) {
			e.printStackTrace();
			System.exit(-1);
		}
	}

	// DROP OUTPUT DATABASE
	/**
	 * Drops the whole output database so each run starts from a clean state.
	 *
	 * @param m the MongoDB connection whose output database is dropped
	 */
	private static void dropOutputDatabase(Mongo m) {
		m.getDB(OUTPUTDB).dropDatabase();
	}

	// READ INPUT DATA
	/**
	 * Streams ingested tweets whose "header.tweet_time" lies inside the
	 * inclusive [begin_timestamp, end_timestamp] window.
	 * NOTE(review): timestamps are compared as strings ("" + ts); this only
	 * orders correctly while all stored values have the same digit count —
	 * confirm the stored format.
	 *
	 * @param m               MongoDB connection holding the input collection
	 * @param begin_timestamp window start, epoch millis
	 * @param end_timestamp   window end, epoch millis
	 * @return a cursor over the matching tweets
	 */
	public static DBCursor readIngestedData(Mongo m, long begin_timestamp,
			long end_timestamp) {
		DBCollection input = m.getDB(INPUTDB).getCollection(INPUTCOLL);
		BasicDBObject window = new BasicDBObject("$gte", "" + begin_timestamp)
				.append("$lte", "" + end_timestamp);
		return input.find(new BasicDBObject("header.tweet_time", window));
	}

	/**
	 * Streams up to numberOfTweets ingested tweets whose "header.tweet_time"
	 * is at or after begin_timestamp.
	 * NOTE(review): timestamps are compared as strings ("" + ts); see the
	 * two-timestamp overload for the caveat.
	 *
	 * @param m               MongoDB connection holding the input collection
	 * @param begin_timestamp window start, epoch millis
	 * @param numberOfTweets  maximum number of tweets to return
	 * @return a cursor over the matching tweets, capped at numberOfTweets
	 */
	public static DBCursor readIngestedData(Mongo m, long begin_timestamp,
			int numberOfTweets) {
		DBCollection input = m.getDB(INPUTDB).getCollection(INPUTCOLL);
		BasicDBObject fromBegin = new BasicDBObject("header.tweet_time",
				new BasicDBObject("$gte", "" + begin_timestamp));
		return input.find(fromBegin).limit(numberOfTweets);
	}

	// WRITE AND READ TWEETSINFO COLLECTION
	/**
	 * Stores one processed tweet in the TweetsInfo collection of the output
	 * database: its quality breakdown (total, info, per-concept relevances),
	 * raw and preprocessed text, timestamp, coordinates and the discrete
	 * location/time categories computed earlier.
	 *
	 * @param tweet            the tweet in input (DB) format
	 * @param quality          total quality score
	 * @param info             informativeness component of the quality
	 * @param relevances       per-concept relevance scores
	 * @param preprocText      preprocessed tweet text
	 * @param locationCategory discrete location bucket label
	 * @param timeCategory     discrete time bucket label
	 */
	private static void writeTweetInfo(DBObject tweet, double quality,
			double info, List<Relevance> relevances, String preprocText,
			String locationCategory, String timeCategory) {
		DB db = localMongo.getDB(OUTPUTDB);

		DBCollection c = db.getCollection(TWEETSCOLL);

		DBObject obj = new BasicDBObject();

		TweetInfoExtractor InfoExtr = new InputTweetInfoExtractor(tweet);
		long tweetId = InfoExtr.getId();
		obj.put("tweet_id", tweetId);

		// nested quality document: { total, info, relevances: [...] }
		DBObject qualityObj = new BasicDBObject();
		qualityObj.put("total", quality);
		qualityObj.put("info", info);
		BasicDBList relevancesList = new BasicDBList();
		for (Relevance rel : relevances) {
			String conceptId = rel.getConceptId();
			double relevanceScore = rel.getRelevance();
			DBObject relevanceObj = new BasicDBObject();
			relevanceObj.put("concept", conceptId);
			relevanceObj.put("relevance", relevanceScore);
			relevancesList.add(relevanceObj);
		}
		qualityObj.put("relevances", relevancesList);
		obj.put("quality", qualityObj);

		String text = InfoExtr.getText();
		obj.put("text", text);

		obj.put("preprocessed_text", preprocText);

		long timestamp = InfoExtr.getTimestamp();
		obj.put("timestamp", timestamp);

		Object coordinates = InfoExtr.getCoordinates();
		obj.put("coordinates", coordinates);

		obj.put("location_cat", locationCategory);

		obj.put("time_cat", timeCategory);

		// flat list of concept ids (duplicated from relevances) for querying
		BasicDBList conceptsList = new BasicDBList();
		for (Relevance rel : relevances) {
			String conceptId = rel.getConceptId();
			conceptsList.add(conceptId);
		}
		obj.put("concepts", conceptsList);

		c.insert(obj);

		// index is created on the very first insert only
		// NOTE(review): c.count() costs a server round-trip on every insert
		if (c.count() == 1) {
			c.createIndex(new BasicDBObject("tweet_id", 1));
		}
	}

	/**
	 * Returns a cursor over every document stored in the TweetsInfo
	 * collection of the output database.
	 */
	public static DBCursor readTweetsInfoData() {
		DBCollection tweetsInfo = localMongo.getDB(OUTPUTDB)
				.getCollection(TWEETSCOLL);
		return tweetsInfo.find();
	}

	/**
	 * Looks up a single tweet in the TweetsInfo collection by tweet id.
	 * NOTE(review): fails with NoSuchElementException when no document
	 * matches — callers must only pass ids that were written earlier.
	 *
	 * @param tweetId id of the tweet to fetch
	 * @return the stored TweetsInfo document for that id
	 */
	public static DBObject getTweetWithTweetId(long tweetId) {
		DBCollection tweetsInfo = localMongo.getDB(OUTPUTDB)
				.getCollection(TWEETSCOLL);
		DBObject byId = new BasicDBObject("tweet_id", tweetId);
		return tweetsInfo.find(byId).next();
	}

	// WRITE AND READ CLUSTER COLLECTION
	/**
	 * Persists the cluster assignment (cluster id, tweet id) of every
	 * cluster item into the Clusters collection of the output database.
	 *
	 * Improvement over the original: the tweet_id index is created once up
	 * front instead of calling c.count() (a server round-trip) after every
	 * single insert just to detect the first one — createIndex is idempotent.
	 *
	 * @param clusters clusters whose items should be stored
	 */
	private static void writeClusters(Set<Cluster> clusters) {
		DB db = localMongo.getDB(OUTPUTDB);
		DBCollection c = db.getCollection(CLUSTCOLL);

		c.createIndex(new BasicDBObject("tweet_id", 1));

		for (Cluster cluster : clusters) {
			String clusterId = cluster.getClusterId();
			for (ClusterItem clusterItem : cluster.getClusterItems()) {
				DBObject obj = new BasicDBObject();
				obj.put("cluster", clusterId);
				obj.put("tweet_id", clusterItem.getTweetId());
				c.insert(obj);
			}
		}
	}

	/**
	 * Returns the cluster id assigned to the given tweet.
	 * NOTE(review): fails with NoSuchElementException when the tweet was
	 * never written by writeClusters.
	 *
	 * @param tweetId id of the tweet whose cluster is requested
	 * @return the cluster id stored for that tweet
	 */
	public static String readClusterWithTweetId(Long tweetId) {
		DBCollection clustersColl = localMongo.getDB(OUTPUTDB)
				.getCollection(CLUSTCOLL);
		DBObject byTweet = new BasicDBObject("tweet_id", tweetId);
		DBObject assignment = clustersColl.find(byTweet).next();
		return assignment.get("cluster").toString();
	}

	// WRITE AND READ RESULTS
	/**
	 * Persists a finished summary: appends one statistics document (scores,
	 * sizes, dates, algorithm, execution time) to the Statistics collection
	 * and copies the summary tweets, annotated with their cluster id, into a
	 * dedicated "Summary_&lt;id&gt;" collection.
	 *
	 * @param m       MongoDB connection to write to
	 * @param summary the summary to store
	 */
	public static void writeResults(Mongo m, Summary summary) {
		DB dbLocal = m.getDB(SAVRESDB);

		// Find the next id for the results
		// NOTE(review): scans the whole statistics collection and takes the
		// last document's result_id + 1 — relies on insertion order.
		DBCollection cStatistics = dbLocal.getCollection(STATSCOLL);
		DBCursor statistics = cStatistics.find();
		Integer idNextResult = 1;
		while (statistics.hasNext()) {
			idNextResult = (Integer) statistics.next().get("result_id") + 1;
		}

		// Write the statistics
		DBObject obj = new BasicDBObject();
		obj.put("result_id", idNextResult);
		obj.put("quality", summary.getQuality());
		obj.put("coverage", summary.getCoverage());
		obj.put("diversity", summary.getDiversity());
		obj.put("total_score", summary.getScore());
		obj.put("summary_size", FSModule.getSummaryDimension());
		obj.put("number_of_pages", FSModule.getNumberOfPages());
		obj.put("dataset_size_prefiltering", numberOfTweetsPrefiltering);
		obj.put("dataset_size", numberOfTweets);

		// format the begin date as "M-d-yyyy H:m" (MONTH is 0-based, hence +1)
		int month = begin_date.get(GregorianCalendar.MONTH) + 1;
		int day = begin_date.get(GregorianCalendar.DAY_OF_MONTH);
		int year = begin_date.get(GregorianCalendar.YEAR);
		int hour = begin_date.get(GregorianCalendar.HOUR_OF_DAY);
		int minute = begin_date.get(GregorianCalendar.MINUTE);
		String beginDate = "" + month + "-" + day + "-" + year + " " + hour
				+ ":" + minute;
		obj.put("begin_date", beginDate);

		// either an explicit end date or the tweet-count limit was used
		if (end_date != null) {
			month = end_date.get(GregorianCalendar.MONTH) + 1;
			day = end_date.get(GregorianCalendar.DAY_OF_MONTH);
			year = end_date.get(GregorianCalendar.YEAR);
			hour = end_date.get(GregorianCalendar.HOUR_OF_DAY);
			minute = end_date.get(GregorianCalendar.MINUTE);
			String endDate = "" + month + "-" + day + "-" + year + " " + hour
					+ ":" + minute;
			obj.put("end_date", endDate);
		} else {
			obj.put("tweets_number_limit", tweetsNumberLimit);
		}

		obj.put("algorithm_used", summarizerType);
		obj.put("execution_time", executionTime);
		cStatistics.insert(obj);

		// Write the summary
		DBCollection cSummary = dbLocal.getCollection(SUMMCOLL + idNextResult);
		for (DBObject tweet : summary.getTweets()) {
			InternTweetInfoExtractor infoExtr = new InternTweetInfoExtractor(
					tweet);
			long tweetId = infoExtr.getId();
			String clusterId = FSModule.readClusterWithTweetId(tweetId);
			tweet.put("cluster", clusterId);

			cSummary.insert(tweet);

			try {
				// NOTE(review): presumably throttles the inserts — confirm
				// whether this 1s pause per tweet is still needed
				Thread.sleep(1000);
			} catch (InterruptedException e) {
				e.printStackTrace();
				System.exit(-1);
			}
		}
	}

	/**
	 * Loads the stored statistics document for the given result id.
	 * NOTE(review): fails with NoSuchElementException when no statistics
	 * document exists for that id.
	 *
	 * @param resultId id assigned to the result when it was written
	 * @return the matching statistics document
	 */
	public static DBObject readStoredStatistics(int resultId) {
		connectToLocalMongoDB();

		DBCollection stats = localMongo.getDB(SAVRESDB)
				.getCollection(STATSCOLL);
		DBObject byResultId = new BasicDBObject("result_id", resultId);
		return stats.find(byResultId).next();
	}

	/**
	 * Loads every tweet of a previously stored summary into a list.
	 *
	 * @param resultId id of the stored result whose summary is requested
	 * @return the summary tweets, empty if no such summary exists
	 */
	public static List<DBObject> readStoredSummary(int resultId) {
		connectToLocalMongoDB();

		DBCollection summaryColl = localMongo.getDB(SAVRESDB)
				.getCollection(SUMMCOLL + resultId);

		List<DBObject> tweets = new ArrayList<DBObject>();
		DBCursor cursor = summaryColl.find();
		while (cursor.hasNext()) {
			tweets.add(cursor.next());
		}
		return tweets;
	}

	// READ CLASSIFIER SAMPLES
	/**
	 * Returns all positive training samples stored for the given concept
	 * (collection "&lt;concept&gt;_positive" in the classifier database).
	 */
	public static DBCursor readPositiveSamples(String concept) {
		return localMongo.getDB(CLASSIFIERDB)
				.getCollection(concept + "_positive").find();
	}

	/**
	 * Returns all negative training samples stored for the given concept
	 * (collection "&lt;concept&gt;_negative" in the classifier database).
	 */
	public static DBCursor readNegativeSamples(String concept) {
		return localMongo.getDB(CLASSIFIERDB)
				.getCollection(concept + "_negative").find();
	}

	// READ CONCEPTS FILE
	/**
	 * Loads the concept dictionary from CONCFILE. Each data line has the form
	 * "conceptId,keyword"; keywords belonging to the same concept id are
	 * merged into one Concept object. The first line of the file is skipped
	 * (assumed to be a header — TODO confirm it is not a real concept line).
	 * Exits the JVM if the file is missing or unreadable.
	 */
	public static void readConceptsFile() {
		concepts = new ArrayList<Concept>();

		BufferedReader br = null;
		try {
			br = new BufferedReader(new FileReader(CONCFILE));
		} catch (FileNotFoundException e) {
			e.printStackTrace();
			System.exit(-1);
		}
		try {
			br.readLine(); // skip (presumed) header line
			String line;
			while ((line = br.readLine()) != null) {

				StringTokenizer tk = new StringTokenizer(line, ",");

				String conceptId = tk.nextToken();
				// BUGFIX: the original lowercased/trimmed a keyword only when
				// appending to an existing concept; the first keyword of each
				// concept was stored raw. Normalize once, uniformly.
				String keyword = tk.nextToken().toLowerCase().trim();
				boolean added = false;

				for (Concept concept : concepts) {
					if (concept.getId().compareTo(conceptId) == 0) {
						concept.addKeyword(keyword);
						added = true;
						break; // at most one Concept per id in the list
					}
				}
				if (!added) {
					Concept concept = new Concept(conceptId);
					concept.addKeyword(keyword);
					concepts.add(concept);
				}
			}
		} catch (IOException e) {
			e.printStackTrace();
			System.exit(-1);
		} finally {
			// BUGFIX: the reader was never closed (file-handle leak)
			try {
				br.close();
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
	}

	// GETTERS
	/** Number of tweets that survived filtering (the working dataset size). */
	public static long getDatasetDimension() {
		return numberOfTweets;
	}

	/** Start of the query window chosen in the parameter dialog. */
	public static GregorianCalendar getBeginDate() {
		return begin_date;
	}

	/** End of the query window; may be null when a tweet-count limit is used. */
	public static GregorianCalendar getEndDate() {
		return end_date;
	}

	/** Concepts loaded from CONCFILE by readConceptsFile (null before that). */
	public static List<Concept> getConcepts() {
		return concepts;
	}

	// NOTE(review): the "Weigth" spelling in the next three getters is kept
	// as-is because external callers depend on these names.

	/** Weight of the quality component in the summary score. */
	public static double getQualityWeigth() {
		return qualityWeight;
	}

	/** Weight of the coverage component in the summary score. */
	public static double getCoverageWeigth() {
		return coverageWeight;
	}

	/** Weight of the diversity component in the summary score. */
	public static double getDiversityWeigth() {
		return diversityWeight;
	}

	/** Maximum number of tweets to read when no end date is set. */
	public static int getTweetsNumberLimit() {
		return tweetsNumberLimit;
	}

	/** Summary dimension (M) chosen in the parameter dialog. */
	public static int getSummaryDimension() {
		return M;
	}

	/** Number of result pages chosen in the parameter dialog. */
	public static int getNumberOfPages() {
		return numberOfPages;
	}

	/**
	 * Scans the ingested tweets (from begin_date, capped at
	 * tweetsNumberLimit) and returns the timestamp of the newest one as a
	 * calendar date.
	 *
	 * @return a calendar set to the maximum tweet timestamp found (epoch 0
	 *         when the scan yields no tweets)
	 */
	public static GregorianCalendar getEndDateFromDataset() {
		DBCursor tweets = readIngestedData(localMongo,
				begin_date.getTimeInMillis(), tweetsNumberLimit);

		long latest = 0;
		while (tweets.hasNext()) {
			InputTweetInfoExtractor extractor = new InputTweetInfoExtractor(
					tweets.next());
			latest = Math.max(latest, extractor.getTimestamp());
		}

		GregorianCalendar endDate = new GregorianCalendar();
		endDate.setTimeInMillis(latest);
		return endDate;
	}

}
