/**
 * This file is part of Sonedyan.
 * 
 * Sonedyan is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public
 * License as published by the Free Software Foundation;
 * either version 3 of the License, or (at your option) any
 * later version.
 *
 * Sonedyan is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied
 * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
 * PURPOSE.  See the GNU General Public License for more
 * details.
 *
 * You should have received a copy of the GNU General Public
 * License along with Sonedyan; see the file COPYING.  If not
 * see <http://www.gnu.org/licenses/>.
 * 
 * Copyright (C) 2009-2012 Jimmy Dubuisson <jimmy.dubuisson@gmail.com>
 */

package org.unige.mpej.eckmann.sonedyan.fns;

import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Hashtable;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.Vector;

import org.unige.mpej.eckmann.sonedyan.bean.Edge;
import org.unige.mpej.eckmann.sonedyan.bean.Vertex;
import org.unige.mpej.eckmann.sonedyan.fns.bean.Actor;
import org.unige.mpej.eckmann.sonedyan.fns.bean.Link;
import org.unige.mpej.eckmann.sonedyan.fns.bean.Actor.Status;
import org.unige.mpej.eckmann.sonedyan.fns.classification.StatusClassifier;
import org.unige.mpej.eckmann.sonedyan.fns.db.DatasetLoader;
import org.unige.mpej.eckmann.sonedyan.fns.graph.GraphMetrics;
import org.unige.mpej.eckmann.sonedyan.graph.GraphPlotter;
import org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.GraphMetric;
import org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.NodeMetric;
import org.unige.mpej.eckmann.sonedyan.utils.DataUtils;
import org.unige.mpej.eckmann.sonedyan.utils.GraphUtils;

import edu.uci.ics.jung.algorithms.cluster.BicomponentClusterer;
import edu.uci.ics.jung.algorithms.cluster.WeakComponentClusterer;
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.SparseMultigraph;
import edu.uci.ics.jung.graph.UndirectedGraph;

import org.apache.log4j.Logger;

/**
 * main class used for analyzing the FNS data
 */
public class FnsDataAnalyzer
{
	/** Class logger for progress/diagnostic output; final since it is never reassigned in the visible code. */
	private static final Logger log = Logger.getLogger(FnsDataAnalyzer.class);
	
	// 100 highest degree nodes over the total period (01.10.2010 -> 01.10.2014).
	// Used by action "l2" in main() to extract and export a subgraph of these actors.
	// NOTE(review): these actor ids look hard-coded from a previous analysis run of
	// the full dataset — confirm they are regenerated if the dataset changes.
	private static final String[] HIGHEST_DEGREE_NODES = {
		"739","736","718","1765","1160","1776","1739","4295","1955","416",
		"1726","1963","1847","1570","1844","2857","719","417","1459","2072",
		"2073","2076","2075","2084","2089","241","907","1851","2856","1457",
		"766","1177","2071","2074","2078","2077","2081","2079","1971","1255",
		"2859","2873","2874","2876","2870","2862","2861","2866","2867","2865",
		"4350","2121","3538","3537","3536","3534","3533","3539","3542","811",
		"1970","5660","3535","1972","2833","75","2055","3879","566","843",
		"2699","2130","2127","2128","2124","2135","2136","4291","745","740",
		"742","737","738","3008","43","621","1067","2593","2133","2090",
		"3544","2080","2085","2083","2086","418","942","799","786","6154"
	};
	
	/**
	 * Creates an analyzer instance. All analysis entry points are exposed as
	 * static methods, so an instance carries no state of its own.
	 */
	public FnsDataAnalyzer()
	{
	}
	
	/**
	 * main method	
	 */
	public static void main(String args[])
	{
		try
		{
			log.debug("Instantiating an FNS data analyzer");
					
			if (args.length == 0)
			{
				displayUsage();
				System.exit(0);
			}
			
			String action = args[0];
			
			/**
			 * load the FNS data
			*/
			if (action.equals("l1"))
			{
				log.info("Loading data...");
				
				DatasetLoader loader = new DatasetLoader();
				SparseMultigraph<Actor, Link> multiGraph = loader.loadDataset();
				
				log.info("Multigraph loaded...");
				log.info("Number of vertices: " + multiGraph.getVertexCount());
				log.info("Number of edges: " + multiGraph.getEdgeCount());
				
				log.info("Exporting to Arff format...");
				
				DataUtils.export2Arff("data.arff", multiGraph);
				
				log.info("Loading Arff file...");
				
				SparseMultigraph<Actor, Link> multiGraph2 = DatasetLoader.loadArffDataset("data.arff");
			}
			else if (action.equals("l2"))
			{
				log.info("Loading data...");
				
				DatasetLoader loader = new DatasetLoader();
				SparseMultigraph<Actor, Link> multiGraph = loader.loadDataset();
				
				log.info("Multigraph loaded...");
				log.info("Number of vertices: " + multiGraph.getVertexCount());
				log.info("Number of edges: " + multiGraph.getEdgeCount());
				
				log.info("Exporting to Arff format...");
				
				DataUtils.export2Arff("data.arff", multiGraph);
				
				log.info("Loading actor status...");
				
				Hashtable<String, TreeMap<Date, Vector<Status>>> map = org.unige.mpej.eckmann.sonedyan.fns.utils.DataUtils.getActorTimeStatusMap(multiGraph);
				
				SimpleDateFormat formatter = new SimpleDateFormat(DataUtils.REDUCED_DATE_FORMAT);
				Set<String> ids = map.keySet();
				
				log.info("Listing multiple status occurences...");
				
				for (String id : ids)
				{
					TreeMap<Date, Vector<Status>> timeStatus = map.get(id);
					Set<Date> dates = timeStatus.keySet();
					
					for (Date d : dates)
					{
						Vector<Status> status = timeStatus.get(d);
						
						if (status != null && status.size() > 1)
						{
							log.info(id + " : " + formatter.format(d)+ " : " + status);
						}
					}
				}
				
				Date startDate = new SimpleDateFormat(DataUtils.INPUT_DATE_FORMAT).parse("01.10.2010");
				Date endDate = new SimpleDateFormat(DataUtils.INPUT_DATE_FORMAT).parse("01.10.2014");
				
				Hashtable<Date, Vector<String>> timeActorIds = org.unige.mpej.eckmann.sonedyan.fns.utils.DataUtils.getActorStatusIdsInDateRange(map, startDate, endDate, Status.PHD);
				
				Date testDate = new SimpleDateFormat(DataUtils.INPUT_DATE_FORMAT).parse("01.10.2012");
				
				Vector<String> actorIds = timeActorIds.get(testDate);
				
				log.info("Set of PHD actors on the 1st of October 2012: " + actorIds);
				
				log.info("Extracting graph from multigraph");
				
				UndirectedGraph<Actor, Link> graph = GraphUtils.extractUndirectedGraphFromMultiGraph(multiGraph);
				
				log.info("Graph extracted...");
				log.info("Number of vertices: " + graph.getVertexCount());
				log.info("Number of edges: " + graph.getEdgeCount());
				
				GraphUtils.exportAsPajek(graph);
				GraphUtils.exportAsGraphMl(graph);
				GraphUtils.exportAsGml(graph);
				
				log.info("Clustering extracted graph...");
				
				log.info("Using weak component clusterer...");
				
				WeakComponentClusterer<Actor, Link> wcClusterer = new WeakComponentClusterer<Actor, Link>();
				Set<Set<Actor>> components = wcClusterer.transform(graph);
				
				int counter = 1;
				
				for (Set<Actor> set : components)
				{
					log.info("Component " + counter + " contains " + set.size() + " nodes");
					
					StringBuffer buffer = new StringBuffer();
					
					for (Actor actor : set)
					{
						buffer.append(actor.getId() + " ");
					}
					
					log.info(buffer.toString() + "\n");
					
					counter++;
				}
				
				log.info("Using bicomponent clusterer...");
				
				BicomponentClusterer<Actor, Link> bcClusterer = new BicomponentClusterer<Actor, Link>();
				components = bcClusterer.transform(graph);
				
				counter = 1;
				
				for (Set<Actor> set : components)
				{
					log.info("Component " + counter + " contains " + set.size() + " nodes");
					
					StringBuffer buffer = new StringBuffer();
					
					for (Actor actor : set)
					{
						buffer.append(actor.getId() + " ");
					}
					
					log.info(buffer.toString() + "\n");
					
					counter++;
				}
				
				log.info("Extracting subgraph of 100 highest degree nodes...");
				
				Vector<String> highestDegreeNodes = new Vector<String>();
				
				for (String s : HIGHEST_DEGREE_NODES)
				{
					highestDegreeNodes.add(s);
				}
				
				Graph<Actor, Link> subGraph = GraphUtils.getSubGraph(graph, highestDegreeNodes);
				
				GraphUtils.exportAsPajek(subGraph, "100-highest-degree_subgraph.net");
				GraphUtils.exportAsGraphMl(subGraph, "100-highest-degree_subgraph.xml");
				GraphUtils.exportAsGml(subGraph, "100-highest-degree_subgraph.gml");
			}
			else if (action.equals("m"))
			{
				Graph<Vertex, Edge> g = GraphUtils.loadGraphMl("graphml.xml");
				
				System.out.println("----------");
				System.out.println("Graph Metrics:\n");
				
				org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics<Vertex, Edge> graphMetrics = new org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics<Vertex, Edge>();
				
				double nV = graphMetrics.getGraphMetric(g, GraphMetric.NUMBER_OF_VERTICES);
				System.out.println("Number of Vertices: " +  nV);
				
				double nE = graphMetrics.getGraphMetric(g, GraphMetric.NUMBER_OF_EDGES);
				System.out.println("Number of Edges: " +  nE);
				
				double diam = graphMetrics.getGraphMetric(g, GraphMetric.DIAMETER);
				System.out.println("Diameter: " +  diam);
				
				double density = graphMetrics.getGraphMetric(g, GraphMetric.DENSITY);
				System.out.println("Density: " + density);
				
				System.out.println("----------");
				System.out.println("AVG Graph Metrics:\n");
				
				double avgCC = graphMetrics.getGraphMetric(g, GraphMetric.AVG_CLUSTERING_COEFFICIENT);
				System.out.println("Average Clustering Coefficient: " +  avgCC);
				
				double avgVD = graphMetrics.getGraphMetric(g, GraphMetric.AVG_VERTEX_DEGREE);
				System.out.println("Average Vertex Degree: " +  avgVD);
				
				double avgVBC = graphMetrics.getGraphMetric(g, GraphMetric.AVG_VERTEX_BETWEENESS_CENTRALITY);
				System.out.println("Average Vertex Betweeness Centrality: " +  avgVBC);
				
				double avgEBC = graphMetrics.getGraphMetric(g, GraphMetric.AVG_EDGE_BETWEENESS_CENTRALITY);
				System.out.println("Average Edge Betweeness Centrality: " +  avgEBC);
				
				Map<Integer, Integer> degreeMap = graphMetrics.getDegreeDistribution(g);
				
				SortedSet<Integer> keysSet = new TreeSet<Integer>(degreeMap.keySet());
				
				System.out.println("\n----------");
				System.out.println("Degree distribution:\n");
				
				for (Integer key : keysSet)
				{
					System.out.println("    deg: " + key + " -> #nodes: " + degreeMap.get(key));
				}
				
				System.out.println("\n----------");
				System.out.println("Degree curvature distribution\n");
				
				Map<Integer, Double> curvatureMap = graphMetrics.getDegreeCurvatureDistribution(g);
				
				keysSet = new TreeSet<Integer>(curvatureMap.keySet());
				
				for (Integer key : keysSet)
				{
					System.out.println("    deg:" + key + " -> curvature: " + curvatureMap.get(key));
				}
				
				System.out.println("\n----------");
				System.out.println("Node Metrics:");
				
				Map<String, Double> degM = graphMetrics.getSortedNodeMetric(g, NodeMetric.DEGREE);
				
				System.out.println("\n\n--> Degree: Node id");
				
				Set<String> ids = degM.keySet();
				int count = 1;
				
				for (String id : ids)
				{
					Double value = degM.get(id);
					System.out.println(value + ": " + id);
					
					if (count == 100){ break; }
					count++;
				}
				
				Map<String, Double> curvM = graphMetrics.getSortedNodeMetric(g, NodeMetric.CURVATURE);
				
				System.out.println("\n\n--> Curvature: Node id");
				
				ids = curvM.keySet();
				count = 1;
				
				for (String id : ids)
				{
					Double value = curvM.get(id);
					System.out.println(value + ": " + id);
					
					if (count == 100){ break; }
					count++;
				}
				
				/*
				// it is slow!
				Map<String, Double> vBCM = GraphMetrics.getSortedNodeMetric(g, NodeMetric.VERTEX_BETWEENESS_CENTRALITY);
				
				System.out.println("\n\n--> Vertex Betweeness Centrality: Node id");
				
				ids = vBCM.keySet();
				count = 1;
				
				for (String id : ids)
				{
					Double value = vBCM.get(id);
					System.out.println(value + ": " + id);
					
					if (count == 100){ break; }
					count++;
				}
				*/
			}
			/**
			 * plot the degree distribution
			 */
			else if (action.equals("p"))
			{
				Graph<Vertex, Edge> g = GraphUtils.loadGraphMl(GraphUtils.GRAPHML_FILENAME);
				
				GraphPlotter.plotDegreeDistributionBarChartFrame(g);
			}
			/**
			 * generate subgraphs for a set of dates
			 */
			else if (action.equals("g"))
			{
				log.info("Loading data...");
				
				DatasetLoader loader = new DatasetLoader();
				SparseMultigraph<Actor, Link> multiGraph = loader.loadDataset();
				
				log.info("Multigraph loaded...");
				
				Calendar cal = Calendar.getInstance(); 
				
				TreeMap<Date, Double> data1 = new TreeMap<Date, Double>();
				TreeMap<Date, Double> data2 = new TreeMap<Date, Double>();
				TreeMap<Date, Double> data3 = new TreeMap<Date, Double>();
				TreeMap<Date, Double> data4 = new TreeMap<Date, Double>();
				
				Date date = new SimpleDateFormat(DataUtils.INPUT_DATE_FORMAT).parse("01.01.2000");
				cal.setTime(date);
				GraphMetric metric = GraphMetric.AVG_VERTEX_BETWEENESS_CENTRALITY;
				
				org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics<Actor, Link> graphMetrics = new org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics<Actor, Link>();
				
				// NB: the metric is computed over the whole graph (all connected components)
				for (int i = 0; i < 12; i++)
				{
					Graph<Actor, Link> graph = GraphUtils.getMultiGraphSnapshot(multiGraph, date.getTime(), 0);
					
					double professorAvgValue = GraphMetrics.getAverageStatusGraphMetricValue(graph, metric, Actor.Status.PROFESSOR);
					double postDocAvgValue = GraphMetrics.getAverageStatusGraphMetricValue(graph, metric, Actor.Status.POSTDOC);
					double phdAvgValue = GraphMetrics.getAverageStatusGraphMetricValue(graph, metric, Actor.Status.PHD);
					double avgValue = graphMetrics.getAverageGraphMetricValue(graph, metric);
					
					data1.put(date, professorAvgValue);
					data2.put(date, postDocAvgValue);
					data3.put(date, phdAvgValue);
					data4.put(date, avgValue);
					
					// add one month to the current date
					cal.add(Calendar.MONTH, 1);
					date = cal.getTime();
				}
				
				Vector<TreeMap<Date, Double>> datasets = new Vector<TreeMap<Date, Double>>();
				
				datasets.add(data1);
				datasets.add(data2);
				datasets.add(data3);
				datasets.add(data4);
				
				DataUtils.writeTimeSeriesDatasets("gnuplot.dat", datasets);
			}
			/**
			 * managing node time series
			 */
			else if (action.equals("t1"))
			{
				log.info("Loading data...");
				
				SparseMultigraph<Actor, Link> multiGraph = DatasetLoader.loadArffDataset();
				
				log.info("Multigraph loaded...");
				
				Date startDate = new SimpleDateFormat(DataUtils.INPUT_DATE_FORMAT).parse("01.04.2003");
				Date endDate = new SimpleDateFormat(DataUtils.INPUT_DATE_FORMAT).parse("01.02.2013");
				
				log.info("Getting graph from multigraph...");
				
				UndirectedGraph<Actor, Link> graph = GraphUtils.extractUndirectedGraphFromMultiGraph(multiGraph);
				
				log.info("Graph extracted...");
				log.info("Number of vertices: " + graph.getVertexCount());
				log.info("Number of edges: " + graph.getEdgeCount());
				
				log.info("Clustering graph...");
				
				WeakComponentClusterer<Actor, Link> wcClusterer = new WeakComponentClusterer<Actor, Link>();
				Set<Set<Actor>> components = wcClusterer.transform(graph);
				
				log.info("Sorting clusters by size...");
				
				LinkedList<Set<Actor>> sortedClusters = GraphUtils.getSortedGraphClusters(components);
				
				Set<Actor> cluster1 = sortedClusters.getLast();
				Set<Actor> cluster2 = sortedClusters.get(sortedClusters.size() - 2);
				Set<Actor> cluster3 = sortedClusters.get(sortedClusters.size() - 3);
				Set<Actor> cluster4 = sortedClusters.get(sortedClusters.size() - 4);
				
				log.info("Getting nodes time series for cluster 1 (#nodes '" + cluster1.size() + "')...");
				
				// 40 days timelag
				long timeLag = (long) 3600 * 24 * 40 * 1000;
				
				org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics<Actor, Link> graphMetrics = new org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics<Actor, Link>();
				
				NodeMetric metric = NodeMetric.PAGERANK;
				
				Hashtable<String, TreeMap<Date, Double>> timeSeries1 = graphMetrics.getSubGraphNodesTimeSeries(multiGraph, cluster1, startDate, endDate, Calendar.MONTH, 1, timeLag, metric);
				TreeMap<Date, Double> avgTimeSeries1 = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getAverageTimeSeries(timeSeries1);
				
				log.info("Getting nodes time series for cluster 2 (#nodes '" + cluster2.size() + "')...");
				
				Hashtable<String, TreeMap<Date, Double>> timeSeries2 = graphMetrics.getSubGraphNodesTimeSeries(multiGraph, cluster2, startDate, endDate, Calendar.MONTH, 1, timeLag, metric);
				TreeMap<Date, Double> avgTimeSeries2 = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getAverageTimeSeries(timeSeries2);
				
				log.info("Getting nodes time series for cluster 3 (#nodes '" + cluster3.size() + "')...");
				
				Hashtable<String, TreeMap<Date, Double>> timeSeries3 = graphMetrics.getSubGraphNodesTimeSeries(multiGraph, cluster3, startDate, endDate, Calendar.MONTH, 1, timeLag, metric);
				TreeMap<Date, Double> avgTimeSeries3 = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getAverageTimeSeries(timeSeries3);
				
				log.info("Getting nodes time series for cluster 4 (#nodes '" + cluster4.size() + "')...");

				Hashtable<String, TreeMap<Date, Double>> timeSeries4 = graphMetrics.getSubGraphNodesTimeSeries(multiGraph, cluster4, startDate, endDate, Calendar.MONTH, 1, timeLag, metric);
				TreeMap<Date, Double> avgTimeSeries4 = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getAverageTimeSeries(timeSeries4);
				
				Vector<TreeMap<Date, Double>> datasets = new Vector<TreeMap<Date, Double>>();
				
				datasets.add(avgTimeSeries1);
				datasets.add(avgTimeSeries2);
				datasets.add(avgTimeSeries3);
				datasets.add(avgTimeSeries4);
				
				DataUtils.writeTimeSeriesDatasets("avg-time-series-by-cluster.dat", datasets);	
			}
			/**
			 * managing time series
			 */
			else if (action.equals("t2"))
			{
				log.info("Loading data...");
				
				SparseMultigraph<Actor, Link> multiGraph = DatasetLoader.loadArffDataset();
				
				log.info("Multigraph loaded...");
				
				Date startDate = new SimpleDateFormat(DataUtils.INPUT_DATE_FORMAT).parse("01.04.2003");
				Date endDate = new SimpleDateFormat(DataUtils.INPUT_DATE_FORMAT).parse("01.02.2013");
				
				log.info("Getting graph from multigraph...");
				
				UndirectedGraph<Actor, Link> graph = GraphUtils.extractUndirectedGraphFromMultiGraph(multiGraph);
				
				log.info("Graph extracted...");
				log.info("Number of vertices: " + graph.getVertexCount());
				log.info("Number of edges: " + graph.getEdgeCount());
				
				log.info("Clustering graph...");
				
				WeakComponentClusterer<Actor, Link> wcClusterer = new WeakComponentClusterer<Actor, Link>();
				Set<Set<Actor>> components = wcClusterer.transform(graph);
				
				log.info("Sorting clusters by size...");
				
				LinkedList<Set<Actor>> sortedClusters = GraphUtils.getSortedGraphClusters(components);
				
				NodeMetric metric = NodeMetric.CURVATURE;
				
				// 80 days timelag
				long timeLag = (long) 3600 * 24 * 80 * 1000;
				int timeIncrement = 1;
				
				Set<Actor> mainCluster = sortedClusters.getLast();
				
				// get main cluster subgraph & export
				Graph<Actor, Link> mainClusterSubGraph = GraphUtils.getSubGraph2(graph, mainCluster);
				GraphUtils.exportAsPajek(mainClusterSubGraph, "main-cluster.net");
				GraphUtils.exportAsGml(mainClusterSubGraph, true, "main-cluster.xml");
				GraphUtils.exportAsGraphMl(mainClusterSubGraph, "main-cluster.gml");
				
				/*
				 
				log.info("Getting nodes time series for main cluster (#nodes '" + mainCluster.size() + "')...");
				
				
				log.info("Generating time series for status PHD (main cluster)...");
				
				Hashtable<String, TreeMap<Date, Double>> phdTimeSeries = GraphMetrics.getSubGraphStatusNodesTimeSeries(multiGraph, mainCluster, startDate, endDate, Calendar.MONTH, timeIncrement, timeLag, metric, Status.PHD);
				TreeMap<Date, Double> avgPhdTimeSeries = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getAverageTimeSeries(phdTimeSeries);
				
				log.info("Generating time series for status POSTDOC (main cluster)...");
				
				Hashtable<String, TreeMap<Date, Double>> postdocTimeSeries = GraphMetrics.getSubGraphStatusNodesTimeSeries(multiGraph, mainCluster, startDate, endDate, Calendar.MONTH, timeIncrement, timeLag, metric, Status.POSTDOC);
				TreeMap<Date, Double> avgPostdocTimeSeries = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getAverageTimeSeries(postdocTimeSeries);
				
				log.info("Generating time series for status PROFESSOR (main cluster)...");
				
				Hashtable<String, TreeMap<Date, Double>> professorTimeSeries = GraphMetrics.getSubGraphStatusNodesTimeSeries(multiGraph, mainCluster, startDate, endDate, Calendar.MONTH, timeIncrement, timeLag, metric, Status.PROFESSOR);
				TreeMap<Date, Double> avgProfessorTimeSeries = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getAverageTimeSeries(professorTimeSeries);
				
				log.info("Exporting Gnuplot data for main cluster...");
				
				Vector<TreeMap<Date, Double>> datasets = new Vector<TreeMap<Date, Double>>();
				
				datasets.add(avgPhdTimeSeries);
				datasets.add(avgPostdocTimeSeries);
				datasets.add(avgProfessorTimeSeries);
				
				DataUtils.writeTimeSeriesDatasets("avg-time-series-by-status.dat", datasets);
				
				TreeMap<Date, Double> profPostDocDiffTimeSeries = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getTimeSeriesDifference(avgProfessorTimeSeries, avgPostdocTimeSeries);
				TreeMap<Date, Double> postDocPhdDiffTimeSeries = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getTimeSeriesDifference(avgPostdocTimeSeries, avgPhdTimeSeries);
				TreeMap<Date, Double> profPhdDiffTimeSeries = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getTimeSeriesDifference(avgProfessorTimeSeries, avgPhdTimeSeries);
				
				Vector<TreeMap<Date, Double>> datasets2 = new Vector<TreeMap<Date, Double>>();
				
				datasets2.add(profPostDocDiffTimeSeries);
				datasets2.add(postDocPhdDiffTimeSeries);
				datasets2.add(profPhdDiffTimeSeries);
				
				DataUtils.writeTimeSeriesDatasets("avg-time-series-diff-by-status.dat", datasets2);
				
				*/
				
				/*
				
				Set<Actor> cluster2 = sortedClusters.get(sortedClusters.size() - 2);
				
				log.info("Getting nodes time series for cluster 2 (#nodes '" + cluster2.size() + "')...");
				
				log.info("Generating time series for status PHD (cluster 2)...");
				
				Hashtable<String, TreeMap<Date, Double>> phdTimeSeries2 = GraphMetrics.getSubGraphStatusNodesTimeSeries(multiGraph, cluster2, startDate, endDate, Calendar.MONTH, timeIncrement, timeLag, metric, Status.PHD);
				TreeMap<Date, Double> avgPhdTimeSeries2 = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getAverageTimeSeries(phdTimeSeries2);
				
				log.info("Generating time series for status POSTDOC (cluster 2)...");
				
				Hashtable<String, TreeMap<Date, Double>> postdocTimeSeries2 = GraphMetrics.getSubGraphStatusNodesTimeSeries(multiGraph, cluster2, startDate, endDate, Calendar.MONTH, timeIncrement, timeLag, metric, Status.POSTDOC);
				TreeMap<Date, Double> avgPostdocTimeSeries2 = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getAverageTimeSeries(postdocTimeSeries2);
				
				log.info("Generating time series for status PROFESSOR (cluster 2)...");
				
				Hashtable<String, TreeMap<Date, Double>> professorTimeSeries2 = GraphMetrics.getSubGraphStatusNodesTimeSeries(multiGraph, cluster2, startDate, endDate, Calendar.MONTH, timeIncrement, timeLag, metric, Status.PROFESSOR);
				TreeMap<Date, Double> avgProfessorTimeSeries2 = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getAverageTimeSeries(professorTimeSeries2);
				
				log.info("Exporting Gnuplot data for cluster 2...");
				
				TreeMap<Date, Double> profPostDocDiffTimeSeries2 = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getTimeSeriesDifference(avgProfessorTimeSeries2, avgPostdocTimeSeries2);
				TreeMap<Date, Double> postDocPhdDiffTimeSeries2 = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getTimeSeriesDifference(avgPostdocTimeSeries2, avgPhdTimeSeries2);
				TreeMap<Date, Double> profPhdDiffTimeSeries2 = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getTimeSeriesDifference(avgProfessorTimeSeries2, avgPhdTimeSeries2);
				
				Vector<TreeMap<Date, Double>> datasets22 = new Vector<TreeMap<Date, Double>>();
				
				datasets22.add(profPostDocDiffTimeSeries2);
				datasets22.add(postDocPhdDiffTimeSeries2);
				datasets22.add(profPhdDiffTimeSeries2);
				
				DataUtils.writeTimeSeriesDatasets("avg-time-series-diff2.dat", datasets22);
				
				Set<Actor> cluster3 = sortedClusters.get(sortedClusters.size() - 3);
				
				log.info("Getting nodes time series for cluster 3 (#nodes '" + cluster3.size() + "')...");
				
				log.info("Generating time series for status PHD (cluster 3)...");
				
				Hashtable<String, TreeMap<Date, Double>> phdTimeSeries3 = GraphMetrics.getSubGraphStatusNodesTimeSeries(multiGraph, cluster3, startDate, endDate, Calendar.MONTH, timeIncrement, timeLag, metric, Status.PHD);
				TreeMap<Date, Double> avgPhdTimeSeries3 = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getAverageTimeSeries(phdTimeSeries3);
				
				log.info("Generating time series for status POSTDOC (cluster 3)...");
				
				Hashtable<String, TreeMap<Date, Double>> postdocTimeSeries3 = GraphMetrics.getSubGraphStatusNodesTimeSeries(multiGraph, cluster3, startDate, endDate, Calendar.MONTH, timeIncrement, timeLag, metric, Status.POSTDOC);
				TreeMap<Date, Double> avgPostdocTimeSeries3 = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getAverageTimeSeries(postdocTimeSeries3);
				
				log.info("Generating time series for status PROFESSOR (cluster 3)...");
				
				Hashtable<String, TreeMap<Date, Double>> professorTimeSeries3 = GraphMetrics.getSubGraphStatusNodesTimeSeries(multiGraph, cluster3, startDate, endDate, Calendar.MONTH, timeIncrement, timeLag, metric, Status.PROFESSOR);
				TreeMap<Date, Double> avgProfessorTimeSeries3 = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getAverageTimeSeries(professorTimeSeries3);
				
				log.info("Exporting Gnuplot data for cluster 3...");
				
				TreeMap<Date, Double> profPostDocDiffTimeSeries3 = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getTimeSeriesDifference(avgProfessorTimeSeries3, avgPostdocTimeSeries3);
				TreeMap<Date, Double> postDocPhdDiffTimeSeries3 = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getTimeSeriesDifference(avgPostdocTimeSeries3, avgPhdTimeSeries3);
				TreeMap<Date, Double> profPhdDiffTimeSeries3 = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getTimeSeriesDifference(avgProfessorTimeSeries3, avgPhdTimeSeries3);
				
				Vector<TreeMap<Date, Double>> datasets23 = new Vector<TreeMap<Date, Double>>();
				
				datasets23.add(profPostDocDiffTimeSeries3);
				datasets23.add(postDocPhdDiffTimeSeries3);
				datasets23.add(profPhdDiffTimeSeries3);
				
				DataUtils.writeTimeSeriesDatasets("avg-time-series-diff3.dat", datasets23);
				
				Set<Actor> cluster4 = sortedClusters.get(sortedClusters.size() - 4);
				
				log.info("Getting nodes time series for cluster 4 (#nodes '" + cluster4.size() + "')...");
				
				log.info("Generating time series for status PHD (cluster 4)...");
				
				Hashtable<String, TreeMap<Date, Double>> phdTimeSeries4 = GraphMetrics.getSubGraphStatusNodesTimeSeries(multiGraph, cluster4, startDate, endDate, Calendar.MONTH, timeIncrement, timeLag, metric, Status.PHD);
				TreeMap<Date, Double> avgPhdTimeSeries4 = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getAverageTimeSeries(phdTimeSeries4);
				
				log.info("Generating time series for status POSTDOC (cluster 4)...");
				
				Hashtable<String, TreeMap<Date, Double>> postdocTimeSeries4 = GraphMetrics.getSubGraphStatusNodesTimeSeries(multiGraph, cluster4, startDate, endDate, Calendar.MONTH, timeIncrement, timeLag, metric, Status.POSTDOC);
				TreeMap<Date, Double> avgPostdocTimeSeries4 = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getAverageTimeSeries(postdocTimeSeries4);
				
				log.info("Generating time series for status PROFESSOR (cluster 4)...");
				
				Hashtable<String, TreeMap<Date, Double>> professorTimeSeries4 = GraphMetrics.getSubGraphStatusNodesTimeSeries(multiGraph, cluster4, startDate, endDate, Calendar.MONTH, timeIncrement, timeLag, metric, Status.PROFESSOR);
				TreeMap<Date, Double> avgProfessorTimeSeries4 = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getAverageTimeSeries(professorTimeSeries4);
				
				log.info("Exporting Gnuplot data for cluster 4...");
				
				TreeMap<Date, Double> profPostDocDiffTimeSeries4 = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getTimeSeriesDifference(avgProfessorTimeSeries4, avgPostdocTimeSeries4);
				TreeMap<Date, Double> postDocPhdDiffTimeSeries4 = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getTimeSeriesDifference(avgPostdocTimeSeries4, avgPhdTimeSeries4);
				TreeMap<Date, Double> profPhdDiffTimeSeries4 = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getTimeSeriesDifference(avgProfessorTimeSeries4, avgPhdTimeSeries4);
				
				Vector<TreeMap<Date, Double>> datasets24 = new Vector<TreeMap<Date, Double>>();
				
				datasets24.add(profPostDocDiffTimeSeries4);
				datasets24.add(postDocPhdDiffTimeSeries4);
				datasets24.add(profPhdDiffTimeSeries4);
				
				DataUtils.writeTimeSeriesDatasets("avg-time-series-diff4.dat", datasets24);
				
				*/
			}
			/**
			 * managing time series
			 * 
			 * load the status time series and compute the node series difference
			 */
			else if (action.equals("t3"))
			{
				Vector<TreeMap<Date, Double>> datasets = DataUtils.loadTimeSeriesDatasets("gnuplot.dat");
				Vector<TreeMap<Date, Double>> datasets2 = new Vector<TreeMap<Date, Double>>();
				
				TreeMap<Date, Double> avgProfessorTimeSeries = datasets.elementAt(2);
				TreeMap<Date, Double> avgPostDocTimeSeries = datasets.elementAt(1);
				TreeMap<Date, Double> avgPhdTimeSeries = datasets.elementAt(0);
				
				TreeMap<Date, Double> profPostDocDiffTimeSeries = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getTimeSeriesDifference(avgProfessorTimeSeries, avgPostDocTimeSeries);
				TreeMap<Date, Double> postDocPhdDiffTimeSeries = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getTimeSeriesDifference(avgPostDocTimeSeries, avgPhdTimeSeries);
				TreeMap<Date, Double> profPhdDiffTimeSeries = org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics.getTimeSeriesDifference(avgProfessorTimeSeries, avgPhdTimeSeries);
				
				datasets2.add(profPostDocDiffTimeSeries);
				datasets2.add(postDocPhdDiffTimeSeries);
				datasets2.add(profPhdDiffTimeSeries);
				
				DataUtils.writeTimeSeriesDatasets("gnuplot2.dat", datasets2);
			}
			/**
			 * managing time series
			 */
			else if (action.equals("t4"))
			{
				log.info("Loading data...");
				
				SparseMultigraph<Actor, Link> multiGraph = DatasetLoader.loadArffDataset();
				
				log.info("Multigraph loaded...");
				
				Date startDate = new SimpleDateFormat(DataUtils.INPUT_DATE_FORMAT).parse("01.04.2003");
				Date endDate = new SimpleDateFormat(DataUtils.INPUT_DATE_FORMAT).parse("01.02.2013");
				
				log.info("Getting graph from multigraph...");
				
				UndirectedGraph<Actor, Link> graph = GraphUtils.extractUndirectedGraphFromMultiGraph(multiGraph);
				
				log.info("Graph extracted...");
				log.info("Number of vertices: " + graph.getVertexCount());
				log.info("Number of edges: " + graph.getEdgeCount());
				
				log.info("Clustering graph...");
				
				WeakComponentClusterer<Actor, Link> wcClusterer = new WeakComponentClusterer<Actor, Link>();
				Set<Set<Actor>> components = wcClusterer.transform(graph);
				
				log.info("Sorting clusters by size...");
				
				LinkedList<Set<Actor>> sortedClusters = GraphUtils.getSortedGraphClusters(components);
				
				org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics<Actor, Link> graphMetrics = new org.unige.mpej.eckmann.sonedyan.graph.GraphMetrics<Actor, Link>();
				
				NodeMetric metric = NodeMetric.BARYCENTER_CENTRALITY;
				
				// 182-day time lag, expressed in milliseconds (3600 s * 24 h * 182 d * 1000 ms)
				long timeLag = (long) 3600 * 24 * 182 * 1000;
				int timeIncrement = 1;
				
				Set<Actor> mainCluster = sortedClusters.getLast();
				
				log.info("Getting nodes time series for main cluster (#nodes '" + mainCluster.size() + "')...");
				log.info("Generating time series for main cluster nodes...");
				
				Hashtable<String, TreeMap<Date, Double>> timeSeries = graphMetrics.getSubGraphNodesTimeSeries(multiGraph, mainCluster, startDate, endDate, Calendar.MONTH, timeIncrement, timeLag, metric);
				
				DataUtils.exportTimeSeries2Arff("main-cluster-time-series.arff", timeSeries);
				org.unige.mpej.eckmann.sonedyan.fns.utils.DataUtils.exportStatusTimeSeries2Arff("main-cluster-status-time-series.arff", timeSeries, multiGraph, timeLag);	
				
				Vector<Double> avgPhdTimeSeries = org.unige.mpej.eckmann.sonedyan.fns.utils.DataUtils.getAvgStatusTimeSeries("main-cluster-status-time-series.arff", Status.PHD);
				Vector<Double> avgPostdocTimeSeries = org.unige.mpej.eckmann.sonedyan.fns.utils.DataUtils.getAvgStatusTimeSeries("main-cluster-status-time-series.arff", Status.POSTDOC);
				
				Hashtable<String, Integer> transitionIndexes = new Hashtable<String, Integer>();
				
				// zero-based month indexes relative to the series start date 01.04.2003
				transitionIndexes.put("1245", 30); // 10.2005
				transitionIndexes.put("1340", 24); // 04.2005
				transitionIndexes.put("2995", 12);
				transitionIndexes.put("483", 6);
				transitionIndexes.put("814", 48);
				transitionIndexes.put("388", 60);
				transitionIndexes.put("4347", 66);
				transitionIndexes.put("810", 6);
				transitionIndexes.put("1693", 38);
				transitionIndexes.put("479", 42);
				
				transitionIndexes.put("4533", 54);
				transitionIndexes.put("475", 6);
				transitionIndexes.put("2491", 60);
				transitionIndexes.put("1037", 30);
				transitionIndexes.put("3392", 44);
				transitionIndexes.put("1193", 22);
				transitionIndexes.put("3782", 51);
				transitionIndexes.put("4230", 66);
				transitionIndexes.put("5292", 66);

				transitionIndexes.put("1129", 30);
				transitionIndexes.put("269", 12);
				transitionIndexes.put("1574", 54);
				transitionIndexes.put("3022", 54);
				transitionIndexes.put("4677", 68);
				transitionIndexes.put("1802", 36);
				transitionIndexes.put("2867", 60);
				transitionIndexes.put("2866", 12);
				transitionIndexes.put("3379", 46);
				transitionIndexes.put("1565", 60);

				transitionIndexes.put("740", 9);
				transitionIndexes.put("1209", 14);
				transitionIndexes.put("449", 24);
				transitionIndexes.put("1854", 60);
				transitionIndexes.put("2", 12);
				transitionIndexes.put("1459", 57);
				transitionIndexes.put("1355", 66);
				transitionIndexes.put("827", 18);
				transitionIndexes.put("825", 18);
				transitionIndexes.put("2256", 56);

				transitionIndexes.put("823", 18);
				transitionIndexes.put("822", 54);
				transitionIndexes.put("820", 54);
				transitionIndexes.put("2157", 60);
				transitionIndexes.put("3747", 61);
				
				Vector<Double> avgPhdCorrelation = GraphMetrics.getTransitionNodesCorrelation(timeSeries, avgPhdTimeSeries, transitionIndexes, 6);
				Vector<Double> avgPostdocCorrelation = GraphMetrics.getTransitionNodesCorrelation(timeSeries, avgPostdocTimeSeries, transitionIndexes, 6);
				
				log.info("AVG Before PHD Correlation: " + avgPhdCorrelation.get(0));
				log.info("AVG Before POSTDOC Correlation: " + avgPostdocCorrelation.get(0));
				log.info("AVG After PHD Correlation: " + avgPhdCorrelation.get(1));
				log.info("AVG After POSTDOC Correlation: " + avgPostdocCorrelation.get(1));
			}
			/**
			 * classification tests
			 */
			else if (action.equals("c1"))
			{
				double accuracy = StatusClassifier.getClassificationAccuracy("main-cluster-status-time-series.arff");
				
				log.info("Accuracy: " + accuracy);
			}
		}
		catch(Exception e)
		{
			log.error("An error occured: " + e.getMessage());
			
			e.printStackTrace();
		}
	
	}
	
	/**
	 * Prints the program's command-line usage instructions to standard
	 * output, including the recommended JVM options (log4j configuration
	 * and heap sizing).
	 */
	static public void displayUsage()
	{
		final String[] usageLines = {
			"Usage: ",
			"",
			"JAVA_OPTIONS=\"-Dlog4j.configuration=file:conf/log4j.properties -Xms1024m -Xmx2048m\"",
			"",
			"java $JAVA_OPTIONS -jar Sonedyan-0.1.jar <l1>"
		};
		
		// emit each line verbatim; empty entries produce the blank separator lines
		for (String line : usageLines)
		{
			System.out.println(line);
		}
	}
	
}
