/**
 * This file is part of Sonedyan.
 * 
 * Sonedyan is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public
 * License as published by the Free Software Foundation;
 * either version 3 of the License, or (at your option) any
 * later version.
 *
 * Sonedyan is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied
 * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
 * PURPOSE.  See the GNU General Public License for more
 * details.
 *
 * You should have received a copy of the GNU General Public
 * License along with Sonedyan; see the file COPYING.  If not
 * see <http://www.gnu.org/licenses/>.
 * 
 * Copyright (C) 2009-2012 Jimmy Dubuisson <jimmy.dubuisson@gmail.com>
 */

package org.unige.mpej.eckmann.sonedyan.utils;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.PrintWriter;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Hashtable;
import java.util.Set;
import java.util.TreeMap;
import java.util.Vector;
import org.apache.log4j.Logger;

import edu.uci.ics.jung.graph.SparseMultigraph;

import org.unige.mpej.eckmann.sonedyan.fns.bean.Actor;
import org.unige.mpej.eckmann.sonedyan.fns.bean.Link;

/**
 * set of util methods for managing produced data
 */
/**
 * Utility methods for writing and reading produced data
 * (gnuplot time-series files and Weka ARFF exports).
 */
public class DataUtils
{
	private static Logger log = Logger.getLogger(DataUtils.class);
	
	/** date format used when serializing/parsing full dates in gnuplot files */
	public static final String DISPLAY_DATE_FORMAT = "MM-dd-yyyy";
	/** date format expected in input files */
	public static final String INPUT_DATE_FORMAT = "dd.MM.yyyy";
	/** month-resolution date format used in ARFF exports */
	public static final String REDUCED_DATE_FORMAT = "MM.yyyy";
	
	/** utility class: not meant to be instantiated */
	private DataUtils()
	{
	}
	
	/**
	 * write time series datasets in a gnuplot formatted file:
	 * one "date value" pair per line, datasets separated by two blank lines
	 * 
	 * @param fileName path of the output file
	 * @param datasets the datasets to serialize
	 */
	public static void writeTimeSeriesDatasets(String fileName, Vector<TreeMap<Date, Double>> datasets)
	{
		// try-with-resources guarantees the writer is closed exactly once,
		// even if formatting fails half-way through (the original closed twice
		// and swallowed close() failures silently)
		try (PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(fileName))))
		{
			SimpleDateFormat df = new SimpleDateFormat(DISPLAY_DATE_FORMAT);
			
			for (TreeMap<Date, Double> dataset : datasets)
			{
				for (Date d : dataset.keySet())
				{
					out.println(df.format(d) + " " + dataset.get(d));
				}
				
				// two blank lines should separate the different datasets
				out.print("\n\n");
			}
		}
		catch (Exception e)
		{
			// pass the exception itself so the stack trace is not lost
			log.error("Unable to write time series datasets: " + e.getMessage(), e);
		}
	}
	
	/**
	 * load time series datasets from a gnuplot formatted file
	 * (the format produced by {@link #writeTimeSeriesDatasets})
	 * 
	 * @param fileName path of the input file
	 * @return the parsed datasets, or null if the file could not be read or parsed
	 */
	public static Vector<TreeMap<Date, Double>> loadTimeSeriesDatasets(String fileName)
	{
		Vector<TreeMap<Date, Double>> datasets = new Vector<TreeMap<Date, Double>>();
		SimpleDateFormat formatter = new SimpleDateFormat(DISPLAY_DATE_FORMAT);
		
		try (BufferedReader br = new BufferedReader(new FileReader(fileName)))
		{
			String line;
			// true while we are inside the blank-line separator between datasets,
			// so consecutive blank lines only close the current dataset once
			boolean wasReset = false;
			TreeMap<Date, Double> dataset = new TreeMap<Date, Double>();
			
			while ((line = br.readLine()) != null)
			{
				line = line.trim();
				
				if (line.length() > 0)
				{
					wasReset = false;
					// split on any run of whitespace, more robust than a single space
					String[] columns = line.split("\\s+");
					
					dataset.put(formatter.parse(columns[0]), Double.valueOf(columns[1]));
				}
				else if (!wasReset)
				{
					// first blank line of a separator: close the current dataset
					wasReset = true;
					datasets.add(dataset);
					dataset = new TreeMap<Date, Double>();
				}
			}
			
			// the last dataset is not followed by a separator
			if (!wasReset)
			{
				datasets.add(dataset);
			}
			
			return datasets;
		}
		catch (Exception e)
		{
			log.error("An error occurred: " + e.getMessage(), e);
			
			return null;
		}
	}
	
	/**
	 * export a graph to Weka ARFF format, one data row per edge
	 * (see http://weka.wikispaces.com/ARFF+%28stable+version%29)
	 * 
	 * NB: by exporting to this format, the nodes that have no connection are discarded
	 * 
	 * @param fileName path of the output file
	 * @param multiGraph the graph whose edges are exported
	 */
	public static void export2Arff(String fileName, SparseMultigraph<Actor, Link> multiGraph)
	{
		try (PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(fileName))))
		{
			log.info("Exporting graph (# vertices: '" + multiGraph.getVertices().size() + "', # edges: '" + multiGraph.getEdges().size() + "') to Arff format...");
			
			ArrayList<Link> list = new ArrayList<Link>(multiGraph.getEdges());
			
			out.print("% FNS data\n\n");
			
			out.print("@RELATION peopleWorkingOnACommonFNSProject\n\n");
			
			out.println("@ATTRIBUTE fromId integer");
			out.println("@ATTRIBUTE fromStatus {PHD, POSTDOC, PROFESSOR}");
			out.println("@ATTRIBUTE toId integer");
			out.println("@ATTRIBUTE toStatus {PHD, POSTDOC, PROFESSOR}");
			out.println("@ATTRIBUTE projectId integer");
			out.println("@ATTRIBUTE startDate date \"" + REDUCED_DATE_FORMAT + "\"");
			out.print("@ATTRIBUTE endDate date \"" + REDUCED_DATE_FORMAT + "\"\n\n");
			
			out.println("@DATA");
			
			SimpleDateFormat formatter = new SimpleDateFormat(REDUCED_DATE_FORMAT);
			
			for (Link e : list)
			{
				out.println(e.getFromActorId() + "," + e.getFromStatus() + "," + e.getToActorId() + ","
						+ e.getToStatus() + "," + e.getLinkId() + "," + formatter.format(e.getStartDate()) + "," + formatter.format(e.getEndDate()));
			}
			
			// one line is written per edge, so the manual counter was redundant
			log.debug("Export finished: '" + list.size() + "' lines exported...");
		}
		catch (Exception e)
		{
			log.error("Unable to export to arff format: " + e.getMessage(), e);
		}
	}
	
	/**
	 * export time series to Weka ARFF format, one data row per node
	 * 
	 * NB(review): the attribute header is sized from the first series only —
	 * presumably every node time series has the same number of samples;
	 * TODO confirm this invariant with the callers
	 * 
	 * @param fileName path of the output file
	 * @param timeSeries node id -> (date -> value) time series
	 */
	public static void exportTimeSeries2Arff(String fileName, Hashtable<String, TreeMap<Date, Double>> timeSeries)
	{
		try (PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(fileName))))
		{
			Set<String> ids = timeSeries.keySet();
			
			log.info("Exporting time series (# vertices: '" + ids.size() + "') to Arff format...");
			
			// see http://weka.wikispaces.com/ARFF+%28stable+version%29
			out.print("% FNS data subgraph node time series\n\n");
			
			out.print("@RELATION fnsProjectParticipationSubGraphNodesTimeSeries\n\n");
			
			boolean writtenDateAttributes = false;
			
			for (String id : ids)
			{
				TreeMap<Date, Double> nodeTimeSeries = timeSeries.get(id);
				
				Set<Date> dates = nodeTimeSeries.keySet();
				
				// write the attribute header once, sized on the first series
				if (!writtenDateAttributes)
				{
					for (int i = 1; i <= dates.size(); i++)
					{
						out.println("@ATTRIBUTE t" + i + " real");
					}
					
					out.println("\n@DATA");
					
					writtenDateAttributes = true;
				}
				
				// StringBuilder: no synchronization needed for a method-local buffer
				StringBuilder values = new StringBuilder();
				
				for (Date d : dates)
				{
					if (values.length() > 0)
					{
						values.append(',');
					}
					
					values.append(nodeTimeSeries.get(d));
				}
				
				out.println(values.toString());
			}
			
			log.debug("Export finished...");
		}
		catch (Exception e)
		{
			log.error("Unable to export time series to arff format: " + e.getMessage(), e);
		}
	}
	
}
