package edu.cmu.vlis.datamining.core;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Properties;
import java.util.Set;

import org.jgrapht.alg.NeighborIndex;
import org.jgrapht.graph.DefaultDirectedWeightedGraph;
import org.jgrapht.graph.Multigraph;

import edu.cmu.vlis.datamining.utils.IOUtils;
import edu.cmu.vlis.datamining.utils.NodePair;
import edu.cmu.vlis.datamining.utils.ParserBlogBlog;
import edu.cmu.vlis.datamining.utils.ParserPostPost;
import edu.cmu.vlis.datamining.utils.TestGraphParser;
import edu.cmu.vlis.datamining.TopologicalFeatures.InformationCascadeFeatureExtractor;

/**
 * Driver for the topological link-prediction pipeline.
 *
 * Builds (or loads cached, serialized copies of) the post-post and blog-blog
 * training graphs, extracts per-vertex information-cascade features, parses
 * the test data to obtain the ground-truth new edges, predicts edges via
 * topological features, and reports per-feature and combined accuracy.
 * Feature vectors of correctly predicted edges are written to "testResults".
 */
public class MainTopological {

	/**
	 * Runs the full training / prediction / evaluation pipeline.
	 *
	 * @param args args[0] is the path to a properties file whose entries
	 *             (file locations, data directories) are copied into the
	 *             JVM system properties for lookup via System.getProperty()
	 */
	@SuppressWarnings("unchecked") // deserialization casts cannot be statically checked
	public static void main(String[] args) {
		long curTime = System.currentTimeMillis();
		loadProperties(args[0]);

		IOUtils utils = new IOUtils();

		DefaultDirectedWeightedGraph<Vertex, Edge> trainingPostGraph;
		Multigraph<Vertex, Edge> trainingBlogGraph;
		Map<String, Vertex> trgGraphVertexMap;
		Map<Vertex, Set<Integer>> cascadeFList;

		File trainingGraphBlogFile = new File(System.getProperty("trainingGraphBinFile"));
		File trainingGraphPostFile = new File(System.getProperty("trainingGraphPostPostBinFile"));
		File trgVertexFile = new File(System.getProperty("trgVertexFile"));
		File trgCascadeFeatureFile = new File(System.getProperty("trgCascadeFeatureFile"));

		File newEdgesInFile = new File(System.getProperty("newEdgesFile"));
		File testVerticesFile = new File(System.getProperty("testVerticesFile"));

		// Post-Post training graph: reuse the serialized copy when present,
		// otherwise parse the raw training data and cache the result.
		System.out.println("Attempting to create Post Post Training Graph");
		if (trainingGraphPostFile.exists()) {
			System.out.println("Reading from Serialized object");
			trainingPostGraph = (DefaultDirectedWeightedGraph<Vertex, Edge>) utils.deserialzeObjFrom(trainingGraphPostFile);
		} else {
			ParserPostPost parser = new ParserPostPost();
			parser.parse(new File(System.getProperty("trainingDataDir")));
			parser.printStats();
			trainingPostGraph = parser.getGraph();
			utils.serailizeObj(trainingPostGraph, trainingGraphPostFile); // Serialize for future use.
		}

		// Blog-Blog training graph plus its name -> vertex map: same
		// cache-or-parse pattern as above.
		System.out.println("Attempting to create Blog BLog Training Graph");
		if (trainingGraphBlogFile.exists()) {
			System.out.println("Reading from Serialized object");
			trainingBlogGraph = (Multigraph<Vertex, Edge>) utils.deserialzeObjFrom(trainingGraphBlogFile);
			trgGraphVertexMap = (HashMap<String, Vertex>) utils.deserialzeObjFrom(trgVertexFile);
		} else {
			ParserBlogBlog parser = new ParserBlogBlog();
			parser.parse(new File(System.getProperty("trainingDataDir")));
			parser.printStats();
			trainingBlogGraph = parser.getGraph();
			trgGraphVertexMap = parser.getVertexMap();

			utils.serailizeObj(trainingBlogGraph, trainingGraphBlogFile); // Serialize for future use.
			utils.serailizeObj(trgGraphVertexMap, trgVertexFile);
		}

		// Per-vertex information-cascade features: compute once, then reuse
		// the serialized copy on later runs.
		if (!trgCascadeFeatureFile.exists()) {
			long starttime = System.currentTimeMillis();
			InformationCascadeFeatureExtractor ic = new InformationCascadeFeatureExtractor(trainingPostGraph,
					trainingBlogGraph,
					trgGraphVertexMap);
			cascadeFList = ic.ExtractFeatures();
			long endTime = System.currentTimeMillis();
			// 1000.0 forces floating-point division; the original integer
			// division truncated fractional seconds before widening to double.
			double time = (endTime - starttime) / 1000.0;

			System.out.println("Done with Training in " + time + " seconds ");
			utils.serailizeObj(cascadeFList, trgCascadeFeatureFile);
		} else {
			cascadeFList = (HashMap<Vertex, Set<Integer>>) utils.deserialzeObjFrom(trgCascadeFeatureFile);
		}

		System.out.println("Starting parsing of test files..");

		// Ground-truth new edges and the vertices to iterate over, either
		// from cache or by parsing the test data directory.
		Set<NodePair> newEdges;
		Set<Vertex> testVertices;
		if (newEdgesInFile.exists() && testVerticesFile.exists()) {
			newEdges = (Set<NodePair>) utils.deserialzeObjFrom(newEdgesInFile);
			testVertices = (Set<Vertex>) utils.deserialzeObjFrom(testVerticesFile);
		} else {
			TestGraphParser testGraphParser = new TestGraphParser(trgGraphVertexMap, trainingBlogGraph);
			newEdges = testGraphParser.getNewEdges(new File(System.getProperty("testDataDir")));
			testVertices = testGraphParser.getVerticesToItr();
			utils.serailizeObj(newEdges, newEdgesInFile);
			utils.serailizeObj(testVertices, testVerticesFile);
		}
		System.out.println("New edges to predict: " + newEdges.size());
		System.out.println("Vertices to iterate: " + testVertices.size());

		// Build the neighbor index BEFORE removing isolated vertices, matching
		// the original order of operations (the index covers the full graph).
		GraphPostProcessor postProcessedGraph = new GraphPostProcessor(trainingBlogGraph);
		NeighborIndex<Vertex, Edge> neighborIndex = postProcessedGraph.getNeighborIndex();
		System.out.println("Neighbor Index Created !");
		trainingBlogGraph = postProcessedGraph.removeVerticesWithNoEdges();
		System.out.println("Isolated vertices removed.");

		// Predict exactly as many edges as actually appeared in the test data.
		int k = newEdges.size();
		List<PriorityQueue<NodePair>> predictedEdges =
				new FeatureExtractor(cascadeFList).computeFeatures(trainingBlogGraph, testVertices, neighborIndex, k);
		System.out.println("All features are computed.");
		utils.serailizeObj(predictedEdges, new File(System.getProperty("predictedEdges")));

		// Per-feature precision: intersect each feature's top-k predictions
		// with the ground truth; accumulate the union across all 5 features.
		Set<NodePair> allPredictedEdges = new HashSet<NodePair>();
		for (int i = 0; i < 5; i++) {
			Set<NodePair> predicted = new HashSet<NodePair>(Arrays.asList(predictedEdges.get(i).toArray(new NodePair[0])));
			allPredictedEdges.addAll(predicted); // no need to copy 'predicted' again
			predicted.retainAll(newEdges);
			System.out.println("Correctly predicted for feature " + i + "\t" + predicted.size());
		}
		allPredictedEdges.retainAll(newEdges);
		System.out.println("Correctly predicted after combining : " + allPredictedEdges.size());

		// Dump the feature vectors of the correctly predicted edges.
		// StringBuilder avoids the O(n^2) cost of repeated String concatenation.
		StringBuilder results = new StringBuilder();
		for (NodePair np : allPredictedEdges) {
			for (int i = 0; i < 5; i++) {
				results.append(np.getFeatureValOf(i)).append("\t");
			}
			results.append("\n");
		}
		// try-with-resources closes the writer even when write() throws;
		// the original leaked the handle on an exception.
		try (BufferedWriter fout = new BufferedWriter(new FileWriter(new File("testResults")))) {
			fout.write(results.toString());
		} catch (IOException e) {
			e.printStackTrace();
		}
		System.out.println("Total time in hours: " + ((System.currentTimeMillis() - curTime) / (1000 * 60 * 60)));
	}

	/**
	 * Loads the given properties file and merges its entries into the JVM
	 * system properties so the rest of the pipeline can read them with
	 * System.getProperty().
	 *
	 * @param propFile path to a java.util.Properties file
	 */
	private static void loadProperties(String propFile) {
		Properties props = new Properties();
		// try-with-resources closes the stream; the original leaked the
		// FileInputStream (it was never closed).
		try (FileInputStream in = new FileInputStream(propFile)) {
			props.load(in);
			System.getProperties().putAll(props);
		} catch (IOException e) {
			// FileNotFoundException is an IOException, so one catch suffices.
			e.printStackTrace();
		}
	}

}
