package dmir.reaction.publico.graph.communities.heatkernel;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Set;

import org.apache.commons.cli.CommandLine;
import org.ejml.data.DenseMatrix64F;
import org.ejml.ops.CommonOps;
import org.ejml.ops.MatrixIO;

import dmir.reaction.publico.graph.elements.CoOccurrence;
import dmir.reaction.publico.graph.elements.Node;

public class HeatKernel {
		
	// Adjacency structure of the full co-occurrence graph:
	// source node -> (neighbour node -> the co-occurrence events forming that edge).
	// The weight of an edge is the size of its CoOccurrence set.
	static HashMap< Node, HashMap<Node,Set<CoOccurrence>> > coOccurrences = new HashMap<Node, HashMap<Node,Set<CoOccurrence>>>();
	// Same structure after frequency filtering (aliases coOccurrences when freq == 0).
	static HashMap< Node, HashMap<Node,Set<CoOccurrence>> > filtered = new HashMap<Node, HashMap<Node,Set<CoOccurrence>>>();
	
	// entity id -> Node (id, name, type, frequency), populated by generateNodes()
	static HashMap<Integer,Node> nodes = new HashMap<Integer,Node>();
	// entity id <-> entity name lookups, injected through main()
	static HashMap<Integer, String> ids_entities = new HashMap<Integer,String>();
	static HashMap<String, Integer> entities_ids = new HashMap<String,Integer>();

	// memoised weighted degree (vol contribution) per node
	static HashMap<Node, Integer> nodeWeightedDegreeCache = new HashMap<Node, Integer>();
	// cut value per prefix size (key = set size - 1), used by the incremental update in condunctance()
	static HashMap<Integer, Integer> cutCache = new HashMap<Integer, Integer>();
	
	
	/**
	 * Pipeline entry point: builds the co-occurrence graph, runs the heat-kernel
	 * power method seeded on the "preference" entity, then sweeps the ranked
	 * vertices one at a time, writing each prefix set and its cut/conductance
	 * values to disk.
	 *
	 * Options read from {@code line}: cooccurrencesFile, freq, temperature,
	 * iterations, preference.
	 *
	 * Output: results.tsv, results_normalized.tsv, and one set_#.txt per sweep
	 * step inside a directory named after the preference entity (# matches the
	 * line number in results.tsv).
	 */
	public static void main(CommandLine line, HashMap<Integer,String> ids_entities_, HashMap<String, Integer> entities_ids_) throws IOException {
		
		ids_entities = ids_entities_;
		entities_ids = entities_ids_;
		System.out.println("Generating nodes...");
		generateNodes("entities_freq.txt");
		System.out.println("Reading co-occurrences...");
		readFile(line.getOptionValue("cooccurrencesFile"));
		
		// optional frequency filter; 0 disables it
		int freq = Integer.parseInt(line.getOptionValue("freq"));
		if (freq != 0) {
			System.out.println("Filtering nodes w/ freq >" + line.getOptionValue("freq"));
			filtered = filterFreq(freq);
		}
		else filtered = coOccurrences;
		
		// every node that appears on either end of a kept co-occurrence
		Set<Node> matrix_nodes = new HashSet<Node>();
		for (Node n : filtered.keySet()) {
			matrix_nodes.add(n);
			matrix_nodes.addAll(filtered.get(n).keySet());
		}
		
		System.out.println("Matrix size: " + matrix_nodes.size());
		// position in this list == row/column index in the matrices below
		LinkedList<Node> nodes_id = new LinkedList<Node>(matrix_nodes);
		
		DenseMatrix64F adjacency = new DenseMatrix64F(matrix_nodes.size(), matrix_nodes.size());
		DenseMatrix64F preferenceVector = new DenseMatrix64F(1, matrix_nodes.size());
		
		System.out.println("Preparing matrix and preference vector...");
		fillMatrixPreferenceVector(line, adjacency, preferenceVector, filtered, nodes_id);
		
		float t = Float.parseFloat(line.getOptionValue("temperature"));
		int k = Integer.parseInt(line.getOptionValue("iterations"));
		
		System.out.println("Heat Kernel...");
		LinkedList<Node> resultVector = heatKernelPowerMethod(adjacency, preferenceVector, t, k, nodes_id);
		
		LinkedList<Double> cut_values = new LinkedList<Double>();
		LinkedList<Node> set = new LinkedList<Node>();
		
		BufferedWriter results_bw = new BufferedWriter(new FileWriter(new File("results.tsv")));
		int count = 1;
		
		// directory where each prefix set is saved
		String directory = line.getOptionValue("preference").replaceAll(" ", "_");
		new File(directory).mkdir();
		
		System.out.println("Calculating condunctance values...");
		
		try {
			// sweep: grow the candidate set one ranked node at a time
			for (Node node : resultVector) {
				set.add(node);
				
				HashMap<String, Double> results = condunctance(set, matrix_nodes, filtered);
				
				// cut values are used later to normalize
				cut_values.add(results.get("cut"));
				
				// save the set members; the writer is closed even if a write fails
				BufferedWriter bw = new BufferedWriter(new FileWriter(new File(directory + "/set_" + String.valueOf(count) + ".txt")));
				try {
					for (Node n : set) {
						bw.write( String.valueOf(n.ename) + '\t' + String.valueOf(n.freq) + '\t' + String.valueOf(n.id) + '\t' + String.valueOf(n.score) + '\n');
					}
				} finally {
					bw.close();
				}
				
				// size, conductance (cut/vol) and raw cut for this prefix
				results_bw.write(String.valueOf(set.size()) + '\t' + results.get("cut") / results.get("vol") + '\t' + results.get("cut") + '\n');
				System.out.println(String.valueOf(set.size()) + '/' + String.valueOf(resultVector.size()));
				count++;
				
				// a negative cut indicates the incremental bookkeeping broke down: stop the sweep
				if (results.get("cut") < 0.0)
					break;
			}
		} finally {
			results_bw.close();
		}
		
		// largest cut observed, used to normalize cut values to [0,1]
		Double max_cut = 0.0;
		for (Double cut : cut_values) {
			if (cut > max_cut)
				max_cut = cut;
		}
		
		System.out.println("max cut: " + max_cut);
		
		// re-read results.tsv and append the normalized cut column;
		// output format: size  condunctance  cut  cut_normalized
		BufferedWriter resultsNormalized_bw = new BufferedWriter(new FileWriter(new File("results_normalized.tsv")));
		BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream("results.tsv")));
		try {
			resultsNormalized_bw.write("size" + '\t' + "condunctance_" + k + '\t' + "cut_" + k + '\t' + "cut normalized_" + k + '\n');
			String strLine;
			while ((strLine = br.readLine()) != null) {
				String[] data = strLine.split("\\t");
				// NOTE(review): if every cut is 0, max_cut is 0 and this writes Infinity/NaN
				resultsNormalized_bw.write(data[0] + '\t' + data[1] + '\t' + data[2] + '\t' + String.valueOf(Double.parseDouble(data[2]) / max_cut) + '\n');
			}
		} finally {
			br.close();
			resultsNormalized_bw.close();
		}
	}
	
	/**
	 * Returns the cut and volume of the cluster {@code nodes} (keys "cut" and
	 * "vol") within the filtered co-occurrence graph.
	 *
	 * vol(C) is the sum of the weighted degrees of the cluster members; cut(C)
	 * is the total edge weight crossing the cluster boundary, maintained
	 * incrementally via {@code cutCache}:
	 *   cut_i = cut_{i-1} + (new node's weight to outside - weight to inside)
	 * which assumes the cluster grows by exactly one node per call, starting
	 * from a single node.
	 *
	 * @param nodes        current cluster; the last element is the newly added node
	 * @param matrix_nodes full vertex set (unused directly, kept for the caller's contract)
	 * @param filtered     adjacency structure the degrees are computed on
	 */
	public static HashMap<String, Double> condunctance(LinkedList<Node> nodes, Set<Node> matrix_nodes, HashMap<Node, HashMap<Node, Set<CoOccurrence>>> filtered){
		
		// vol(C): sum of weighted degrees, memoised per node
		// (the original used a catch-NullPointerException as the cache-miss path
		// and computed nodeWeightedDegree twice on a miss)
		int vol_c = 0;
		for (Node member : nodes) {
			Integer degree = nodeWeightedDegreeCache.get(member);
			if (degree == null) {
				degree = nodeWeightedDegree(member);
				nodeWeightedDegreeCache.put(member, degree);
			}
			vol_c += degree;
		}
		
		// weight of the newest node's edges leaving / staying inside the cluster
		int new_node_outlinks_weight = 0;
		int new_node_inlinks_weight = 0;
		
		Node n = nodes.getLast();
		Set<Node> neighbor_nodes = filtered.get(n).keySet();
		
		for (Node nn : neighbor_nodes) {
			if (!nodes.contains(nn)) {
				// accumulate over all boundary neighbours
				// (the original used '=', keeping only the last neighbour's weight)
				new_node_outlinks_weight += degreeWeight(n, nn);
			}
			else {
				new_node_inlinks_weight += degreeWeight(n, nn);
			}
		}
		
		int cut;
		if (nodes.size() > 1) {
			cut = cutCache.get(nodes.size()-2) + (new_node_outlinks_weight - new_node_inlinks_weight);
		}
		else {
			// single-node cluster: every edge leaves the cluster, so cut == vol
			cut = vol_c;
		}
		cutCache.put(nodes.size()-1, cut);
		
		HashMap<String, Double> result = new HashMap<String, Double>();
		result.put("cut", (double) cut);
		result.put("vol", (double) vol_c);
		
		return result;
	}	
		
	/** Unweighted degree of {@code node}: its number of distinct neighbours in the filtered graph. */
	public static int nodeDegree(Node node) {
		return filtered.get(node).size();
	}
	
	/** Weight of the edge node1 -> node2: the number of co-occurrences backing it. */
	public static int degreeWeight(Node node1, Node node2) {
		Set<CoOccurrence> edge = filtered.get(node1).get(node2);
		return edge.size();
	}
		
	/**
	 * Weighted degree of {@code node}: the total number of co-occurrences over
	 * all of its outgoing edges in the filtered graph.
	 */
	public static int nodeWeightedDegree(Node node) {
		
		int degree = 0;
		
		// hoist the outer map lookup out of the loop and iterate the edge sets
		// directly (the original re-resolved filtered.get(node) per neighbour)
		for (Set<CoOccurrence> occurrences : filtered.get(node).values()) {
			degree += occurrences.size();
		}
		
		return degree;
	}

	/**
	 * Returns a deep-ish copy of {@code coOccurrences} keeping only the edges
	 * whose weight (number of co-occurrences) is strictly above
	 * {@code degree_lower_bound}. Edge sets are copied; Node keys are shared.
	 */
	public static HashMap<Node, HashMap<Node, Set<CoOccurrence>>> filterDegree(int degree_lower_bound){
		
		HashMap< Node, HashMap<Node,Set<CoOccurrence>> > result = new HashMap<Node, HashMap<Node,Set<CoOccurrence>>>();
		
		for (Node node1 : coOccurrences.keySet()) {
			for (Node node2 : coOccurrences.get(node1).keySet()) {
				
				Set<CoOccurrence> edge = coOccurrences.get(node1).get(node2);
				
				// keep only edges whose weight exceeds the lower bound
				if (edge.size() <= degree_lower_bound) {
					continue;
				}
				
				Set<CoOccurrence> occurrences = new HashSet<CoOccurrence>(edge);
				
				HashMap<Node, Set<CoOccurrence>> entries = result.get(node1);
				if (entries == null) {
					entries = new HashMap<Node, Set<CoOccurrence>>();
					result.put(node1, entries);
				}
				
				// each (node1, node2) pair is visited once, so a duplicate means
				// the source structure is inconsistent
				if (entries.containsKey(node2)) {
					System.out.println("error!");
				}
				else {
					entries.put(node2, occurrences);
				}
			}
		}
		
		return result;
	}
	
	/**
	 * Returns a copy of {@code coOccurrences} restricted to edges whose BOTH
	 * endpoints have an entity frequency strictly above {@code freq_lower_bound}.
	 * Edge sets are copied; Node keys are shared.
	 */
	public static HashMap<Node, HashMap<Node, Set<CoOccurrence>>> filterFreq(int freq_lower_bound){
		
		// nodes that survive the frequency threshold
		Set<Node> nodes_filtered = new HashSet<Node>();
		for (Node candidate : nodes.values()) {
			if (candidate.freq > freq_lower_bound) {
				nodes_filtered.add(candidate);
			}
		}
		
		HashMap< Node, HashMap<Node,Set<CoOccurrence>> > result = new HashMap<Node, HashMap<Node,Set<CoOccurrence>>>();
		
		for (Node node1 : coOccurrences.keySet()) {
			// node1 membership does not depend on node2, so test it once
			if (!nodes_filtered.contains(node1)) {
				continue;
			}
			for (Node node2 : coOccurrences.get(node1).keySet()) {
				if (!nodes_filtered.contains(node2)) {
					continue;
				}
				
				Set<CoOccurrence> occurrences = new HashSet<CoOccurrence>(coOccurrences.get(node1).get(node2));
				
				HashMap<Node, Set<CoOccurrence>> entries = result.get(node1);
				if (entries == null) {
					entries = new HashMap<Node, Set<CoOccurrence>>();
					result.put(node1, entries);
				}
				
				// each (node1, node2) pair is visited once, so a duplicate means
				// the source structure is inconsistent
				if (entries.containsKey(node2)) {
					System.out.println("error!");
				}
				else {
					entries.put(node2, occurrences);
				}
			}
		}
		
		return result;
	}
	
	/**
	 * Reads the co-occurrence file line by line, feeding each line to
	 * {@link #readLine(String)}. The reader is closed even when a line fails
	 * to parse (the previous version leaked it), and the redundant
	 * DataInputStream wrapper is gone.
	 *
	 * @throws IOException if the file cannot be opened or read
	 */
	public static void readFile(String filename) throws IOException {
		
		BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(filename)));
		try {
			String strLine;
			while ((strLine = br.readLine()) != null) {
				readLine(strLine);
			}
		} finally {
			br.close();
		}
	}
		
	/**
	 * Parses one co-occurrence line and registers the event in
	 * {@code coOccurrences} under (node1 -> node2).
	 *
	 * Expected tab-separated layout (from the parsing below): column 0 and 1
	 * hold "entityId:..." pairs, column 3 the date, column 4 the time and
	 * column 6 the URL; trailing columns may be missing when the article has
	 * no URL.
	 *
	 * Fixes over the previous version: a line whose entity ids cannot be
	 * parsed, or that references an id with no generated Node, is now skipped
	 * instead of being registered under a null node key; a missing URL no
	 * longer discards the already-parsed ids.
	 */
	public static void readLine(String strLine){
		
		int entity1_id;
		int entity2_id;
		String date = null;
		String time = null;
		String url = null;
		
		String[] contents = strLine.split("\t");
		
		try {
			entity1_id = Integer.parseInt(contents[0].split(":")[0]);
			entity2_id = Integer.parseInt(contents[1].split(":")[0]);
		} catch (Exception e) {
			// without both entity ids the line cannot be placed in the graph
			return;
		}
		
		try {
			date = contents[3];
			time = contents[4];
			url = contents[6];
		} catch (ArrayIndexOutOfBoundsException e) {
			/* some news articles don't have the URL */
		}
		
		// resolve the Node objects created by generateNodes()
		Node node1 = nodes.get(entity1_id);
		Node node2 = nodes.get(entity2_id);
		if (node1 == null || node2 == null) {
			// unknown entity id: no Node exists for it, skip the line
			return;
		}
		
		// record the co-occurrence in the nested node1 -> node2 -> events structure
		CoOccurrence coOccurrence = new CoOccurrence(date, time, url, "", -1);
		
		HashMap<Node, Set<CoOccurrence>> entries = coOccurrences.get(node1);
		if (entries == null) {
			entries = new HashMap<Node, Set<CoOccurrence>>();
			coOccurrences.put(node1, entries);
		}
		
		Set<CoOccurrence> all_occurrences = entries.get(node2);
		if (all_occurrences == null) {
			all_occurrences = new HashSet<CoOccurrence>();
			entries.put(node2, all_occurrences);
		}
		all_occurrences.add(coOccurrence);
	}
		
	/**
	 * Loads the entity-frequency file and populates the id -> Node map.
	 *
	 * Each line is expected as "id:type:name\tfrequency"; malformed lines are
	 * reported and skipped. The reader is closed even on failure (the previous
	 * version leaked it) and each line is split only once.
	 *
	 * @throws IOException if the file cannot be opened or read
	 */
	public static void generateNodes(String filename) throws IOException {
		
		BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(filename)));
		try {
			String strLine;
			while ((strLine = br.readLine()) != null) {
				
				try {
					String[] columns = strLine.split("\t");
					String[] contents = columns[0].split(":");
					int id = Integer.parseInt(contents[0]);
					String etype = contents[1];
					String ename = contents[2];
					int freq = Integer.parseInt(columns[1]);
					
					// create and register the Node object
					nodes.put(id, new Node(id, ename, etype, freq));
					
				} catch (Exception e1) {
					System.out.println( e1.toString() + "Error parsing: " + strLine);
				}
			}
		} finally {
			br.close();
		}
	}
		
	/**
	 * Ranks the vertices by a heat-kernel-style diffusion from the seed in
	 * {@code preferenceVector}: the vector is multiplied k times by a fixed
	 * iteration matrix built from the normalized random-walk Laplacian
	 * L = I - D^-1 A, then the scores are degree-normalised and sorted.
	 *
	 * NOTE(review): by operator precedence the scale factor below is
	 * 1 - |t|/k (not (1-|t|)/k), so the iteration matrix is I + (1 - |t|/k)*L.
	 * The usual discretisation of exp(-t*L) is (I - (t/k)*L)^k — confirm the
	 * formula here is the intended one.
	 *
	 * NOTE(review): L is cached in "normalized_laplacian.ser" keyed only by
	 * file name; a stale file left over from a different graph or filter run
	 * would be loaded silently.
	 *
	 * @param adjacency        weighted adjacency matrix A, indexed by nodes_id order
	 * @param preferenceVector 1 x n seed vector (mass on the preference entity)
	 * @param t                diffusion temperature
	 * @param k                number of vector-matrix multiplications
	 * @param nodes_id         node at list position i maps to row/column i
	 * @return vertices sorted by degree-normalised diffusion score (see sortVertices)
	 */
	public static LinkedList<Node> heatKernelPowerMethod(DenseMatrix64F adjacency, DenseMatrix64F preferenceVector, double t, int k, LinkedList<Node> nodes_id ) {
		
		// L = I - D^-1 A
		// A - adjacency matrix of G
		// D - diagonal degree matrix
		// I - identity matrix
		
		DenseMatrix64F identity = CommonOps.identity(adjacency.getNumRows(), adjacency.getNumCols());
		DenseMatrix64F l = new DenseMatrix64F(adjacency.getNumRows(), adjacency.getNumCols());
		
		// if matrix L exists on disk load it, else create (and cache) it
		
		File file=new File("normalized_laplacian.ser");
		boolean exists = file.exists();
		
		if (!exists) {
		
			DenseMatrix64F diagonal = new DenseMatrix64F(adjacency.getNumRows(),adjacency.getNumCols());
			
			System.out.println("Creating diagonal degree matrix ...");
			// fill the diagonal with the (unweighted) node degrees, zero elsewhere
			for (int i = 0; i < diagonal.getNumRows(); i++) {
				for (int j = 0; j < diagonal.getNumCols(); j++) {
					if (j!=i) {				
						diagonal.set(i, j, 0);
					}					
					else if (j==i) {
						diagonal.set(i, j, nodeDegree(nodes_id.get(i)));					
					}
				}
			}
					
			// invert D in place, then W = D^-1 * A (transition probability matrix)
			CommonOps.invert(diagonal);
			System.out.println("Caculating transition probability matrix W ...");
			DenseMatrix64F w = new DenseMatrix64F(adjacency.getNumRows(), adjacency.getNumCols());
			CommonOps.mult(diagonal, adjacency, w);
			
			// L = I - W
			System.out.println("Caculating matrix L ...");			
			CommonOps.sub(identity, w, l);
			
			// cache L so later runs skip the construction
			try {
				MatrixIO.saveBin(l, "normalized_laplacian.ser");
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
	
		else {			
			
			System.out.println("Loading matrix L from disk...");
			
			try {
				l = MatrixIO.loadBin("normalized_laplacian.ser");
			} catch (IOException e) {
				e.printStackTrace();
			}			
		}
		
		DenseMatrix64F temp = new DenseMatrix64F(adjacency.getNumRows(), adjacency.getNumCols());
		DenseMatrix64F temp_k = new DenseMatrix64F(adjacency.getNumRows(), adjacency.getNumCols());
		// NOTE(review): precedence makes this scale factor 1 - |t|/k — see method javadoc
		CommonOps.scale((double) 1-Math.abs(t) / (double) k, l, temp);
		
		// iteration matrix: I + scaled L
		CommonOps.add(identity, temp, temp_k);

		// power iteration: v <- v * (I + scaled L), repeated k times
		for (int i = 1; i <= k; i++) {
			temp = new DenseMatrix64F(1, adjacency.getNumCols());
			CommonOps.mult(preferenceVector, temp_k, temp);
			preferenceVector = temp;
		}
		
		System.out.println("Sorting vertices ...");
		LinkedList<Node> nodes = sortVertices(preferenceVector, nodes_id);
		
		return nodes;		
	}
	
	/**
	 * Variant of {@link #heatKernelPowerMethod} that first raises the iteration
	 * matrix to a power by repeated squaring and only then multiplies by the
	 * preference vector.
	 *
	 * NOTE(review): the loop below squares temp_k on every pass, so after k
	 * iterations the matrix is raised to the power 2^k — not "multiplied by
	 * itself k times" as the original comment claimed. Confirm which exponent
	 * is intended before relying on k.
	 *
	 * NOTE(review): same caveats as heatKernelPowerMethod apply — the scale
	 * factor is 1 - |t|/k by operator precedence, and the cached
	 * "normalized_laplacian.ser" is keyed only by file name.
	 *
	 * @param adjacency        weighted adjacency matrix A, indexed by nodes_id order
	 * @param preferenceVector 1 x n seed vector
	 * @param t                diffusion temperature
	 * @param k                number of squaring steps (final exponent 2^k)
	 * @param nodes_id         node at list position i maps to row/column i
	 * @return vertices sorted by degree-normalised diffusion score
	 */
	public static LinkedList<Node> heatKernel(DenseMatrix64F adjacency, DenseMatrix64F preferenceVector, double t, int k, LinkedList<Node> nodes_id ) {
		
		// L = I - D^-1 A
		// A - adjacency matrix of G
		// D - diagonal degree matrix
		// I - identity matrix
		
		DenseMatrix64F identity = CommonOps.identity(adjacency.getNumRows(), adjacency.getNumCols());
		DenseMatrix64F l = new DenseMatrix64F(adjacency.getNumRows(), adjacency.getNumCols());
		
		// if matrix L exists on disk load it, else create (and cache) it
		
		File file=new File("normalized_laplacian.ser");
		boolean exists = file.exists();
		
		if (!exists) {
			
			DenseMatrix64F diagonal = new DenseMatrix64F(adjacency.getNumRows(),adjacency.getNumCols());			
			System.out.println("Creating diagonal degree matrix ...");
			// fill the diagonal with the (unweighted) node degrees, zero elsewhere
			for (int i = 0; i < diagonal.getNumRows(); i++) {
				for (int j = 0; j < diagonal.getNumCols(); j++) {
					if (j!=i) {				
						diagonal.set(i, j, 0);
					}					
					else if (j==i) {
						diagonal.set(i, j, nodeDegree(nodes_id.get(i)));					
					}
				}
			}
					
			// invert D in place, then W = D^-1 * A (transition probability matrix)
			CommonOps.invert(diagonal);
			System.out.println("Caculating transition probability matrix W ...");
			DenseMatrix64F w = new DenseMatrix64F(adjacency.getNumRows(), adjacency.getNumCols());
			CommonOps.mult(diagonal, adjacency, w);
			
			// L = I - W
			System.out.println("Caculating matrix L ...");									
			CommonOps.sub(identity, w, l);
			
			// cache L so later runs skip the construction
			try {
				MatrixIO.saveBin(l, "normalized_laplacian.ser");
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
		
		else {			
			
			System.out.println("Loading matrix L from disk...");
			
			try {
				l = MatrixIO.loadBin("normalized_laplacian.ser");
			} catch (IOException e) {
				e.printStackTrace();
			}			
		}
		
		DenseMatrix64F temp = new DenseMatrix64F(adjacency.getNumRows(), adjacency.getNumCols());
		DenseMatrix64F temp_k = new DenseMatrix64F(adjacency.getNumRows(), adjacency.getNumCols());
		// NOTE(review): precedence makes this scale factor 1 - |t|/k — see method javadoc
		CommonOps.scale((double) 1-Math.abs(t) / (double) k, l, temp);
		
		// iteration matrix: I + scaled L
		CommonOps.add(identity, temp, temp_k);
			
		/* repeated squaring: after k passes the exponent is 2^k (see javadoc) */
		for (int i = 1; i <= k; i++) {
			System.out.println("iteration: " + i);
			temp = new DenseMatrix64F(adjacency.getNumRows(), adjacency.getNumCols());
			CommonOps.mult(temp_k, temp_k, temp);
			temp_k = temp;						
		}
		
		/* multiply by the preference vector */
		DenseMatrix64F preference_k = new DenseMatrix64F(1, adjacency.getNumCols());
		CommonOps.mult(preferenceVector, temp_k, preference_k);

		System.out.println("Sorting vertices ...");
		LinkedList<Node> nodes = sortVertices(preference_k, nodes_id);
		
		return nodes;
		
		}
		
	/**
	 * Assigns each node its degree-normalised diffusion score
	 * (preference_k[i] / weighted degree) and returns the nodes sorted by
	 * score, highest first.
	 *
	 * The comparator uses {@link Double#compare}, which is a total order; the
	 * previous hand-rolled comparator returned 0 for any pair involving NaN,
	 * violating the comparator contract and allowing Collections.sort to throw
	 * "Comparison method violates its general contract!".
	 *
	 * @param preference_k 1 x n score vector, same ordering as nodes_id
	 * @param nodes_id     node at list position i corresponds to vector entry i
	 * @return the nodes (score field mutated in place) in descending score order
	 */
	public static LinkedList<Node> sortVertices(DenseMatrix64F preference_k, LinkedList<Node> nodes_id) {
		
		LinkedList<Node> nodes = new LinkedList<Node>();
		// descending: higher score first
		Comparator<Node> scoreComparator = new Comparator<Node>() {
			
			@Override
			public int compare(Node arg0, Node arg1) {
				return Double.compare(arg1.score, arg0.score);
			}
		};
		
		for (int i = 0; i < preference_k.getNumElements(); i++) {
			Node node = nodes_id.get(i);
			// NOTE(review): a node with weighted degree 0 yields Infinity/NaN here —
			// confirm isolated nodes cannot reach this point
			node.score = preference_k.get(i) / nodeWeightedDegree(node);
			nodes.add(node);
		}
		
		System.out.println("Sorting " + nodes.size() + " nodes");
		
		Collections.sort(nodes, scoreComparator);
		
		System.out.println("Sorting finished");
		
		return nodes;	
		
	}
	
	/**
	 * Sets the seed entry of the preference vector: the column corresponding to
	 * the entity named by the "preference" option is set to 1.0.
	 *
	 * NOTE(review): the adjacency-matrix fill is commented out below, so this
	 * method leaves {@code adjacency} all-zero despite its name — confirm the
	 * matrix is filled elsewhere or that this is intentional.
	 *
	 * NOTE(review): if the preference entity is unknown or was filtered out,
	 * the lookups yield null, indexOf returns -1, and preferenceVector.set
	 * throws — confirm callers validate the option first.
	 */
	public static void fillMatrixPreferenceVector(CommandLine line, DenseMatrix64F adjacency, DenseMatrix64F preferenceVector, HashMap<Node, HashMap<Node, Set<CoOccurrence>>> filtered, LinkedList<Node> nodes_id) {
		
		/*
		// fill the adjacency matrix: the ids of the nodes correspond 
		for (Node node1 : filtered.keySet()) {
			for (Node node2 : filtered.get(node1).keySet()) {
				adjacency.set(nodes_id.indexOf(node1), nodes_id.indexOf(node2), filtered.get(node1).get(node2).size());
			}
		}
		*/
		
		System.out.println("seed string: " + line.getOptionValue("preference"));
		System.out.println("seed core node: " + nodes.get(entities_ids.get(line.getOptionValue("preference"))));
		// column index of the seed node in the shared matrix ordering
		int col = nodes_id.indexOf(nodes.get(entities_ids.get(line.getOptionValue("preference"))));
		preferenceVector.set(0, col, 1.0);		
	}
}





