package wakita;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;
import java.util.TreeSet;

import metis.ConnectedComponents;
import metis.Graph;

public class WakitaIO implements WakitaConstants {

	/** Directory that holds the ".pairs" input files and all generated output files. */
	private String workingDirectory;

	/** Path of the directory containing the external converter/clustering binaries. */
	private String binary;

	/** Base name (without extension) used to locate the input file and name result files. */
	private String outputName;

	/*
	 * mapping: original vertex label -> normalized integer id (as String);
	 * deMapping: normalized id -> original vertex label.
	 * NOTE(review): both are only assigned inside pairs2HashMapNew(); they stay
	 * null until that method has run — confirm call order in callers.
	 */
	private HashMap<String, String> mapping, deMapping;
	// Symmetric adjacency map over the original vertex labels; filled by pairs2HashMapNew().
	HashMap<String, HashSet<String>> graph = new HashMap<String, HashSet<String>>();

	/**
	 * Creates an I/O helper for one clustering run.
	 *
	 * @param workingDirectory directory holding the input/output files
	 * @param binary           directory of the external clustering binaries
	 * @param outputName       base name of the input/output files
	 */
	public WakitaIO(String workingDirectory, String binary, String outputName) {
		this.workingDirectory = workingDirectory;
		this.binary = binary;
		this.outputName = outputName;
	}

	/**
	 * Converts the ".pairs" edge-list input file into the binary format the
	 * external clustering tool expects by invoking the "converter" binary
	 * (Windows-only: runs via "cmd.exe /c").
	 *
	 * @return path of the generated ".bin" file
	 * @throws IOException          if the converter process cannot be started
	 * @throws InterruptedException if the current thread is interrupted while
	 *                              waiting for the converter to finish
	 */
	public String pairsToBin() throws IOException, InterruptedException {
		String input = getInputFile(workingDirectory, ".pairs");
		// strip the extension: "xyz.pairs" -> "xyz" (substring instead of the
		// old subSequence + cast)
		String output = input.substring(0, input.indexOf(".")) + ".bin";

		/* convert the data into the right format */
		String cmd = "cmd.exe /c " + binary + "\\converter -n2n " + input + " " + output;

		Process p = Runtime.getRuntime().exec(cmd, null, new File(workingDirectory));
		// Drain stdout so the child process cannot block on a full pipe buffer;
		// close the reader even if reading fails (was leaked before).
		BufferedReader in = new BufferedReader(new InputStreamReader(p.getInputStream()));
		try {
			while (in.readLine() != null) {
				// converter output intentionally discarded
			}
		} finally {
			in.close();
		}
		p.waitFor();

		return output;
	}

	/**
	 * Inverts a clustering result from vertex-indexed to cluster-indexed form.
	 *
	 * @param input
	 *            {@link HashMap} Vertex --> ClusterId
	 * @return {@link HashMap} ClusterId --> {@link HashSet} of Vertices
	 */
	public HashMap<Integer, HashSet<Integer>> resultsID2Vertices(HashMap<Integer, Integer> input) {
		HashMap<Integer, HashSet<Integer>> id2Vertices = new HashMap<Integer, HashSet<Integer>>();
		int vertex, clusterId;
		HashSet<Integer> tmp;
		for (Map.Entry<Integer, Integer> entry : input.entrySet()) {
			vertex = entry.getKey();
			/* those vertices which are not in a cluster are ommited (they don't belong to the graph */
			if (entry.getValue() != null) {
				clusterId = entry.getValue();
				if (id2Vertices.containsKey(clusterId)) {
					tmp = id2Vertices.get(clusterId);
					tmp.add(vertex);
				} else {
					// first vertex seen for this cluster id
					tmp = new HashSet<Integer>();
					tmp.add(vertex);
					id2Vertices.put(clusterId, tmp);
				}
			}

		}
		return id2Vertices;
	}

	/**
	 * Shrinks a network: every vertex is replaced by the id of its cluster and
	 * only edges crossing cluster boundaries are kept (no self-loops). Vertices
	 * without a cluster assignment are skipped.
	 *
	 * @param pairsMap
	 *            {@link HashMap} Vertex --> {@link HashSet} Vertices (a pairs File as a HashMap)
	 * @param clusterMap
	 *            {@link HashMap} Vertex --> ClusterID (the clustering result)
	 * @return {@link HashMap} ClusterID --> {@link HashSet} ClusterIDs (the shrinked Network)
	 */
	public TreeMap<Integer, TreeSet<Integer>> shrinkMap(HashMap<Integer, HashSet<Integer>> pairsMap, HashMap<Integer, Integer> clusterMap) {
		TreeMap<Integer, TreeSet<Integer>> shrinkedMap = new TreeMap<Integer, TreeSet<Integer>>();
		for (Map.Entry<Integer, HashSet<Integer>> entry : pairsMap.entrySet()) {
			// single lookup replaces the old get() != null plus containsKey() pair
			Integer clusterA = clusterMap.get(entry.getKey());
			if (clusterA == null) {
				continue; // vertex is not part of any cluster
			}
			for (int vertexB : entry.getValue()) {
				Integer clusterB = clusterMap.get(vertexB);
				// keep only edges between two different clusters (null-safe,
				// unlike the old unguarded unboxing)
				if (clusterB == null || clusterB.intValue() == clusterA.intValue()) {
					continue;
				}
				TreeSet<Integer> neighbors = shrinkedMap.get(clusterA);
				if (neighbors == null) {
					neighbors = new TreeSet<Integer>(); // was a raw TreeSet before
					shrinkedMap.put(clusterA, neighbors);
				}
				neighbors.add(clusterB);
			}
		}
		return shrinkedMap;
	}

	/**
	 * Writes an edge map to a text file, one "vertexA vertexB" pair per line.
	 * IOExceptions are reported on stdout (only in DEBUG mode) and swallowed.
	 *
	 * @param map      vertex --> sorted set of adjacent vertices
	 * @param filename name of the output file, created inside the working directory
	 */
	public void writeMapToFile(TreeMap<Integer, TreeSet<Integer>> map, String filename) {
		String outFile = workingDirectory + File.separator + filename;
		try {
			File file = new File(outFile);
			if (file.exists())
				file.delete();
			BufferedWriter out = new BufferedWriter(new FileWriter(outFile));
			try {
				for (Map.Entry<Integer, TreeSet<Integer>> entry : map.entrySet()) {
					int vertexA = entry.getKey();
					// iterate the entry's own value; no second map lookup, no
					// Integer round-trip through toString()
					for (int vertexB : entry.getValue()) {
						out.write(vertexA + " " + vertexB + "\n");
					}
				}
			} finally {
				out.close(); // close even if a write fails (was leaked before)
			}
		} catch (IOException e) {
			if (WakitaMain.DEBUG)
				System.out.println("Error in writing to output file!");
		}
	}

	/**
	 * Writes a clustering to a text file: one line per cluster, containing the
	 * space-separated member vertices. The algorithm-specific suffix (e.g.
	 * "-Clauset_clusters.txt") is appended to the given base filename.
	 *
	 * @param map      ClusterID --> sorted set of member vertices
	 * @param filename base name of the output file (suffix is appended here)
	 */
	public void writeClusteringToFile(TreeMap<Integer, TreeSet<Integer>> map, String filename) {
		switch (WakitaMain.algorithm) {
		case CLAUSET:
			filename = filename + "-Clauset_clusters.txt";
			break;
		case DANON:
			filename = filename + "-Danon_clusters.txt";
			break;
		case HN:
			filename = filename + "-HN_clusters.txt";
			break;
		default:
			filename = filename + "-Clauset_clusters.txt";
		}
		String outFile = workingDirectory + File.separator + filename;
		try {
			File file = new File(outFile);
			if (file.exists())
				file.delete();
			BufferedWriter out = new BufferedWriter(new FileWriter(outFile));
			try {
				for (Map.Entry<Integer, TreeSet<Integer>> entry : map.entrySet()) {
					// build each line with a StringBuilder instead of String +=
					StringBuilder line = new StringBuilder();
					for (int vertex : entry.getValue()) {
						line.append(vertex).append(' ');
					}
					line.append('\n');
					out.write(line.toString());
				}
			} finally {
				out.close(); // close even if a write fails (was leaked before)
			}
		} catch (IOException e) {
			System.out.println("Error in writing to output file!");
		}
	}

	/**
	 * Reads a ".pairs" edge-list file (one "vertexA vertexB" pair per line,
	 * separated by a space or a tab) into an adjacency map. Edges are stored
	 * directed, exactly as they appear in the file.
	 *
	 * @param filename
	 *            this is the name of the pairs file (format: "xyz.pairs"),
	 *            resolved against the working directory
	 * @return {@link HashMap} with the mapping: Vertex --> {@link HashSet} Vertices;
	 *         empty (possibly partial) on read/parse errors
	 */
	public HashMap<Integer, HashSet<Integer>> pairs2HashMap(String filename) {
		HashMap<Integer, HashSet<Integer>> pairsMap = new HashMap<Integer, HashSet<Integer>>();
		String inputFile = workingDirectory + File.separator + filename;

		try {
			// BufferedReader replaces the deprecated DataInputStream.readLine()
			BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(inputFile)));
			try {
				String line;
				while ((line = in.readLine()) != null) {
					String[] elements = line.split(" |\t"); // Split by space or tab
					int vertexA = Integer.parseInt(elements[0]);
					int vertexB = Integer.parseInt(elements[1]);
					HashSet<Integer> successors = pairsMap.get(vertexA);
					if (successors == null) {
						successors = new HashSet<Integer>();
						pairsMap.put(vertexA, successors);
					}
					successors.add(vertexB);
				}
			} finally {
				in.close(); // close even on parse errors (was leaked before)
			}
		} catch (Exception e) {
			System.err.println("File input error");
		}

		return pairsMap;
	}

	/**
	 * Reads a ".pairs" edge-list file, reduces the graph to its largest
	 * connected component, renumbers the surviving vertices to consecutive
	 * integer ids, writes the cleaned edge list to
	 * "&lt;filename&gt;_cleared.pairs" and returns it as an adjacency map over
	 * the normalized ids.
	 *
	 * Side effects: fills the instance fields {@code graph} (symmetric
	 * adjacency over the original labels), {@code mapping} (label -> id) and
	 * {@code deMapping} (id -> label).
	 *
	 * @param filename
	 *            this is the name of the pairs file (format: "xyz.pairs")
	 * @return {@link HashMap} with the mapping: Vertex --> {@link HashSet} Vertices
	 */
	public HashMap<Integer, HashSet<Integer>> pairs2HashMapNew(String filename) {
		HashMap<Integer, HashSet<Integer>> pairsMap = new HashMap<Integer, HashSet<Integer>>();

		HashMap<String, HashSet<String>> normalizedGraph = new HashMap<String, HashSet<String>>();
		String inputFile = workingDirectory + File.separator + filename;
		// keeps each edge only in the A -> B direction it appeared in the file
		HashMap<String, HashSet<String>> graphWithoutDoubleEdges = new HashMap<String, HashSet<String>>();

		/* read the file */
		String line = null;

		try {
			FileInputStream fstream = new FileInputStream(inputFile);
			// NOTE(review): DataInputStream.readLine() is deprecated (mangles
			// non-ASCII bytes); fine as long as the files are plain ASCII.
			DataInputStream in = new DataInputStream(fstream);
			HashSet<String> tmp;
			String vertexA, vertexB;
			while ((line = in.readLine()) != null) {

				String[] elements = line.split(" |\t"); // Split by space or tab // <Added by MM>

				vertexA = elements[0];
				vertexB = elements[1];

				// field "graph" becomes symmetric: A -> B here, B -> A below
				if (graph.containsKey(vertexA)) {
					tmp = graph.get(vertexA);
					tmp.add(vertexB);
				} else {
					tmp = new HashSet<String>();
					tmp.add(vertexB);
					graph.put(vertexA, tmp);
				}
				// directed copy used later for the normalized output file
				if (graphWithoutDoubleEdges.containsKey(vertexA)) {
					tmp = graphWithoutDoubleEdges.get(vertexA);
					tmp.add(vertexB);
				} else {
					tmp = new HashSet<String>();
					tmp.add(vertexB);
					graphWithoutDoubleEdges.put(vertexA, tmp);
				}
				// reverse direction for the symmetric field "graph"
				if (graph.containsKey(vertexB)) {
					tmp = graph.get(vertexB);
					tmp.add(vertexA);
				} else {
					tmp = new HashSet<String>();
					tmp.add(vertexA);
					graph.put(vertexB, tmp);
				}
			}
			in.close();
		} catch (Exception e) {
			System.err.println("File input error");
		}
		int original_size = graph.size(); // NOTE(review): computed but never used
		/* get Largest Component */
		HashSet<String> largestComponent = getLargestComponent(graph);
		// restrict both graph variants to that component
		graphWithoutDoubleEdges = getConnectedGraph(largestComponent, graphWithoutDoubleEdges);
		graph = getConnectedGraph(largestComponent, graph);

		/* normalize Graph: consecutive ids in graph's iteration order */
		mapping = getMapping(graph);
		deMapping = getDeMapping(graph);
		normalizedGraph = getNormalizedGraph(mapping, graphWithoutDoubleEdges);

		/* write Graph to File */
		File file;
		String fileoutput = workingDirectory + File.separator + filename + "_cleared.pairs";
		System.out.println(fileoutput);
		file = new File(fileoutput);
		try {
			FileWriter fw = new FileWriter(file, false);
			BufferedWriter bw = new BufferedWriter(fw);
			String key; // NOTE(review): unused
			HashSet value; // NOTE(review): unused
			String print;
			String vertexA, vertexB;
			HashSet<String> succList;
			// one "vertexA vertexB" line per (directed) normalized edge
			for (Map.Entry<String, HashSet<String>> entry : normalizedGraph.entrySet()) {
				vertexA = entry.getKey();
				succList = entry.getValue();
				Iterator<String> it = succList.iterator();
				while (it.hasNext()) {
					vertexB = it.next();
					print = vertexA + " " + vertexB + "\n";
					bw.write(print);
				}
			}

			bw.close();
		} catch (IOException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}

		/* copy from Hashmap<String> to HashMap<Integer> */
		int vertex;
		String vertex_s, successor_s;
		HashSet<String> succ_s;
		HashSet<Integer> succ;
		for (Map.Entry<String, HashSet<String>> entry : normalizedGraph.entrySet()) {
			vertex_s = entry.getKey();
			vertex = new Integer(vertex_s);
			succ_s = entry.getValue();
			Iterator<String> it = succ_s.iterator();
			succ = new HashSet<Integer>();
			while (it.hasNext()) {
				successor_s = it.next();
				succ.add(new Integer(successor_s));
			}
			pairsMap.put(vertex, succ);
		}
		return pairsMap;
	}

	/**
	 * Replaces every vertex label in the graph by its normalized id according
	 * to the given mapping. Edge direction is preserved as stored.
	 *
	 * @param mapping original vertex label --> normalized id
	 * @param graph   adjacency map keyed by original labels
	 * @return adjacency map keyed by normalized ids
	 */
	private HashMap<String, HashSet<String>> getNormalizedGraph(HashMap<String, String> mapping, HashMap<String, HashSet<String>> graph) {
		HashMap<String, HashSet<String>> normalizedGraph = new HashMap<String, HashSet<String>>();
		for (Map.Entry<String, HashSet<String>> entry : graph.entrySet()) {
			String normalizedA = mapping.get(entry.getKey());
			for (String vertexB : entry.getValue()) {
				String normalizedB = mapping.get(vertexB);
				// single lookup instead of containsKey() + get()
				HashSet<String> newSucc = normalizedGraph.get(normalizedA);
				if (newSucc == null) {
					newSucc = new HashSet<String>();
					normalizedGraph.put(normalizedA, newSucc);
				}
				newSucc.add(normalizedB);
			}
		}
		return normalizedGraph;
	}

	/**
	 * Builds the inverse normalization table: consecutive integer ids (as
	 * Strings, starting at 0, in the graph's key iteration order) --> original
	 * vertex labels. Must stay consistent with getMapping(), which iterates
	 * the same map in the same order.
	 *
	 * @param graph adjacency map whose keys are numbered
	 * @return normalized id --> original vertex label
	 */
	private HashMap<String, String> getDeMapping(HashMap<String, HashSet<String>> graph) {
		HashMap<String, String> deMapping = new HashMap<String, String>();
		int i = 0;
		for (String vertexA : graph.keySet()) { // only the keys are needed
			deMapping.put(Integer.toString(i), vertexA);
			i++;
		}
		return deMapping;
	}

	/**
	 * Builds the normalization table: original vertex labels --> consecutive
	 * integer ids (as Strings, starting at 0, in the graph's key iteration
	 * order). Inverse of getDeMapping() over the same map.
	 *
	 * @param graph adjacency map whose keys are numbered
	 * @return original vertex label --> normalized id
	 */
	private HashMap<String, String> getMapping(HashMap<String, HashSet<String>> graph) {
		HashMap<String, String> mapping = new HashMap<String, String>();
		int i = 0;
		for (String vertexA : graph.keySet()) { // only the keys are needed
			mapping.put(vertexA, Integer.toString(i));
			i++;
		}
		return mapping;
	}

	/**
	 * Restricts a graph to the given component: only edges whose endpoints are
	 * both members of largestComponent are kept.
	 *
	 * @param largestComponent vertices of the component to keep
	 * @param graph            adjacency map to filter (not modified)
	 * @return a new adjacency map containing only the surviving edges
	 */
	private HashMap<String, HashSet<String>> getConnectedGraph(HashSet<String> largestComponent, HashMap<String, HashSet<String>> graph) {
		HashMap<String, HashSet<String>> newgraph = new HashMap<String, HashSet<String>>();
		for (Map.Entry<String, HashSet<String>> entry : graph.entrySet()) {
			String vertex = entry.getKey();
			// hoisted out of the inner loop: if the source vertex is outside
			// the component, none of its edges can survive
			if (!largestComponent.contains(vertex)) {
				continue;
			}
			for (String successor : entry.getValue()) {
				/* both vertices must be part of the largestComponent */
				if (largestComponent.contains(successor)) {
					HashSet<String> kept = newgraph.get(vertex);
					if (kept == null) {
						kept = new HashSet<String>();
						newgraph.put(vertex, kept);
					}
					kept.add(successor);
				}
			}
		}
		return newgraph;
	}

	/**
	 * Determines the largest connected component of the given network using the
	 * metis {@code ConnectedComponents} implementation.
	 *
	 * NOTE(review): assumes every vertex label is a numeric string (labels are
	 * parsed via {@code new Integer(...)}) — confirm for raw, non-normalized input.
	 *
	 * @param net adjacency map: vertex label --> set of adjacent vertex labels
	 * @return the vertex labels of the largest component
	 */
	private HashSet<String> getLargestComponent(HashMap<String, HashSet<String>> net) {
		System.out.println("\nFetch largest ConnectedComponent");
		int vertexA_int, vertexB_int, vertex_int;
		String vertexA_s, vertexB_s, vertex_s;
		HashSet<String> succ = new HashSet<String>();

		Graph graph = new Graph(net.size());
		/* mapping Component --> Vertices */
		HashSet<String> components;

		/* create Graph in another data structure (we need the neighbors) */
		for (Map.Entry<String, HashSet<String>> entry : net.entrySet()) {
			vertexA_s = entry.getKey();
			vertexA_int = new Integer(vertexA_s);
			succ = entry.getValue();
			Iterator<String> it = succ.iterator();
			while (it.hasNext()) {
				vertexB_s = it.next();
				vertexB_int = new Integer(vertexB_s);
				graph.addEdge(vertexA_int, vertexB_int);
			}
		}

		/* get the connected components */
		ConnectedComponents ccp = new ConnectedComponents(graph);
		System.out.println("Number of different Components: " + ccp.getNumberOfComponents());
		// find the index of the largest component (">=" keeps the last one on ties)
		int max = 0;
		int index = 0;
		for (int i = 0; i < ccp.getNumberOfComponents(); i++) {
			if (ccp.getComponent(i).size() >= max) {
				index = i;
				max = ccp.getComponent(i).size();
			}
			// System.out.println(ccp.getComponent(i).toString());
		}
		// NOTE(review): sizes come from getComponent(int) but the result from
		// getComponents(int) — presumably equivalent accessors in metis; confirm.
		components = ccp.getComponents(index);
		System.out.println("Largest ConnectedComponent: " + components.toString());
		System.out.println("Size largest ConnectedComponent: " + components.size());
		System.out.println("Fetch largest ConnectedComponent done\n");
		return components;
	}

	/**
	 * Parses the cluster file written by the external clustering binary. Each
	 * line lists the vertices of one cluster; the line number (starting at 0)
	 * becomes the cluster id.
	 *
	 * @param algorithm
	 *            this is the chosen Algorithm for the clustering (See
	 *            WakitaConstants); selects the algorithm-specific result file
	 * @return {@link HashMap} Vertex --> ClusterId; empty on read errors
	 */
	public HashMap<Integer, Integer> resultsToHashMap(int algorithm) {
		HashMap<Integer, Integer> clusterMap = new HashMap<Integer, Integer>();
		String inputFile;
		switch (algorithm) {
		case CLAUSET:
			inputFile = workingDirectory + File.separator + outputName + "-Clauset_clusters.txt";
			break;
		case DANON:
			inputFile = workingDirectory + File.separator + outputName + "-Danon_clusters.txt";
			break;
		case HN:
			inputFile = workingDirectory + File.separator + outputName + "-HN_clusters.txt";
			break;
		default:
			inputFile = workingDirectory + File.separator + outputName + "-Clauset_clusters.txt";
		}
		if (WakitaMain.DEBUG)
			System.out.println("ClusterFile: " + inputFile);

		try {
			// BufferedReader replaces the deprecated DataInputStream.readLine()
			BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(inputFile)));
			try {
				String line;
				int cluster = 0; // cluster id == line number
				while ((line = in.readLine()) != null) {
					String[] elements = line.split(" |\t"); // Split by space or tab
					for (int i = 0; i < elements.length; i++) {
						clusterMap.put(Integer.parseInt(elements[i]), cluster);
					}
					cluster++;
				}
			} finally {
				in.close(); // close even on parse errors (was leaked before)
			}
		} catch (Exception e) {
			System.err.println("File input error");
		}
		/* return the clustered Network: Vertex --> ClusterID */
		return clusterMap;
	}

	/**
	 * Looks for the input file "&lt;outputName&gt;.pairs" in the given directory.
	 *
	 * @param workingDirectory directory to search
	 * @param ext              required file extension (e.g. ".pairs")
	 * @return full path of the matching file, or null if the directory does not
	 *         exist, cannot be read, or contains no matching file
	 */
	public String getInputFile(String workingDirectory, String ext) {

		GenericExtFilter filter = new GenericExtFilter(ext);

		File dir = new File(workingDirectory);

		if (!dir.isDirectory()) {
			System.out.println("Directory does not exists : " + workingDirectory);
			return null;
		}

		// list out all the file name and filter by the extension;
		// File.list() returns null on an I/O error, so guard against NPE
		String[] list = dir.list(filter);

		if (list == null || list.length == 0) {
			System.out.println("no files end with : " + ext);
			return null;
		}

		for (String file : list) {
			if (WakitaMain.DEBUG)
				System.out.println("temp: " + file + " outputName: " + outputName);
			if (file.equals(outputName + ".pairs"))
				return workingDirectory + File.separator + file;
		}
		return null;
	}

	/** Inner filename filter that accepts only names carrying a fixed extension. */
	public class GenericExtFilter implements FilenameFilter {

		/** The required file name suffix, e.g. ".pairs". */
		private String ext;

		public GenericExtFilter(String ext) {
			this.ext = ext;
		}

		/** @return true if {@code name} ends with the configured extension */
		public boolean accept(File dir, String name) {
			return name.endsWith(ext);
		}
	}

	/**
	 * Translates a shrunk network into normalized cluster ids.
	 *
	 * @param shrinkedMap
	 *            {@link TreeMap} shrinkedMap: ClusterID --> {@link TreeSet} ClusterID
	 * @param normalizeMap
	 *            {@link HashMap} ClusterID --> NormalizedClusterID
	 * @return {@link TreeMap} NormalizedClusterID --> {@link TreeSet} NormalizedClusterIDs
	 */
	public TreeMap<Integer, TreeSet<Integer>> normalizeShrinkedMap(TreeMap<Integer, TreeSet<Integer>> shrinkedMap, HashMap<Integer, Integer> normalizeMap) {
		TreeMap<Integer, TreeSet<Integer>> shrinkedMapNormalized = new TreeMap<Integer, TreeSet<Integer>>();
		for (Map.Entry<Integer, TreeSet<Integer>> entry : shrinkedMap.entrySet()) {
			int normalizedKey = normalizeMap.get(entry.getKey());
			TreeSet<Integer> normalizedTreeSet = shrinkedMapNormalized.get(normalizedKey);
			if (normalizedTreeSet == null) {
				normalizedTreeSet = new TreeSet<Integer>();
				shrinkedMapNormalized.put(normalizedKey, normalizedTreeSet);
			}
			for (int value : entry.getValue()) { // typed loop replaces raw Iterator
				// Set.add() already ignores duplicates; no contains() check needed
				normalizedTreeSet.add(normalizeMap.get(value));
			}
		}
		return shrinkedMapNormalized;
	}

	/**
	 * Assigns consecutive ids (starting at 0) to every vertex occurring in the
	 * map — each key first, then its successors, in iteration order.
	 *
	 * @param shrinkedMap
	 *            {@link TreeMap} Vertex --> {@link TreeSet} Vertices
	 * @return {@link HashMap} Vertex --> NormalizedVertex
	 */
	public HashMap<Integer, Integer> getnormalizedMapping(TreeMap<Integer, TreeSet<Integer>> shrinkedMap) {
		HashMap<Integer, Integer> normalizeMap = new HashMap<Integer, Integer>();
		int counter = 0;
		for (Map.Entry<Integer, TreeSet<Integer>> entry : shrinkedMap.entrySet()) {
			int key = entry.getKey();
			if (!normalizeMap.containsKey(key)) {
				normalizeMap.put(key, counter);
				counter++;
			}
			for (int value : entry.getValue()) { // typed loop replaces raw Iterator
				if (!normalizeMap.containsKey(value)) {
					normalizeMap.put(value, counter);
					counter++;
				}
			}
		}
		return normalizeMap;
	}

	/**
	 * Maps every original vertex to its new cluster id after a shrink /
	 * re-cluster round, by undoing the normalization of the cluster ids.
	 *
	 * @param clusterMapShrinked NormalizedClusterID --> newClusterID
	 * @param normalizeMap       ClusterID --> NormalizedClusterID
	 * @param clust2vertices     ClusterID --> member vertices (currently unused here)
	 * @param clusterMap         Vertex --> ClusterID
	 * @return Vertex --> newClusterID
	 */
	public HashMap<Integer, Integer> deNormalizeDeShrinkMap(HashMap<Integer, Integer> clusterMapShrinked, HashMap<Integer, Integer> normalizeMap,
			HashMap<Integer, HashSet<Integer>> clust2vertices, HashMap<Integer, Integer> clusterMap) {

		/* invert normalizeMap: NormalizedClusterID --> ClusterID */
		HashMap<Integer, Integer> norV2V = new HashMap<Integer, Integer>();
		for (Map.Entry<Integer, Integer> e : normalizeMap.entrySet()) {
			int k = e.getKey();
			int v = e.getValue();
			norV2V.put(v, k);
		}

		/* combine into ClusterID --> newClusterID */
		HashMap<Integer, Integer> deNormalizedMap = new HashMap<Integer, Integer>();
		for (Map.Entry<Integer, Integer> e : clusterMapShrinked.entrySet()) {
			int k = e.getKey();
			int v = e.getValue();
			deNormalizedMap.put(norV2V.get(k), v);
		}

		/* finally: Vertex --> newClusterID (unclustered vertices are skipped) */
		HashMap<Integer, Integer> deshrinkedMap = new HashMap<Integer, Integer>();
		for (Map.Entry<Integer, Integer> e : clusterMap.entrySet()) {
			int k = e.getKey();
			if (e.getValue() != null) {
				int v = e.getValue();
				deshrinkedMap.put(k, deNormalizedMap.get(v));
			}
		}

		return deshrinkedMap;
	}

	/**
	 * Collects all vertices assigned to the given cluster.
	 *
	 * @param clusterMap Vertex --> ClusterID
	 * @param clusterID  the cluster to select
	 * @return the vertices belonging to clusterID
	 */
	public HashSet<Integer> getVertices(HashMap<Integer, Integer> clusterMap, int clusterID) {
		HashSet<Integer> vertices = new HashSet<Integer>();
		/* Get the vertices that matter */
		for (Map.Entry<Integer, Integer> entry : clusterMap.entrySet()) {
			int vertex = entry.getKey();
			Integer cluster = entry.getValue();
			if (cluster != null && cluster.intValue() == clusterID) {
				vertices.add(vertex);
			}
		}
		return vertices;
	}

	/**
	 * Extracts the sub-network induced by the given vertex set.
	 *
	 * @param network
	 *            original input File Vertex --> {@link HashSet} Vertices (Edges)
	 * @param vertices
	 *            {@link HashSet} of Vertices that have been choosen to cluster again
	 * @return {@link TreeMap} Vertex --> {@link TreeSet} Vertices that matter;
	 *         vertices with no surviving edges are omitted
	 */
	public TreeMap<Integer, TreeSet<Integer>> getPathMap(HashMap<Integer, HashSet<Integer>> network, HashSet<Integer> vertices) {
		TreeMap<Integer, TreeSet<Integer>> pathMap = new TreeMap<Integer, TreeSet<Integer>>();
		for (Map.Entry<Integer, HashSet<Integer>> entry : network.entrySet()) {
			int vertexA = entry.getKey();
			if (!vertices.contains(vertexA)) {
				continue;
			}
			TreeSet<Integer> kept = new TreeSet<Integer>();
			for (int vertexB : entry.getValue()) {
				if (vertices.contains(vertexB)) {
					kept.add(vertexB);
				}
			}
			/* only store vertices that still have edges; the old code put the
			 * set first and then removed it again through a stale reference */
			if (!kept.isEmpty()) {
				pathMap.put(vertexA, kept);
			}
		}

		return pathMap;
	}

	/** @return the base name used for input/output files */
	public String getOutputName() {
		return outputName;
	}

	/** Sets the base name used for input/output files. */
	public void setOutputName(String outputName) {
		this.outputName = outputName;
	}

	/**
	 * Replaces the normalized vertex ids of a clustering by the original
	 * vertex ids, using the inverse of normalizeMap.
	 *
	 * @param tempClusterMap NormalizedVertex --> ClusterID
	 * @param normalizeMap   Vertex --> NormalizedVertex
	 * @return Vertex --> ClusterID
	 */
	public HashMap<Integer, Integer> deNormalizeMap(HashMap<Integer, Integer> tempClusterMap, HashMap<Integer, Integer> normalizeMap) {
		/* Create Mapping NormalizedVertex --> Vertex */
		HashMap<Integer, Integer> norV2V = new HashMap<Integer, Integer>();
		for (Map.Entry<Integer, Integer> e : normalizeMap.entrySet()) {
			int k = e.getKey();
			int v = e.getValue();
			norV2V.put(v, k);
		}

		/* replace every normalized vertex by the right vertex */
		HashMap<Integer, Integer> deNormalizedMap = new HashMap<Integer, Integer>();
		for (Map.Entry<Integer, Integer> e : tempClusterMap.entrySet()) {
			int k = e.getKey();
			int v = e.getValue();
			deNormalizedMap.put(norV2V.get(k), v);
		}
		return deNormalizedMap;
	}

	/**
	 * Inverts a clustering into a sorted map of sorted member sets.
	 *
	 * @param clusterMap
	 *            {@link HashMap} Vertex --> ClusterID
	 * @return clusterTreeMap {@link TreeMap} ClusterID --> {@link TreeSet} Vertices
	 */
	public TreeMap<Integer, TreeSet<Integer>> hash2TreeMap(HashMap<Integer, Integer> clusterMap) {
		TreeMap<Integer, TreeSet<Integer>> clusterTreeMap = new TreeMap<Integer, TreeSet<Integer>>();
		for (Map.Entry<Integer, Integer> entry : clusterMap.entrySet()) {
			int key = entry.getKey();
			if (entry.getValue() == null) {
				continue; // unclustered vertices are skipped
			}
			int value = entry.getValue();
			TreeSet<Integer> members = clusterTreeMap.get(value);
			if (members == null) {
				members = new TreeSet<Integer>();
				clusterTreeMap.put(value, members);
			}
			members.add(key);
		}
		return clusterTreeMap;
	}

	/**
	 * Removes the intermediate files of a clustering run: the
	 * "&lt;pathName&gt;.pairs" / "&lt;pathName&gt;.bin" inputs plus the
	 * algorithm-specific history and log files written for outputName.
	 *
	 * @param pathName base name of the pairs/bin files to delete
	 */
	public void clearFolder(String pathName) {
		deleteIfExists(workingDirectory + File.separator + pathName + ".pairs");
		deleteIfExists(workingDirectory + File.separator + pathName + ".bin");
		// algorithm-specific file name infix; Clauset is also the default
		String suffix;
		switch (WakitaMain.algorithm) {
		case DANON:
			suffix = "-Danon";
			break;
		case HN:
			suffix = "-HN";
			break;
		case CLAUSET:
		default:
			suffix = "-Clauset";
		}
		deleteIfExists(workingDirectory + File.separator + outputName + suffix + "_history.txt");
		deleteIfExists(workingDirectory + File.separator + outputName + suffix + "_log.txt");
	}

	/** Deletes the given file if it exists; silently ignores missing files. */
	private void deleteIfExists(String path) {
		File file = new File(path);
		if (file.exists())
			file.delete();
	}

	/**
	 * Translates a clustering over normalized vertex ids back to the original
	 * vertex labels via the deMapping table.
	 *
	 * NOTE(review): assumes pairs2HashMapNew() has run before, otherwise
	 * deMapping is null — confirm the call order in callers.
	 *
	 * @param clusterMap NormalizedVertex --> ClusterID
	 * @return OriginalVertex --> ClusterID
	 */
	public HashMap<Integer, Integer> getClusterMap(HashMap<Integer, Integer> clusterMap) {
		HashMap<Integer, Integer> clusterMapDeNorm = new HashMap<Integer, Integer>();
		for (Map.Entry<Integer, Integer> entry : clusterMap.entrySet()) {
			int vertex = entry.getKey();
			int clusterId = entry.getValue();
			String originalLabel = deMapping.get(Integer.toString(vertex));
			clusterMapDeNorm.put(Integer.valueOf(originalLabel), clusterId);
		}
		return clusterMapDeNorm;
	}
}
