package com.tcs.utils;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import com.tcs.exp.BirchClustersQuality;
import com.tcs.exp.CoinClustersQuality;
import com.tcs.tsrm.beans.Cluster;
import com.tcs.tsrm.beans.ClusteringThresholds;
import com.tcs.tsrm.beans.Messages;
import com.tcs.tsrm.beans.MinDistCluster;
import com.tcs.tsrm.birch.JBirch;

public class PrintingClusters {
	// Minimum cluster-support threshold shared by the support-statistics
	// helpers; refreshed from ClusteringThresholds inside printCluster().
	// NOTE(review): static, so the last configured value is visible to every
	// PrintingClusters instance - confirm this cross-instance sharing is intended.
	private static int setSupport = 0;
	String algorithm; // clustering algorithm name (used in output paths)
	int motifWidth; // motif window width (used in output paths)
	String sensorName; // sensor whose subsequences were clustered
	boolean randomization; // true -> write under Output/Random instead of Output

	/**
	 * Creates a printer configured for one clustering run.
	 *
	 * @param algorithm clustering algorithm name
	 * @param motifWidth motif window width
	 * @param sensorName sensor name
	 * @param random whether output goes to the randomized-output directory
	 */
	public PrintingClusters(String algorithm, int motifWidth,
			String sensorName, boolean random) {
		this.algorithm = algorithm;
		this.motifWidth = motifWidth;
		this.sensorName = sensorName;
		this.randomization = random;
	}

	// No-arg constructor: fields keep their defaults (randomization = false).
	public PrintingClusters() {
	}

	/**
	 * Writes one line per cluster ("n,memberIndexes") to Clusters-Indexes.txt
	 * under the run's output directory, and records the run's support
	 * threshold for the statistics helpers.
	 *
	 * @param allClustersR clusters to dump, one per output line
	 * @param thresholds run configuration (algorithm, sensor, width, support)
	 * @throws IOException if the output file cannot be created
	 */
	public void printCluster(ArrayList<Cluster> allClustersR,
			ClusteringThresholds thresholds) throws IOException {
		// Side effect: later support-stat helpers read this static threshold.
		setSupport = thresholds.getSupport();
		File fileOriginal;
		if (randomization) {
			fileOriginal = new File("Output/Random/"
					+ thresholds.getAlgorithm() + "/"
					+ thresholds.getSensorName() + thresholds.getMotifWidth()
					+ "/" + "Clusters-Indexes.txt");
		} else {
			fileOriginal = new File("Output" + "/" + thresholds.getAlgorithm()
					+ "/" + thresholds.getSensorName()
					+ thresholds.getMotifWidth() + "/" + "Clusters-Indexes.txt");
		}
		fileOriginal.getParentFile().mkdirs();

		BufferedWriter csvOriginalOut = new BufferedWriter(new FileWriter(
				fileOriginal));
		try {
			int counter = 1;
			for (Cluster nextCluster : allClustersR) {
				csvOriginalOut.write(counter + ","
						+ nextCluster.getClusterMembersString() + "\n");
				counter++;
			}
		} catch (IOException e) {
			// Preserve the original best-effort behavior: log and continue.
			e.printStackTrace();
		} finally {
			// Fix: the writer was previously leaked when a write failed.
			csvOriginalOut.close();
		}
	}

	// Print Clusters with Subsequences Index in them

	/**
	 * Writes one line per JBirch cluster ("n,idx,idx,...") to
	 * Clusters-Indexes.txt under the run's output directory.
	 *
	 * @param clusters each inner list holds the member row indexes of a cluster
	 * @param thresholds run configuration used to build the output path
	 * @throws IOException if the file cannot be created or written
	 */
	public void printClusterJBirch(ArrayList<ArrayList<Integer>> clusters,
			ClusteringThresholds thresholds) throws IOException {
		int counter = 1;
		File fileOriginal;
		if (randomization) {
			fileOriginal = new File("Output/Random" + "/"
					+ thresholds.getAlgorithm() + "/"
					+ thresholds.getSensorName() + thresholds.getMotifWidth()
					+ "/" + "Clusters-Indexes.txt");
		} else {
			fileOriginal = new File("Output" + "/" + thresholds.getAlgorithm()
					+ "/" + thresholds.getSensorName()
					+ thresholds.getMotifWidth() + "/" + "Clusters-Indexes.txt");
		}
		fileOriginal.getParentFile().mkdirs();
		BufferedWriter csvOriginalOut = new BufferedWriter(new FileWriter(
				fileOriginal));
		try {
			for (ArrayList<Integer> subclust : clusters) {
				csvOriginalOut.write("" + counter++);
				for (Integer rowIndex : subclust) {
					csvOriginalOut.write("," + rowIndex);
				}
				csvOriginalOut.write("\n");
			}
		} finally {
			// Fix: close even when a write throws (IOException still propagates).
			csvOriginalOut.close();
		}
	}

	/**
	 * Writes the subsequences of every cluster whose support exceeds the
	 * configured threshold to its own MotifNo.&lt;n&gt;.csv file in the working
	 * directory. I/O failures are logged and abort the remaining files.
	 *
	 * @param allClusters candidate clusters; low-support ones are skipped
	 */
	public void printMinDistClusters(ArrayList<MinDistCluster> allClusters) {
		int counter = 1;
		try {
			for (MinDistCluster eachCluster : allClusters) {
				if (eachCluster.getSupport() > setSupport) {
					BufferedWriter out = new BufferedWriter(new FileWriter(
							"MotifNo." + counter++ + ".csv"));
					try {
						out.write(eachCluster.getSubsequences());
					} finally {
						// Fix: writer was leaked if the write failed.
						out.close();
					}
				}
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Appends one tab-separated row of run statistics to
	 * Output[/Random]/ClusteringStats.txt, writing a column header first when
	 * the file is new or empty. Elapsed times are formatted as "h:m:s::ms".
	 * NOTE(review): removeSigletons and singletonsRemoved are never used -
	 * confirm whether they were meant to appear in the output row.
	 *
	 * @throws IOException if the stats file cannot be opened
	 */
	public void printStats(String sensorName, int motifWidth, String algo,
			long preProcessingTime, int nonTrivialMatSize, long clusteringTime,
			ArrayList<Cluster> allClusters, boolean random,
			boolean removeSigletons, boolean singletonsRemoved,
			boolean isTopkRHP, boolean isRangeFilter,
			boolean isAngleBasedFilterRHP) throws IOException {
		int[] supportStats = getSupportStats(allClusters);
		double meanSupport = getMeanSupport(allClusters);
		// Break pre-processing time (ms) into h / m / s / ms components.
		long ppSecs = preProcessingTime / 1000;
		long ppMin = ppSecs / 60;
		long ppHrs = ppMin / 60;
		long ppRemMilli = preProcessingTime - ppSecs * 1000;
		long ppRemSec = ppSecs - ppMin * 60;
		long ppRemMin = ppMin - ppHrs * 60;
		String preProcessingTimeString = ppHrs + ":" + ppRemMin + ":"
				+ ppRemSec + "::" + ppRemMilli;
		// Same breakdown for the clustering phase.
		long clusteringSecs = clusteringTime / 1000;
		long clusteringMin = clusteringSecs / 60;
		long clusteringHrs = clusteringMin / 60;
		long clusteringRemMilli = clusteringTime - clusteringSecs * 1000;
		long clusteringRemSec = clusteringSecs - clusteringMin * 60;
		long clusteringRemMin = clusteringMin - clusteringHrs * 60;
		String clusteringTimeString = clusteringHrs + ":" + clusteringRemMin
				+ ":" + clusteringRemSec + "::" + clusteringRemMilli;
		File fileOriginal;
		if (randomization) {
			fileOriginal = new File("Output/Random" + "/"
					+ "ClusteringStats.txt");
		} else {
			fileOriginal = new File("Output" + "/" + "ClusteringStats.txt");
		}
		fileOriginal.getParentFile().mkdirs();
		// Fix: query length on fileOriginal directly instead of constructing a
		// second File from its absolute path. Opening in append mode does not
		// truncate, so the header decision is unchanged.
		boolean needsHeader = fileOriginal.length() == 0;

		PrintWriter statOut2 = new PrintWriter(new BufferedWriter(
				new FileWriter(fileOriginal, true)));
		try {
			if (needsHeader) {
				statOut2.write("Algorithm" + "\t" + "sensorName" + "\t"
						+ "motifWidth" + "\t" + "random" + "\t" + "Top-k" + "\t"
						+ "Range Filter" + "\t" + "Angle Filter" + "\t"
						+ "preProcessingTime" + "\t" + "nonTrivialMatSize" + "\t"
						+ "clusteringTime" + "\t" + "No. of Clusters" + "\t"
						+ "minSupport" + "\t" + "maxSupport" + "\t"
						+ "singleton Counts" + "\t" + "sig. clusters count" + "\t"
						+ "meanSupport");
			}
			statOut2.write("\n" + algo + "\t" + sensorName + "\t" + motifWidth
					+ "\t" + random + "\t" + isTopkRHP + "\t" + isRangeFilter
					+ "\t" + isAngleBasedFilterRHP + "\t" + preProcessingTimeString
					+ "\t" + nonTrivialMatSize + "\t" + clusteringTimeString + "\t"
					+ allClusters.size() + "\t" + supportStats[0] + "\t"
					+ supportStats[1] + "\t" + supportStats[2] + "\t"
					+ supportStats[3] + "\t" + meanSupport);
		} finally {
			// Fix: guarantee the writer is released.
			statOut2.close();
		}
	}

	/**
	 * @param allClusters clusters to average over
	 * @return the mean support across all clusters, or 0.0 for an empty list
	 *         (previously 0/0 produced NaN in the stats file)
	 */
	private double getMeanSupport(ArrayList<Cluster> allClusters) {
		if (allClusters.isEmpty()) {
			return 0.0;
		}
		double supportSum = 0;
		for (Cluster eachCluster : allClusters) {
			supportSum += eachCluster.getSupport();
		}
		return supportSum / allClusters.size();
	}

	/**
	 * Computes support statistics over the clusters.
	 *
	 * @param allClusters clusters to analyze
	 * @return {minSupport, maxSupport, singletonCount, significantCount};
	 *         all zeros for an empty list (previously threw
	 *         IndexOutOfBoundsException)
	 */
	private int[] getSupportStats(ArrayList<Cluster> allClusters) {
		if (allClusters.isEmpty()) {
			return new int[4];
		}
		int minSupport = allClusters.get(0).getSupport();
		int maxSupport = allClusters.get(0).getSupport();
		int singletonCount = 0;
		int sigClustersCount = 0;
		for (Cluster eachCluster : allClusters) {
			int newSupport = eachCluster.getSupport();
			if (newSupport < minSupport) {
				minSupport = newSupport;
			}
			if (newSupport > maxSupport) {
				maxSupport = newSupport;
			}
			if (newSupport == 1) {
				singletonCount++;
			}
			// "Significant" means strictly above the configured threshold.
			if (newSupport > setSupport) {
				sigClustersCount++;
			}
		}
		int[] supportStats = new int[4];
		supportStats[0] = minSupport;
		supportStats[1] = maxSupport;
		supportStats[2] = singletonCount;
		supportStats[3] = sigClustersCount;
		return supportStats;
	}

	// NOTE(review): empty stub - declared to print BIRCH statistics to the
	// console but currently performs no work and ignores all parameters.
	// Confirm whether this is intentionally disabled or simply unfinished.
	public void printStatsBirchOnConsole(String sensorName, String motifWidth,
			String algo, long preProcessingTime, int nonTrivialMatSize,
			long clusteringTime, ArrayList<ArrayList<Integer>> birchClusters,
			boolean lSH, boolean removeSingletons, boolean printSingletons,
			int B) throws IOException {

	}

	// Print Clustering Efficiency

	/**
	 * Appends one tab-separated row of BIRCH run statistics to
	 * Output[/Random]/ClusteringStats.txt, writing a column header first when
	 * the file is new or empty. Elapsed times are formatted as "h:m:s::ms".
	 * NOTE(review): removeSingletons, printSingletons and B are never used -
	 * confirm whether they were meant to appear in the output row.
	 *
	 * @throws IOException if the stats file cannot be opened
	 */
	public void printStatsBirch(String sensorName, int motifWidth, String algo,
			long preProcessingTime, int nonTrivialMatSize, long clusteringTime,
			ArrayList<ArrayList<Integer>> birchClusters, boolean random,
			boolean removeSingletons, boolean printSingletons, int B)
					throws IOException {

		int[] supportStats = getBirchSupportStats(birchClusters);
		double meanSupport = getBirchMeanSupport(birchClusters);
		File fileOriginal;
		if (randomization) {
			fileOriginal = new File("Output/Random" + "/"
					+ "ClusteringStats.txt");
		} else {
			fileOriginal = new File("Output" + "/" + "ClusteringStats.txt");
		}
		fileOriginal.getParentFile().mkdirs();
		// Fix: use fileOriginal.length() directly instead of constructing a
		// second File from its absolute path (append mode never truncates, so
		// the header decision is unchanged).
		boolean needsHeader = fileOriginal.length() == 0;

		// h/m/s/ms breakdown of both elapsed-time figures.
		long ppSecs = preProcessingTime / 1000;
		long ppMin = ppSecs / 60;
		long ppHrs = ppMin / 60;
		long ppRemMilli = preProcessingTime - ppSecs * 1000;
		long ppRemSec = ppSecs - ppMin * 60;
		long ppRemMin = ppMin - ppHrs * 60;
		String preProcessingTimeString = ppHrs + ":" + ppRemMin + ":"
				+ ppRemSec + "::" + ppRemMilli;
		long clusteringSecs = clusteringTime / 1000;
		long clusteringMin = clusteringSecs / 60;
		long clusteringHrs = clusteringMin / 60;
		long clusteringRemMilli = clusteringTime - clusteringSecs * 1000;
		long clusteringRemSec = clusteringSecs - clusteringMin * 60;
		long clusteringRemMin = clusteringMin - clusteringHrs * 60;
		String clusteringTimeString = clusteringHrs + ":" + clusteringRemMin
				+ ":" + clusteringRemSec + "::" + clusteringRemMilli;

		PrintWriter statOut2 = new PrintWriter(new BufferedWriter(
				new FileWriter(fileOriginal, true)));
		try {
			if (needsHeader) {
				statOut2.write("Algorithm" + "\t" + "sensorName" + "\t"
						+ "motifWidth" + "\t" + "random" + "\t"
						+ "preProcessingTime" + "\t" + "nonTrivialMatSize" + "\t"
						+ "clusteringTime" + "\t" + "No. of Clusters" + "\t"
						+ "minSupport" + "\t" + "maxSupport" + "\t"
						+ "singleton Counts" + "\t" + "sig. clusters count" + "\t"
						+ "meanSupport");
			}
			statOut2.write("\n" + algo + "\t" + sensorName + "\t" + motifWidth
					+ "\t" + random + "\t" + preProcessingTimeString + "\t"
					+ nonTrivialMatSize + "\t" + clusteringTimeString + "\t"
					+ birchClusters.size() + "\t" + supportStats[0] + "\t"
					+ supportStats[1] + "\t" + supportStats[2] + "\t"
					+ supportStats[3] + "\t" + meanSupport);
		} finally {
			// Fix: guarantee the writer is released.
			statOut2.close();
		}
	}

	/**
	 * @param birchClusters BIRCH clusters (support = member count)
	 * @return the mean cluster size, or 0.0 for an empty list (previously
	 *         0/0 produced NaN in the stats file)
	 */
	private double getBirchMeanSupport(
			ArrayList<ArrayList<Integer>> birchClusters) {
		if (birchClusters.isEmpty()) {
			return 0.0;
		}
		double supportSum = 0;
		for (ArrayList<Integer> eachCluster : birchClusters) {
			supportSum += eachCluster.size();
		}
		return supportSum / birchClusters.size();
	}

	/**
	 * Computes support statistics over BIRCH clusters (support = member count).
	 *
	 * @param birchClusters clusters to analyze
	 * @return {minSupport, maxSupport, singletonCount, significantCount};
	 *         all zeros for an empty list (previously threw
	 *         IndexOutOfBoundsException)
	 */
	private int[] getBirchSupportStats(
			ArrayList<ArrayList<Integer>> birchClusters) {
		if (birchClusters.isEmpty()) {
			return new int[4];
		}
		int minSupport = birchClusters.get(0).size();
		// Fix: was initialized to size() - 1; harmless (the first loop
		// iteration corrected it) but inconsistent with getSupportStats.
		int maxSupport = birchClusters.get(0).size();
		int singletonCount = 0;
		int sigClustersCount = 0;
		for (ArrayList<Integer> eachCluster : birchClusters) {
			int newSupport = eachCluster.size();
			if (newSupport < minSupport) {
				minSupport = newSupport;
			}
			if (newSupport > maxSupport) {
				maxSupport = newSupport;
			}
			if (newSupport == 1) {
				singletonCount++;
			}
			// "Significant" means strictly above the configured threshold.
			if (newSupport > setSupport) {
				sigClustersCount++;
			}
		}
		int[] supportStats = new int[4];
		supportStats[0] = minSupport;
		supportStats[1] = maxSupport;
		supportStats[2] = singletonCount;
		supportStats[3] = sigClustersCount;
		return supportStats;
	}

	/**
	 * Appends COIN cluster-quality statistics (max/mean/median Euclidean
	 * distance plus support stats) to [Random]ClustersQualityStats.txt.
	 *
	 * @throws IOException if the stats file cannot be opened
	 */
	public void printCoinClusterQualityStats(String sensorName,
			String motifWidth, String algo, ArrayList<Cluster> allClustersBST)
					throws IOException {
		int[] supportStats = getSupportStats(allClustersBST);
		double meanSupport = getMeanSupport(allClustersBST);
		CoinClustersQuality ccQuality = new CoinClustersQuality(sensorName,
				motifWidth, algo, allClustersBST, Cluster.originalMatrixHashMap);
		String target = randomization ? "RandomClustersQualityStats.txt"
				: "ClustersQualityStats.txt";
		PrintWriter statOut2 = new PrintWriter(new BufferedWriter(
				new FileWriter(target, true)));
		try {
			statOut2.write("\n" + algo + "\t" + sensorName + "\t" + motifWidth
					+ "\t" + ccQuality.getMaxEucDist() + "\t"
					+ ccQuality.getMeanEucDist());
			// Two values presumably mean the two middle distances of an
			// even-sized sample - confirm against CoinClustersQuality.
			double[] median = ccQuality.getMedianEucDist();
			if (median.length == 2) {
				statOut2.write("\t" + median[0] + "," + median[1]);
			} else {
				statOut2.write("\t" + median[0]);
			}
			statOut2.write("\t" + allClustersBST.size() + "\t" + supportStats[0]
					+ "\t" + supportStats[1] + "\t" + supportStats[2] + "\t"
					+ supportStats[3] + "\t" + meanSupport);
		} finally {
			// Fix: close even if a quality computation throws.
			statOut2.close();
		}
	}

	/**
	 * Appends BIRCH cluster-quality statistics (max/mean/median Euclidean
	 * distance plus support stats) to [Random]ClustersQualityStats.txt.
	 *
	 * @throws IOException if the stats file cannot be opened
	 */
	public void printBirchQualityStats(String sensorName, String motifWidth,
			String algo, ArrayList<ArrayList<Integer>> birchClusters)
					throws IOException {
		BirchClustersQuality birchQuality = new BirchClustersQuality(
				sensorName, motifWidth, algo, birchClusters,
				Cluster.originalMatrixHashMap);
		int[] supportStats = getBirchSupportStats(birchClusters);
		double meanSupport = getBirchMeanSupport(birchClusters);
		String target = randomization ? "RandomClustersQualityStats.txt"
				: "ClustersQualityStats.txt";
		PrintWriter statOut2 = new PrintWriter(new BufferedWriter(
				new FileWriter(target, true)));
		try {
			statOut2.write("\n" + algo + "\t" + sensorName + "\t" + motifWidth
					+ "\t" + birchQuality.getMaxEucDist() + "\t"
					+ birchQuality.getMeanEucDist());
			// Two values presumably mean the two middle distances of an
			// even-sized sample - confirm against BirchClustersQuality.
			double[] median = birchQuality.getMedianEucDist();
			if (median.length == 2) {
				statOut2.write("\t" + median[0] + "," + median[1]);
			} else {
				statOut2.write("\t" + median[0]);
			}
			statOut2.write("\t" + birchClusters.size() + "\t" + supportStats[0]
					+ "\t" + supportStats[1] + "\t" + supportStats[2] + "\t"
					+ supportStats[3] + "\t" + meanSupport);
		} finally {
			// Fix: close even if a quality computation throws.
			statOut2.close();
		}
	}

	// Dumps a matrix to dataSetOld<nCols>.csv, one comma-separated row per line.
	// NOTE(review): values are read from matrix[i][j + 1], i.e. columns
	// 1..nCols - this assumes every row has at least nCols + 1 entries
	// (column 0 presumably holds an index/identifier). With rows of exactly
	// nCols columns, the final write would throw
	// ArrayIndexOutOfBoundsException - confirm the caller's row layout.
	public void printOriginalSubseqMatrix(double matrix[][], int nRows,
			int nCols) {

		try {
			File fileOriginal = new File("dataSetOld" + nCols + ".csv");
			FileWriter csvOriginalStream = new FileWriter(fileOriginal);
			int j;
			for (int i = 0; i < nRows; i++) {
				// Columns 1..nCols-1, each followed by a comma.
				for (j = 0; j < (nCols - 1); j++) {
					csvOriginalStream.write(String.valueOf(matrix[i][j + 1]));
					csvOriginalStream.write(',');
				}
				// Final column (index nCols) terminates the line instead.
				csvOriginalStream.write(String.valueOf(matrix[i][j + 1]));
				csvOriginalStream.write('\n');

			}
			csvOriginalStream.close();
		} catch (IOException e) {
			e.printStackTrace();
		}

	}

	/**
	 * Writes, per cluster, a Clusters-Org/&lt;clusterId&gt;.txt file with the
	 * original-matrix coordinates of its members. I/O failures are logged
	 * and abort the remaining files.
	 * NOTE(review): nonTrivialMatrix is unused - confirm it can be dropped
	 * from callers eventually.
	 */
	public void printClusterOrgSubs(ClusteringThresholds thresholds,
			ArrayList<Cluster> allClusters, ArrayList<double[]> nonTrivialMatrix) {
		try {
			String root = randomization ? "Output/Random" : "Output";
			for (Cluster nextCluster : allClusters) {
				File fileOriginal = new File(root + "/"
						+ thresholds.getAlgorithm() + "/"
						+ thresholds.getSensorName()
						+ thresholds.getMotifWidth() + "/" + "Clusters-Org"
						+ "/" + nextCluster.getClusterId() + ".txt");
				fileOriginal.getParentFile().mkdirs();
				BufferedWriter csvOriginalOut = new BufferedWriter(
						new FileWriter(fileOriginal));
				try {
					csvOriginalOut.write(nextCluster
							.getAllMemberCoordinatesOfOriginalMatrix());
				} finally {
					// Fix: close in finally; closing the BufferedWriter also
					// closes the underlying FileWriter (old double-close removed).
					csvOriginalOut.close();
				}
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Writes, per cluster, a Clusters-Red/&lt;clusterId&gt;.txt file with the
	 * reduced-dimension (non-trivial matrix) coordinates of its members.
	 * I/O failures are logged and abort the remaining files.
	 * NOTE(review): nonTrivialMatrix is unused - confirm it can be dropped
	 * from callers eventually.
	 */
	public void printClusterNonTrivalSubs(ClusteringThresholds thresholds,
			ArrayList<Cluster> allClusters, ArrayList<double[]> nonTrivialMatrix) {
		try {
			String root = randomization ? "Output/Random" : "Output";
			for (Cluster nextCluster : allClusters) {
				File fileOriginal = new File(root + "/"
						+ thresholds.getAlgorithm() + "/"
						+ thresholds.getSensorName()
						+ thresholds.getMotifWidth() + "/" + "Clusters-Red"
						+ "/" + nextCluster.getClusterId() + ".txt");
				fileOriginal.getParentFile().mkdirs();
				BufferedWriter csvOriginalOut = new BufferedWriter(
						new FileWriter(fileOriginal));
				try {
					csvOriginalOut.write(nextCluster
							.getAllMemberCoordinatesOfNonTrivialMatrix());
				} finally {
					// Fix: close in finally; closing the BufferedWriter also
					// closes the underlying FileWriter (old double-close removed).
					csvOriginalOut.close();
				}
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Builds one "originalIndex,coord,coord,...\n" line per member of a BIRCH
	 * cluster, using the reduced-dimension (non-trivial) representation.
	 *
	 * @param nextCluster original-matrix indexes of the cluster members
	 * @return the concatenated lines
	 */
	private String getRedSubsBirch(ArrayList<Integer> nextCluster) {
		// Fix: StringBuilder avoids the O(n^2) cost of String += in a loop.
		StringBuilder out = new StringBuilder();
		for (Integer originalMatIndex : nextCluster) {
			Integer nonTrivialIndex = JBirch.originalToNonTrivial
					.get(originalMatIndex);
			double[] aPoint = Cluster.nonTrivialMatrixHashMap
					.get(nonTrivialIndex);
			out.append(originalMatIndex).append(",");
			for (double val : aPoint) {
				out.append(val).append(",");
			}
			out.append("\n");
		}
		return out.toString();
	}

	/**
	 * Builds one "coord,coord,...\n" line per member of a BIRCH cluster,
	 * using the original-matrix representation.
	 *
	 * @param nextCluster original-matrix indexes of the cluster members
	 * @return the concatenated lines
	 */
	private String getOriginalSubsBirch(ArrayList<Integer> nextCluster) {
		// Fix: StringBuilder avoids the O(n^2) cost of String += in a loop.
		StringBuilder out = new StringBuilder();
		for (Integer originalMatIndex : nextCluster) {
			double[] aPoint = Cluster.originalMatrixHashMap
					.get(originalMatIndex);
			for (double val : aPoint) {
				out.append(val).append(",");
			}
			out.append("\n");
		}
		return out.toString();
	}

	/**
	 * Writes the member index of every singleton cluster (exactly one member
	 * in its member string) found by the BST clustering, one per line.
	 *
	 * @throws IOException if the output file cannot be opened
	 */
	public void printSingletonsIndex(ArrayList<Cluster> allClustersBST)
			throws IOException {
		PrintWriter statOut2;
		if (randomization) {
			statOut2 = new PrintWriter(new BufferedWriter(new FileWriter(
					"RandomSingletonsByBST.txt")));
		} else {
			statOut2 = new PrintWriter(new BufferedWriter(new FileWriter(
					"SingletonsByBST.txt")));
		}
		try {
			for (Cluster nextCluster : allClustersBST) {
				String memberString = nextCluster.getClusterMembersString();
				String[] member = memberString.split(",", 0);
				// A single token means the cluster is a singleton.
				if (member.length == 1) {
					statOut2.write(member[0] + "\n");
				}
			}
		} finally {
			// Fix: close in finally (redundant "else continue" also removed).
			statOut2.close();
		}
	}

	/**
	 * Writes every LSH-detected singleton subsequence index, one per line,
	 * to [Random]SingletonsByLSH.txt. (Method-name typo preserved for
	 * caller compatibility.)
	 *
	 * @param singletonSet indexes flagged as singletons by LSH
	 * @throws IOException if the output file cannot be opened
	 */
	public void printSinletonsByLSH(HashSet<Integer> singletonSet)
			throws IOException {
		String target = randomization ? "RandomSingletonsByLSH.txt"
				: "SingletonsByLSH.txt";
		PrintWriter statOut2 = new PrintWriter(new BufferedWriter(
				new FileWriter(target)));
		for (Integer singleton : singletonSet) {
			statOut2.println(singleton);
		}
		statOut2.close();
	}

	// Appends one tab-separated LSH comparison row (algorithm, sensor, width,
	// precision, recall, singleton counts) to [Random]LSHStats.txt.
	// NOTE(review): the f1score parameter is accepted but never written -
	// confirm whether it should be part of the output row.
	public void printLSHStats(double precision, double recall, double f1score,
			ClusteringThresholds thresholds, int singletonsBST,
			int singletonsLSH) throws IOException {
		PrintWriter statOut2;
		if (randomization) {
			statOut2 = new PrintWriter(new BufferedWriter(new FileWriter(
					"RandomLSHStats.txt", true)));
		} else {
			statOut2 = new PrintWriter(new BufferedWriter(new FileWriter(
					"LSHStats.txt", true)));
		}

		statOut2.println(thresholds.getAlgorithm() + "\t"
				+ thresholds.getSensorName() + "\t"
				+ thresholds.getMotifWidth() + "\t" + precision + "\t" + recall
				+ "\t" + singletonsBST + "\t" + singletonsLSH);
		statOut2.close();
	}

	// Prints the Singletons Index of Coin-Birch

	/**
	 * Writes the member indexes of every Coin-Birch cluster whose size is at
	 * or below the configured LSH minimum support, one index per line.
	 *
	 * @throws IOException if the output file cannot be opened
	 */
	public void printBirchSingletonsIndex(
			ArrayList<ArrayList<Integer>> birchClusters) throws IOException {
		PrintWriter statOut2;
		if (randomization) {
			statOut2 = new PrintWriter(new BufferedWriter(new FileWriter(
					"RandomSingletonsByBirch.csv")));
		} else {
			statOut2 = new PrintWriter(new BufferedWriter(new FileWriter(
					"SingletonsByBirch.csv")));
		}
		// Fix: the threshold is loop-invariant; parse it once instead of
		// re-reading the Messages bundle for every cluster.
		int minSupportLSH = Integer.valueOf(Messages
				.getString("PrepareData.minSupportLSH"));
		try {
			for (ArrayList<Integer> nextCluster : birchClusters) {
				if (nextCluster.size() <= minSupportLSH) {
					for (Integer i : nextCluster) {
						statOut2.println(i);
					}
				}
			}
		} finally {
			// Fix: close in finally (redundant "else continue" also removed).
			statOut2.close();
		}
	}

	/**
	 * Returns the clusters present in both collections, preserving the
	 * iteration order of the coin-clustering list.
	 *
	 * @param clustersToCompareCoin ordered clusters from COIN
	 * @param clustersToCompareLSH cluster set from LSH
	 * @return clusters contained in both inputs
	 */
	public ArrayList<Cluster> intersection(
			ArrayList<Cluster> clustersToCompareCoin,
			HashSet<Cluster> clustersToCompareLSH) {
		ArrayList<Cluster> common = new ArrayList<Cluster>();
		for (Cluster candidate : clustersToCompareCoin) {
			if (!clustersToCompareLSH.contains(candidate)) {
				continue;
			}
			common.add(candidate);
		}
		return common;
	}

	// Ensures Output[/Random]/LshClusteringStats.txt exists and carries its
	// column header.
	// NOTE(review): only the header is ever written - none of the score
	// parameters (nonTrivialMatSize, nearestClusterCount, ...) are output,
	// so every call after the first is effectively a no-op. Confirm whether
	// the data row was lost or is written elsewhere.
	public void printLshClusteringScore(ClusteringThresholds thresholds,
			int nonTrivialMatSize, int isLshHasNearestClusterCount,
			int nearestClusterCount, int allClustersBSTSize,
			int totalClustersToCompareBST, int totalClustersToCompareLSH,
			int totalIntersectionCount, int intersectionHasNearestClusterCount,
			int noOfDiscLevels) throws IOException {

		File fileOriginal;
		if (randomization) {
			fileOriginal = new File("Output/Random" + "/"
					+ "LshClusteringStats.txt");
			fileOriginal.getParentFile().mkdirs();
		} else {
			fileOriginal = new File("Output" + "/" + "LshClusteringStats.txt");
			fileOriginal.getParentFile().mkdirs();
		}
		PrintWriter statOut2 = new PrintWriter(new BufferedWriter(
				new FileWriter(fileOriginal, true)));
		File file = new File(fileOriginal.getAbsolutePath());
		// Header is only written while the file is still empty.
		if (file.length() == 0) {
			statOut2.write("Algorithm" + "\t" + "sensorName" + "\t"
					+ "motifWidth" + "\t" + "nonTrivialMatSize" + "\t"
					+ "No. of Clusters" + "\t" + "Nearest Cluster Count" + "\t"
					+ "Disclevel Type" + "\t" + "noOfDiscLevels" + "\t"
					+ "Lsh Has Nearest Cluster Count" + "\t"
					+ "Clusters To Compare By BST" + "\t"
					+ "Clusters To Compare By LSH" + "\t" + "Intersection"
					+ "\t" + "Intersection Has Nearest Cluster Count" + "\n");
		}

		statOut2.close();

	}

	// Appends every matrix value from column index 2 onward to
	// [Random]Data.txt, one value per line.
	// NOTE(review): the first two entries of each row are skipped -
	// presumably index/metadata columns; confirm against the producer of
	// nonTrivialMatrix. Also note the file is opened in append mode, so
	// repeated runs accumulate data.
	public void printData(ArrayList<double[]> nonTrivialMatrix)
			throws IOException {
		PrintWriter statOut2;
		if (randomization) {
			statOut2 = new PrintWriter(new BufferedWriter(new FileWriter(
					"RandomData.txt", true)));
		} else {
			statOut2 = new PrintWriter(new BufferedWriter(new FileWriter(
					"Data.txt", true)));
		}

		for (double[] aRow : nonTrivialMatrix) {
			for (int i = 2; i < aRow.length; i++) {
				statOut2.write(aRow[i] + "\n");
			}
		}
		statOut2.close();

	}

	// Prints the original matrix of the subsequences

	/**
	 * Overwrites OriginalMatrix.txt under the run's output directory with the
	 * full matrix of original subsequences, one comma-terminated row per line.
	 *
	 * @param thresholds run configuration used to build the output path
	 * @param originalMatrix rows to dump
	 * @throws IOException if the file cannot be created
	 */
	public void printOriginalMatrix(ClusteringThresholds thresholds,
			double[][] originalMatrix) throws IOException {
		String root = randomization ? "Output/Random" : "Output";
		File target = new File(root + "/" + thresholds.getAlgorithm() + "/"
				+ thresholds.getSensorName() + thresholds.getMotifWidth()
				+ "/" + "OriginalMatrix.txt");
		target.getParentFile().mkdirs();

		PrintWriter writer = new PrintWriter(new BufferedWriter(
				new FileWriter(target, false)));
		for (double[] row : originalMatrix) {
			for (double value : row) {
				writer.write(value + ",");
			}
			writer.write("\n");
		}
		writer.close();
	}

	// Prints the Non-Trival Matrix of the Reduced Subsequences
	/**
	 * Overwrites NonTrivialMatrix.txt under the run's output directory with
	 * the reduced (non-trivial) subsequences, one comma-terminated row per line.
	 *
	 * @param thresholds run configuration used to build the output path
	 * @param nonTrivialMatrix rows to dump
	 * @throws IOException if the file cannot be created
	 */
	public void printNonTrivialMatrix(ClusteringThresholds thresholds,
			ArrayList<double[]> nonTrivialMatrix) throws IOException {
		String root = randomization ? "Output/Random" : "Output";
		File target = new File(root + "/" + thresholds.getAlgorithm() + "/"
				+ thresholds.getSensorName() + thresholds.getMotifWidth()
				+ "/" + "NonTrivialMatrix.txt");
		target.getParentFile().mkdirs();
		PrintWriter writer = new PrintWriter(new BufferedWriter(
				new FileWriter(target, false)));
		for (double[] row : nonTrivialMatrix) {
			for (double value : row) {
				writer.write(value + ",");
			}
			writer.write("\n");
		}
		writer.close();
	}

	// Print Clusters with Original Subsequences from Time-series

	/**
	 * Writes, per BIRCH cluster, a Clusters-Org/&lt;n&gt;.txt file containing the
	 * original-matrix subsequences of its members. I/O failures are logged
	 * and abort the remaining files.
	 * NOTE(review): nonTrivialMatrix is unused - confirm it can be dropped
	 * from callers eventually.
	 */
	public void printBirchClusterOrgSubs(ClusteringThresholds thresholds,
			ArrayList<ArrayList<Integer>> birchClusters,
			ArrayList<double[]> nonTrivialMatrix) {
		try {
			int counter = 1;
			String root = randomization ? "Output/Random" : "Output";
			for (ArrayList<Integer> nextCluster : birchClusters) {
				File fileOriginal = new File(root + "/"
						+ thresholds.getAlgorithm() + "/"
						+ thresholds.getSensorName()
						+ thresholds.getMotifWidth() + "/" + "Clusters-Org"
						+ "/" + counter + ".txt");
				fileOriginal.getParentFile().mkdirs();
				BufferedWriter csvOriginalOut = new BufferedWriter(
						new FileWriter(fileOriginal));
				try {
					csvOriginalOut.write(getOriginalSubsBirch(nextCluster));
				} finally {
					// Fix: close in finally; closing the BufferedWriter also
					// closes the underlying FileWriter (old double-close removed).
					csvOriginalOut.close();
				}
				counter++;
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	// Print Clusters with Reduced Dimension Subsequences
	/**
	 * Writes, per BIRCH cluster, a Clusters-Red/&lt;n&gt;.txt file containing the
	 * reduced-dimension subsequences of its members. I/O failures are logged
	 * and abort the remaining files.
	 * NOTE(review): nonTrivialMatrix is unused - confirm it can be dropped
	 * from callers eventually.
	 */
	public void printBirchClusterRedSubs(ClusteringThresholds thresholds,
			ArrayList<ArrayList<Integer>> birchClusters,
			ArrayList<double[]> nonTrivialMatrix) {
		try {
			int counter = 1;
			String root = randomization ? "Output/Random" : "Output";
			for (ArrayList<Integer> nextCluster : birchClusters) {
				File fileOriginal = new File(root + "/"
						+ thresholds.getAlgorithm() + "/"
						+ thresholds.getSensorName()
						+ thresholds.getMotifWidth() + "/" + "Clusters-Red"
						+ "/" + counter + ".txt");
				fileOriginal.getParentFile().mkdirs();
				BufferedWriter csvOriginalOut = new BufferedWriter(
						new FileWriter(fileOriginal));
				try {
					csvOriginalOut.write(getRedSubsBirch(nextCluster));
				} finally {
					// Fix: close in finally; closing the BufferedWriter also
					// closes the underlying FileWriter (old double-close removed).
					csvOriginalOut.close();
				}
				counter++;
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	// Print Centroid ( Reduced Dimension )
	/**
	 * Overwrites Centroids.txt with one "clusterNo\tcoord,coord,...\n" line
	 * per BIRCH cluster, each centroid computed over the reduced-dimension
	 * (non-trivial) points of its members.
	 * NOTE(review): nonTrivialMatrix is unused - points are fetched from
	 * Cluster.nonTrivialMatrixHashMap instead.
	 *
	 * @throws IOException if the output file cannot be created
	 */
	public void printCentroidsBirch(
			ArrayList<ArrayList<Integer>> birchClusters,
			ClusteringThresholds thresholds,
			ArrayList<double[]> nonTrivialMatrix,
			HashMap<Integer, Integer> originalToNonTrivial) throws IOException {

		int clusterNo = 1;
		String root = randomization ? "Output/Random" : "Output";
		File fileOriginal = new File(root + "/" + thresholds.getAlgorithm()
				+ "/" + thresholds.getSensorName()
				+ thresholds.getMotifWidth() + "/" + "Centroids.txt");
		fileOriginal.getParentFile().mkdirs();
		PrintWriter statOut2 = new PrintWriter(new BufferedWriter(
				new FileWriter(fileOriginal, false)));
		try {
			for (ArrayList<Integer> nextCluster : birchClusters) {
				// Gather the reduced-dimension points of this cluster's members.
				ArrayList<double[]> clusterPoints = new ArrayList<double[]>();
				for (Integer originalIndex : nextCluster) {
					int nonTrivialIndex = originalToNonTrivial.get(originalIndex);
					clusterPoints.add(Cluster.nonTrivialMatrixHashMap
							.get(nonTrivialIndex));
				}
				// Fix: the previous "new double[reducedDim]" allocation was
				// dead - it was immediately overwritten by getCentroid().
				double[] centroid = GeneralUtils.getCentroid(clusterPoints);

				statOut2.write(clusterNo + "\t");
				for (double coordinate : centroid) {
					statOut2.write(coordinate + ",");
				}
				statOut2.write("\n");
				clusterNo++;
			}
		} finally {
			// Fix: close even if a lookup or centroid computation throws.
			statOut2.close();
		}
	}


	/**
	 * Debug-only dump: one Motifs-Org/&lt;clusterKey&gt;.txt per cluster meeting
	 * the support threshold, each line "memberIndex,coord,coord,...".
	 * Does nothing unless thresholds.isDebug() is set.
	 */
	public void printNonTrivialSubs(
			HashMap<Integer, ArrayList<Integer>> clustersWithNonTrivialSubs,
			HashMap<Integer, ArrayList<Double>> originalMatrix,
			String outputPath, ClusteringThresholds thresholds) {
		// Guard clause replaces the old "if (flag == true)" wrapper.
		if (!thresholds.isDebug()) {
			return;
		}
		for (Integer key : clustersWithNonTrivialSubs.keySet()) {
			ArrayList<Integer> subsIndex = clustersWithNonTrivialSubs.get(key);
			if (subsIndex.size() < setSupport) {
				continue; // below the support threshold
			}
			File fileOriginal = new File(outputPath + "/" + "NonTrivial" + "/"
					+ "Motifs-Org" + "/" + key + ".txt");
			fileOriginal.getParentFile().mkdirs();
			try {
				PrintWriter statOut2 = new PrintWriter(new BufferedWriter(
						new FileWriter(fileOriginal, false)));
				try {
					for (Integer eachIndex : subsIndex) {
						statOut2.write(eachIndex + ",");
						statOut2.write(makeString(originalMatrix.get(eachIndex)));
						statOut2.write("\n");
					}
				} finally {
					// Fix: close even if a map lookup throws mid-loop.
					statOut2.close();
				}
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
	}

	/**
	 * Joins the values as "v1,v2,...," - note the trailing comma, which the
	 * existing output format relies on.
	 *
	 * @param arrayList values to join
	 * @return comma-terminated concatenation of the values
	 */
	private String makeString(ArrayList<Double> arrayList) {
		// Fix: StringBuilder avoids the O(n^2) cost of String += in a loop.
		StringBuilder out = new StringBuilder();
		for (Double d : arrayList) {
			out.append(d).append(",");
		}
		return out.toString();
	}

	/**
	 * Overwrites &lt;outputPath&gt;/NonTrivial/Clusters-Indexes.txt with one line
	 * per cluster meeting the support threshold: "clusterKey,idx,idx,...".
	 * I/O failures are logged and swallowed.
	 */
	public void printNonTrivialClusteringIndex(
			HashMap<Integer, ArrayList<Integer>> clustersWithNonTrivialSubs,
			String outputPath) {
		File indexFile = new File(outputPath + "/" + "NonTrivial" + "/"
				+ "Clusters-Indexes.txt");
		indexFile.getParentFile().mkdirs();
		try {
			PrintWriter writer = new PrintWriter(new BufferedWriter(
					new FileWriter(indexFile, false)));
			for (Integer clusterKey : clustersWithNonTrivialSubs.keySet()) {
				ArrayList<Integer> memberIndexes = clustersWithNonTrivialSubs
						.get(clusterKey);
				if (memberIndexes.size() < setSupport) {
					continue; // below the support threshold
				}
				writer.write(clusterKey + ",");
				for (Integer memberIndex : memberIndexes) {
					writer.write(memberIndex + ",");
				}
				writer.write("\n");
			}
			writer.close();
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Debug-only dump of reduced-dimension motifs: one Motifs-Red/&lt;key&gt;.txt
	 * per cluster meeting the support threshold, each line
	 * "memberIndex,reducedCoord,...". Does nothing unless
	 * thresholds.isDebug() is set.
	 */
	public void printNonTrivialRedSubs(
			HashMap<Integer, ArrayList<Integer>> clustersWithNonTrivialSubs,
			HashMap<Integer, Integer> originalToNonTrivial,
			HashMap<Integer, ArrayList<Double>> nonTrivialMatrix,
			String outputPath, ClusteringThresholds thresholds) {
		// Guard clause replaces the old "if (flag == true)" wrapper.
		if (!thresholds.isDebug()) {
			return;
		}
		for (Integer key : clustersWithNonTrivialSubs.keySet()) {
			ArrayList<Integer> subsIndex = clustersWithNonTrivialSubs.get(key);
			if (subsIndex.size() < setSupport) {
				continue; // below the support threshold
			}
			File fileOriginal = new File(outputPath + "/" + "NonTrivial" + "/"
					+ "Motifs-Red" + "/" + key + ".txt");
			fileOriginal.getParentFile().mkdirs();
			try {
				PrintWriter statOut2 = new PrintWriter(new BufferedWriter(
						new FileWriter(fileOriginal, false)));
				try {
					for (Integer eachIndex : subsIndex) {
						statOut2.write(eachIndex + ",");
						int nonTrivialIndex = originalToNonTrivial.get(eachIndex);
						statOut2.write(makeString(nonTrivialMatrix
								.get(nonTrivialIndex)));
						statOut2.write("\n");
					}
				} finally {
					// Fix: close even if a map lookup throws mid-loop.
					statOut2.close();
				}
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
	}

	// Ensures Output[/Random]/RhpClusteringStats.txt exists and carries its
	// column header.
	// NOTE(review): only the header is ever written - none of the score
	// parameters (nonTrivialMatSize, nearestClusterCount, w, ...) are
	// output, so every call after the first is effectively a no-op. Confirm
	// whether the data row was lost or is written elsewhere.
	public void printRHPClusteringScore(ClusteringThresholds thresholds,
			int nonTrivialMatSize, int isLshHasNearestClusterCount,
			int nearestClusterCount, int allClustersBSTSize,
			int totalClustersToCompareBST, int totalClustersToCompareLSH,
			int totalIntersectionCount, int intersectionHasNearestClusterCount,
			double w) throws IOException {

		File fileOriginal;
		if (randomization) {
			fileOriginal = new File("Output/Random" + "/"
					+ "RhpClusteringStats.txt");
			fileOriginal.getParentFile().mkdirs();
		} else {
			fileOriginal = new File("Output" + "/" + "RhpClusteringStats.txt");
			fileOriginal.getParentFile().mkdirs();
		}
		PrintWriter statOut2 = new PrintWriter(new BufferedWriter(
				new FileWriter(fileOriginal, true)));
		File file = new File(fileOriginal.getAbsolutePath());
		// Header is only written while the file is still empty.
		if (file.length() == 0) {
			statOut2.write("Algorithm" + "\t" + "Sensor Name" + "\t"
					+ "Motif Width" + "\t" + "Non Trivial Mat Size" + "\t"
					+ "No. of Clusters" + "\t" + "Top-K" + "\t"
					+ "Range Filter" + "\t" + "Nearest Cluster Count" + "\t"
					+ "Lsh Has Nearest Cluster Count" + "\t"
					+ "Clusters To Compare By BST" + "\t"
					+ "Clusters To Compare By LSH" + "\t" + "Intersection"
					+ "\t" + "W" + "\t" + "r" + "\t" + "b" + "\t"
					+ "# Hash Functions" + "\n");
		}

		statOut2.close();
	}

	/**
	 * Debug-only dump of cluster centroids to
	 * &lt;outputPath&gt;/NonTrivial/Centroids.txt, one "key\tcoord,coord,...\n"
	 * line per entry. Does nothing unless thresholds.isDebug() is set.
	 * (Method-name typo preserved for caller compatibility.)
	 */
	public void printNonTrivialCentorids(
			HashMap<Integer, ArrayList<Double>> centroids, String outputPath,
			ClusteringThresholds thresholds) {
		// Guard clause replaces the old "if (isDebug() == true)" wrapper.
		if (!thresholds.isDebug()) {
			return;
		}
		File fileOriginal = new File(outputPath + "/" + "NonTrivial" + "/"
				+ "Centroids.txt");
		fileOriginal.getParentFile().mkdirs();
		try {
			PrintWriter statOut2 = new PrintWriter(new BufferedWriter(
					new FileWriter(fileOriginal, false)));
			try {
				for (Integer key : centroids.keySet()) {
					statOut2.write(key + "\t");
					statOut2.write(makeString(centroids.get(key)));
					statOut2.write("\n");
				}
			} finally {
				// Fix: close even if a lookup throws mid-loop.
				statOut2.close();
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
}
