package de.in.tum.msspp.realPCA;

import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;

import au.com.bytecode.opencsv.CSVReader;

import Jama.EigenvalueDecomposition;
import Jama.Matrix;

/**
 * Principal Component Analysis over a sample matrix (rows = observations,
 * columns = dimensions), backed by Jama's eigendecomposition of the
 * covariance matrix. Also usable as a command-line tool that projects the
 * mean vectors of two CSV samples into the PCA space of the first sample
 * and prints their Euclidean distance.
 */
public class PCA {

	/** Number of principal components kept when projecting (the "k" of the PCA). */
	final static int K = 5;

	Matrix covMatrix;
	EigenvalueDecomposition eigenstuff;

	double[] eigenvalues;
	Matrix eigenvectors;

	/** Components ordered descending by eigenvalue (see PrincipleComponent.compareTo). */
	SortedSet<PrincipleComponent> principleComponents;
	/** Feature matrix (columns = the K dominant eigenvectors); set by calculate(). */
	public Matrix features;

	double[] means;
	double[][] input;
	int numComponents;

	/**
	 * Usage: PCA &lt;baseSample.csv&gt; &lt;compareSample.csv&gt;
	 *
	 * Builds a PCA space from the base sample, projects the mean vectors of
	 * both samples into it, and prints their Euclidean distance.
	 */
	public static void main(String[] args) {
		if (args.length > 1) {
			File baseFile = new File(args[0]);    // base sample: the PCA space is built from it
			File compareFile = new File(args[1]); // compare sample: projected into that space

			try {
				// Read both matrices from CSV in the expected pattern format
				double[][] baseMatrix = readSamples(baseFile);
				double[][] compareMatrix = readSamples(compareFile);

				// Full PCA on the base matrix (fills basePCA.features)
				PCA basePCA = new PCA(baseMatrix);
				double[] baseMean = basePCA.getMeans();
				basePCA.calculate();

				// Only the means of the compare sample are needed
				PCA comparePCA = new PCA(compareMatrix);
				double[] compareMean = comparePCA.getMeans();

				// Mean vectors as 1-row matrices
				Matrix meanMatrixBase = new Matrix(baseMean, 1);
				Matrix meanMatrixCompare = new Matrix(compareMean, 1);

				// Transposed feature matrix: rows are the dominant components
				Matrix tFeatures = basePCA.features.transpose();

				// Project both mean vectors into the base space: feature matrix * mean vector
				double[] baseRef = tFeatures.times(meanMatrixBase.transpose()).transpose().getArray()[0];
				double[] compareRef = tFeatures.times(meanMatrixCompare.transpose()).transpose().getArray()[0];

				// Euclidean distance between the two projected means
				double sum = 0;
				for (int j = 0; j < baseRef.length; j++) {
					double diff = baseRef[j] - compareRef[j];
					sum += diff * diff;
				}

				double euklidDist = Math.sqrt(sum);
				bug("Euklid. distance: \n");
				bug("" + euklidDist);
			} catch (IOException e) {
				e.printStackTrace();
			}
		}

	}

	/**
	 * Create a new PCA from the given matrix: computes the covariance matrix,
	 * its eigendecomposition, and the sorted set of principal components.
	 *
	 * @param input sample matrix; rows are observations, columns are dimensions
	 */
	public PCA(double[][] input) {
		this.input = input;
		means = new double[input[0].length];

		// Covariance matrix (also fills 'means' as a side effect)
		double[][] cov = getCovariance(input, means);
		covMatrix = new Matrix(cov);

		// Eigenvalues and eigenvectors of the (symmetric) covariance matrix
		eigenstuff = covMatrix.eig();
		eigenvalues = eigenstuff.getRealEigenvalues();
		eigenvectors = eigenstuff.getV();

		// Collect the components for sorting.
		// BUGFIX: Jama stores the eigenvectors as the COLUMNS of V, so
		// component i is column i (vectors[j][i]) paired with eigenvalues[i] —
		// the previous code copied ROW i, which is not an eigenvector.
		double[][] vectors = eigenvectors.getArray();
		numComponents = eigenvectors.getColumnDimension();
		int dim = eigenvectors.getRowDimension();

		principleComponents = new TreeSet<PrincipleComponent>();
		for (int i = 0; i < numComponents; i++) {
			double[] eigenvector = new double[dim];
			for (int j = 0; j < dim; j++) {
				eigenvector[j] = vectors[j][i];
			}
			principleComponents.add(new PrincipleComponent(eigenvalues[i],
					eigenvector));
		}
	}

	/**
	 * Reads a sample CSV into the fixed pattern format: rowSize consecutive
	 * measurements (of numberOfCharacters values each) are concatenated into
	 * one matrix row. The first CSV line (headers) and the last column of each
	 * data line (timestamp) are ignored.
	 *
	 * @param inFile CSV file to read
	 * @return prepared sample matrix
	 * @throws IOException if the file cannot be read
	 */
	public static double[][] readSamples(File inFile) throws IOException {
		int numberOfCharacters = 4;  // number of measured values per log line
		int numberOfLogs = 29 * 60;  // number of all measures e.g. 29min * 60sek

		int rowSize = 10;            // number of seconds per row (pattern level)
		// e.g. 10 means 10 measures will be connected in one row

		int numberOfRows = numberOfLogs / rowSize;

		// Result matrix
		double[][] matrix = new double[numberOfRows][numberOfCharacters * rowSize];

		// NOTE(review): FileReader uses the platform default charset — confirm
		// the CSVs are always written in that encoding.
		CSVReader reader = new CSVReader(new FileReader(inFile));
		// BUGFIX: the reader was never closed; close it even on parse errors.
		try {
			// The first line doesn't contain real values, just text headers,
			// so it is read and discarded.
			reader.readNext();

			int row = 0;
			int ds = 0;
			String[] nextLine;
			while ((nextLine = reader.readNext()) != null) {
				if (ds >= rowSize) {
					// If the row is full, switch to the next row
					row++;
					ds = 0;
				}
				// BUGFIX: stop instead of overrunning the matrix when the file
				// contains more than numberOfLogs data lines.
				if (row >= numberOfRows) {
					break;
				}

				// -1: ignore the trailing timestamp column
				for (int i = 0; i < nextLine.length - 1; i++) {
					// parse next value
					double value = Double.parseDouble(nextLine[i]);

					// find the right position in the row
					int column = i * rowSize + ds;

					matrix[row][column] = value;
				}
				ds++;
			}
		} finally {
			reader.close();
		}

		return matrix;
	}

	/**
	 * Mean values of the input matrix, one per dimension (column).
	 *
	 * @return array of means
	 */
	public double[] getMeans() {
		return means;
	}

	/**
	 * Runs the projection: picks the K dominant components, stores them in
	 * {@link #features}, and projects the mean-adjusted input into that space.
	 * Prints intermediate matrices to stdout.
	 *
	 * @return projected sample matrix (rows = observations, columns = K components)
	 */
	public double[][] calculate() {
		Matrix originalData = new Matrix(input);
		bug("Here is the original data before adjusting by each dimension mean:");
		originalData.print(8, 4);

		// Get the K main components
		List<PrincipleComponent> mainComponents = getDominantComponents(K);

		// Matrix with the main components (best eigenvectors) as columns
		features = getDominantComponentsMatrix(mainComponents);

		// Transpose matrix so each row is a component
		Matrix featuresTransposed = features.transpose();
		bug("Transposed feature matrix (k=" + K + ") :");
		featuresTransposed.print(8, 4);

		// Project the mean-adjusted input into the new space
		double[][] matrixAdjusted = getMeanAdjusted(input, getMeans());
		Matrix adjustedInput = new Matrix(matrixAdjusted);

		Matrix transFormedData = featuresTransposed.times(adjustedInput.transpose());
		bug("Transformed data into PCA-space (k=" + K + ") :");
		Matrix result = transFormedData.transpose();

		result.print(8, 4);
		bug("*************************************************");

		return result.getArrayCopy();
	}

	/**
	 * Subtracts the mean value from each row. The means must be precomputed,
	 * which you get for free when you make a PCA instance (just call
	 * getMeans()).
	 *
	 * @param input
	 *            Some data, where each row is a sample point, and each column
	 *            is a dimension.
	 * @param mean
	 *            The means of each dimension. This could be computed from
	 *            'input' directly, but for efficiency's sake, it should only be
	 *            done once and the result saved.
	 * @return Returns a translated matrix where each cell has been translated
	 *         by the mean value of its dimension.
	 */
	public static double[][] getMeanAdjusted(double[][] input, double[] mean) {
		int nRows = input.length;
		int nCols = input[0].length;
		double[][] ret = new double[nRows][nCols];
		for (int row = 0; row < nRows; row++) {
			for (int col = 0; col < nCols; col++) {
				ret[row][col] = input[row][col] - mean[col];
			}
		}
		return ret;
	}

	/**
	 * Returns the top n principle components in descending order of relevance
	 * (eigenvalue). Returns fewer than n if fewer components exist.
	 */
	public List<PrincipleComponent> getDominantComponents(int n) {
		List<PrincipleComponent> retVal = new ArrayList<PrincipleComponent>();
		int count = 0;
		for (PrincipleComponent pc : principleComponents) {
			retVal.add(pc);
			count++;
			if (count >= n) {
				break;
			}
		}
		return retVal;
	}

	/**
	 * Builds a matrix whose columns are the given components' eigenvectors,
	 * in list order.
	 */
	public static Matrix getDominantComponentsMatrix(
			List<PrincipleComponent> dom) {
		int nRows = dom.get(0).eigenVector.length;
		int nCols = dom.size();
		Matrix matrix = new Matrix(nRows, nCols);
		for (int col = 0; col < nCols; col++) {
			for (int row = 0; row < nRows; row++) {
				matrix.set(row, col, dom.get(col).eigenVector[row]);
			}
		}
		return matrix;
	}

	/** @return the total number of eigenvalues/components of the decomposition */
	public int getNumComponents() {
		return eigenvalues.length;
	}

	/**
	 * Helper class pairing an eigenvalue with its eigenvector so the
	 * components can be ordered descending by eigenvalue.
	 *
	 * @author development
	 */
	public static class PrincipleComponent implements
			Comparable<PrincipleComponent> {
		public double eigenValue;
		public double[] eigenVector;

		public PrincipleComponent(double eigenValue, double[] eigenVector) {
			this.eigenValue = eigenValue;
			this.eigenVector = eigenVector;
		}

		// TreeSet orders ascending, so the comparison is reversed to put
		// bigger eigenvalues first.
		public int compareTo(PrincipleComponent o) {
			int ret = Double.compare(o.eigenValue, eigenValue);
			if (ret == 0) {
				// BUGFIX: TreeSet treats compareTo()==0 as a duplicate, so
				// distinct components with equal eigenvalues were silently
				// dropped. Tie-break on the eigenvector contents.
				int len = Math.min(eigenVector.length, o.eigenVector.length);
				for (int i = 0; i < len && ret == 0; i++) {
					ret = Double.compare(eigenVector[i], o.eigenVector[i]);
				}
				if (ret == 0) {
					ret = eigenVector.length - o.eigenVector.length;
				}
			}
			return ret;
		}

		public String toString() {

			return "Principle Component, eigenvalue: " + eigenValue
					+ ", eigenvector: [" + printVector(eigenVector) + "]";

		}

		// Joins the vector elements with ';' for toString().
		private String printVector(double[] eigenVector) {
			String vectorString = "";
			for (int i = 0; i < eigenVector.length; i++) {
				if (i != 0)
					vectorString = vectorString + ";";
				vectorString = vectorString + Double.toString(eigenVector[i]);
			}
			return vectorString;
		}
	}

	/**
	 * Calculates the (sample) covariance matrix of the input.
	 *
	 * @param input rows are observations, columns are dimensions
	 * @param meanValues out-parameter: if non-null, receives the per-dimension
	 *                   means (must have input[0].length entries)
	 * @return covariance matrix (n x n, symmetric)
	 */
	public static double[][] getCovariance(double[][] input, double[] meanValues) {
		// rows (Zeilen)
		int numDataVectors = input.length;
		// columns (Spalten)
		int n = input[0].length;

		// per-dimension mean over all observations
		double[] sum = new double[n];
		double[] mean = new double[n];
		for (int i = 0; i < numDataVectors; i++) {
			double[] vec = input[i];
			for (int j = 0; j < n; j++) {
				sum[j] = sum[j] + vec[j];
			}
		}
		for (int i = 0; i < sum.length; i++) {
			mean[i] = sum[i] / numDataVectors;
		}

		// C[i][j] = cov(column i, column j); only the upper triangle is
		// computed, the lower one is mirrored (the matrix is symmetric).
		double[][] retVal = new double[n][n];
		for (int i = 0; i < n; i++) {
			for (int j = i; j < n; j++) {
				double v = getCovariance(input, i, j, mean);
				retVal[i][j] = v;
				retVal[j][i] = v;
			}
		}
		if (meanValues != null) {
			System.arraycopy(mean, 0, meanValues, 0, mean.length);
		}
		return retVal;
	}

	/**
	 * Sample covariance between two columns of the matrix. The means are
	 * subtracted here (the matrix itself is NOT mean-adjusted); the sum is
	 * normalized by (n - 1).
	 */
	private static double getCovariance(double[][] matrix, int colA, int colB,
			double[] mean) {
		double sum = 0;
		for (int i = 0; i < matrix.length; i++) {
			double v1 = matrix[i][colA] - mean[colA];
			double v2 = matrix[i][colB] - mean[colB];
			sum = sum + (v1 * v2);
		}
		int n = matrix.length;
		double retVal = (sum / (n - 1));
		return retVal;
	}

	/**
	 * Output helper: prints to stdout.
	 *
	 * @param what line to print
	 */
	private static void bug(String what) {
		System.out.println(what);
	}
}
