package mllib;


import common.ConstVars;

import Jama.EigenvalueDecomposition;
import Jama.Matrix;
import org.apache.commons.math.linear.Array2DRowRealMatrix;
import org.apache.commons.math.linear.ArrayRealVector;
import org.apache.commons.math.linear.RealMatrix;
import org.apache.commons.math.linear.RealVector;

import java.io.*;
import java.util.*;

public class LDA implements Reducer 
{
    // k x d projection matrix: each row is a (transposed) eigenvector of Sw^-1 * Sb
    public RealMatrix wTrans;
    // Mean of the training samples; subtracted from every point before projection
    private RealVector sampleMean;
    // Real parts of the eigenvalues of Sw^-1 * Sb (null when loaded from a file)
    private double[] eigvals;


    /**
     * Rebuilds an LDA projection from a file previously written by {@link #toFile(String)}.
     * File layout: original dimension d, reduced dimension k, the k*d entries of wTrans
     * in row-major order, then the d entries of the sample mean.
     * Note: eigvals is not persisted, so getEigenValues() returns null on a reloaded LDA.
     *
     * @param filename path of the saved projection data
     */
    public LDA(String filename)
    {
        Scanner sc = null;
        try {
          sc = new Scanner(new File(filename));
          int dim = sc.nextInt();
          int newDim = sc.nextInt();
          double[][] w = new double[newDim][dim];
          double[] sm = new double[dim];
          for(int i = 0; i < newDim; i++)
              for(int j = 0; j < dim; j++)
                  w[i][j] = sc.nextDouble();

          for(int i = 0; i < dim; i++)
              sm[i] = sc.nextDouble();

          wTrans = new Array2DRowRealMatrix(w);
          sampleMean = new ArrayRealVector(sm);
        }
        catch(FileNotFoundException e) {
          e.printStackTrace();
        }
        finally {
          // FIX: the Scanner (and its underlying file handle) was never closed
          if(sc != null)
              sc.close();
        }
    }

    /**
     * Returns the eigenvalues of Sw^-1 * Sb computed during training,
     * or null if this LDA was reloaded from a file.
     */
    public double[] getEigenValues()
    {
        return eigvals;
    }

    /**
     * Writes the information necessary for this LDA to be rebuilt from a file:
     * d, k, the entries of wTrans (row-major, one per line), then the sample mean.
     *
     * @param filename destination path
     */
    public void toFile(String filename)
    {
      BufferedWriter out = null;
      try {
            out = new BufferedWriter(new FileWriter(filename));
            int dim = wTrans.getColumnDimension();
            int newDim = wTrans.getRowDimension();
            out.write( dim + "\n");
            out.write(newDim + "\n");
            for( int i = 0 ; i < newDim; i++)
                for(int j = 0; j < dim; j++)
                    out.write( wTrans.getEntry(i,j) + "\n" );
            for(int i = 0; i < dim; i++)
                    out.write( sampleMean.getEntry(i) + " ");
      }
      catch(IOException e) {
            // FIX: the exception was silently swallowed, hiding failed saves
            e.printStackTrace();
      }
      finally {
            // FIX: the writer leaked when an exception fired mid-write
            if(out != null) {
                try { out.close(); }
                catch(IOException ignored) { /* nothing left to do on close failure */ }
            }
      }
    }
    
    /**
     * Calculates the within-class scatter matrix
     * Sw = sum over samples x of (x - mean(class(x))) (x - mean(class(x)))^T.
     *
     * @param ts    centered training set
     * @param names class label of each sample, parallel to the sample indices
     * @param means per-class mean vectors
     * @return the d x d within-class scatter matrix
     */
    private RealMatrix calcSw(TrainingSet ts, String[] names, HashMap<String, RealVector> means)
    {
        RealMatrix sw = new Array2DRowRealMatrix(ts.dimension(), ts.dimension());
        // Single linear pass over the samples; the original re-scanned the whole
        // set once per class (O(classes * samples)) for the same result.
        for( int i = 0; i < ts.sampleSize(); i++)
        {
            RealVector mean = means.get(names[i]);
            if( mean == null )  // sample whose label has no mean: skipped, as before
                continue;
            RealVector diff = ts.getSample(i).subtract( mean );
            sw = sw.add( diff.outerProduct(diff) );
        }
        return sw;  
    }

    /**
     * Calculates the between-class scatter matrix: the average over classes of
     * (classMean - totalMean)(classMean - totalMean)^T, where totalMean is the
     * unweighted mean of the class means.
     *
     * @param ts    centered training set (used only for its dimension)
     * @param names class label of each sample (unused here, kept for symmetry with calcSw)
     * @param means per-class mean vectors
     * @return the d x d between-class scatter matrix
     */
    private RealMatrix calcSb(TrainingSet ts, String[] names, HashMap<String, RealVector> means)
    {
        RealMatrix sb = new Array2DRowRealMatrix(ts.dimension(), ts.dimension() );
        RealVector totalMean = new ArrayRealVector( ts.dimension());
    
        //calculate mean of means
        for( String s : means.keySet())
            totalMean = totalMean.add( means.get(s) );
        totalMean = totalMean.mapDivideToSelf(means.size());

        for(String s : means.keySet())
        {
            RealVector diff = means.get(s).subtract( totalMean );
            sb = sb.add( diff.outerProduct(diff) );
        }    

        // FIX: 1 / means.size() was integer division, which is 0 whenever there is
        // more than one class and silently zeroed out the whole scatter matrix.
        return sb.scalarMultiply( 1.0 / means.size() );

    }

    /**
     * Trains an LDA projection from a labeled training set.
     * Centers the data, builds the within/between-class scatter matrices, solves the
     * eigenproblem of Sw^-1 * Sb, and keeps the top k eigenvectors where k is the
     * smallest dimension reaching the requested proportion of variance.
     *
     * @param ts      training set; centered in place about the origin
     * @param names   class label of each sample, parallel to the sample indices
     * @param percent proportion of variance (0..1) the kept dimensions must explain
     */
    public LDA( TrainingSet ts, String[] names, double percent ) 
    {
        //Get sample mean and center data about origin
        sampleMean = ts.centerToOrigin();

        //Count the samples in each class
        HashMap<String,Integer> classCounters = new HashMap<String, Integer>();
        for(int i = 0;i < names.length;i++) 
        {
            Integer count = classCounters.get(names[i]);
            // FIX: the original re-put the old count instead of incrementing it,
            // leaving every class counter stuck at 1.
            classCounters.put(names[i], count == null ? 1 : count + 1);
        }
        //Calculate means per class
        HashMap<String, RealVector> classMeans = new HashMap<String, RealVector>();
        for( String n : classCounters.keySet())
        {
            RealVector m = new ArrayRealVector( ts.dimension() );
            for(int i = 0; i < ts.sampleSize(); i++) 
            {
                if( n.equals( names[i]) )
                    m = m.add( ts.getSample(i) );
            }
            // FIX: mapDivideToSelf(1 / count) divided by an integer-division result
            // (0 for count > 1, producing Infinity/NaN); divide by the class size.
            m = m.mapDivideToSelf( classCounters.get(n) );
            classMeans.put(n, m);
        }    
        

        System.out.println("Starting Sw");
        RealMatrix Sw = calcSw(ts, names, classMeans);
        System.out.println("Starting Sb");
        RealMatrix Sb = calcSb(ts, names, classMeans);

        //Do decomposition to get eigenvalues and eigenvectors
        System.out.println("Starting SwInvSb calculation");
        Matrix SwInvSb = new Matrix(Sw.inverse().multiply(Sb).getData());
        System.out.println("Starting eigenvalue decomposition");
        EigenvalueDecomposition decomp = new EigenvalueDecomposition(SwInvSb);
        System.out.println("Starting eigenvalue fetching");
        eigvals = decomp.getRealEigenvalues();
        System.out.println("Starting eigenvector fetching");
        Matrix eigvecs = decomp.getV();

        //Find number of dimensions (k) to keep
        System.out.println("Starting POV");
        int k = POV(eigvals, percent);

        // Keep the top k eigenvectors. Jama stores eigenvectors as the COLUMNS of V,
        // ordered to match getRealEigenvalues(), so take the LAST k columns and
        // transpose them into the rows of wTrans.
        // FIX: the original sliced the last k ROWS of V, which are not eigenvectors.
        // NOTE(review): Jama only guarantees ascending eigenvalue order for symmetric
        // input; Sw^-1 * Sb is generally non-symmetric — confirm the ordering holds.
        System.out.println("Starting pruning irrelevant eigenvectors");
        int d = eigvecs.getColumnDimension();
        eigvecs = eigvecs.getMatrix(0,
                eigvecs.getRowDimension() - 1,
                d - k,
                d - 1);

        System.out.println("Surely we're not stuck here, right?");
        wTrans = new Array2DRowRealMatrix( eigvecs.transpose().getArray() );
        //normalizeW();
    }

    /**
     * Normalizes each row of wTrans to unit Euclidean length so projections
     * are not distorted by eigenvector scaling. Currently unused (call site
     * commented out in the training constructor).
     */
    private void normalizeW()
    {
        for( int k = 0; k < wTrans.getRowDimension(); k++)
        {
            double sum = 0;
            for(int c = 0; c < wTrans.getColumnDimension(); c++)
                sum += wTrans.getEntry(k,c) * wTrans.getEntry(k,c);
            sum = Math.sqrt(sum);
            // FIX: guard against a zero row, which would fill it with NaN
            if( sum == 0 )
                continue;
            for(int c = 0; c < wTrans.getColumnDimension(); c++)
                wTrans.setEntry(k,c,    wTrans.getEntry(k,c)/sum);
        }
    }

    /**
     * Projects a point into the k-dimensional subspace defined by wTrans,
     * first translating it by the stored sample mean.
     *
     * @param point a d-dimensional vector
     * @return the projected k-dimensional vector
     * @throws IllegalArgumentException if the point's dimension does not match
     */
    public RealVector projectPoint( RealVector point) throws IllegalArgumentException
    {
        return wTrans.operate(point.subtract(sampleMean));
    }

    /** Returns k, the number of dimensions LDA reduced down to. */
    public int reducedDimension()
    {
        return wTrans.getRowDimension();
    }

    /**
     * Calculates the smallest number of dimensions whose proportion of variance
     * (sum of the largest eigenvalues over the total) is at least {@code percent}.
     * Assumes eigenvalues are sorted ascending, so it accumulates from the end.
     *
     * @param evalues eigenvalues in ascending order
     * @param percent target proportion of variance in [0, 1]
     * @return number of trailing eigenvalues needed to reach the target
     */
    private int  POV( double[] evalues, double percent)
    {
        double sum = 0;
        for( int i = evalues.length-1; i >= 0; i--)
            sum += evalues[i];
        
        int counter = 0;
        double partialSum = 0;
        
        // FIX: bound the loop so percent >= 1.0 (or floating-point rounding)
        // cannot index past the start of the array.
        while( counter < evalues.length && partialSum / sum < percent)
        {
            partialSum += evalues[evalues.length - counter - 1];
            counter++;
        }

        System.err.println("k: " + counter);
        System.err.println("percent: " + partialSum / sum);
        return counter;
    }
}
