package MultiLayerNeuralNetWorks;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Random;

import DataSet.MnistDataSet;
import singleLayerNeuralNetWorks.LogisticRegression;
import util.GaussianDistribution;
import util.Utils;

/**
 * A feed-forward multi-layer perceptron: a stack of {@link HiddenLayer}s
 * followed by a multi-class {@link LogisticRegression} output layer, trained
 * with minibatch SGD and backpropagation. {@code main} demonstrates training
 * and evaluating the model on the MNIST data set.
 */
public class MultiLayerPerceptrons {

	private final int nIn; // number of input units
	private final int nHiddenLayers; // number of hidden layers (= nHiddens.length)
	private final int[] nHiddens; // units per hidden layer
	private final int nOut; // number of output classes
	private HiddenLayer[] hiddenLayers; // assigned in initLayers, so not final
	private LogisticRegression logisticLayer; // assigned in initLayers, so not final
	private final Random rng;

	public HiddenLayer[] getHiddenLayers() {
		return hiddenLayers;
	}

	public LogisticRegression getLogisticRegression() {
		return logisticLayer;
	}

	/**
	 * Builds the network layer by layer.
	 *
	 * @param nIn                number of input units
	 * @param nHiddens           units in each hidden layer; its length is the
	 *                           number of hidden layers
	 * @param nOut               number of output classes
	 * @param activationFunction activation passed to each HiddenLayer
	 *                           ("sigmoid" or "tanh")
	 * @param rng                random source; a fixed-seed default is used when
	 *                           null so results stay reproducible
	 */
	public MultiLayerPerceptrons(int nIn, int[] nHiddens, int nOut, String activationFunction, Random rng) {
		this.nIn = nIn;
		this.nHiddens = nHiddens;
		this.nHiddenLayers = nHiddens.length;
		this.nOut = nOut;
		this.rng = (rng == null) ? new Random(1234) : rng;

		initLayers(activationFunction);
	}

	/** Constructs the hidden layers and the multi-class logistic output layer. */
	private void initLayers(String activationFunction) {
		hiddenLayers = new HiddenLayer[nHiddenLayers];
		for (int i = 0; i < nHiddenLayers; i++) {
			// layer 0 reads the raw input; deeper layers read the previous layer's output
			hiddenLayers[i] = new HiddenLayer(i == 0 ? nIn : nHiddens[i - 1], nHiddens[i], null, null, rng,
					activationFunction); // sigmoid or tanh
		}
		// output layer: multi-class logistic regression fed by the last hidden layer
		logisticLayer = new LogisticRegression(nHiddens[nHiddenLayers - 1], nOut);
	}

	/**
	 * Trains the network on one minibatch: forward pass through all hidden
	 * layers, train the output layer, then backpropagate the deltas down
	 * through the hidden layers (each layer updates its own weights).
	 *
	 * @param X             minibatch inputs, [minibatchSize][nIn]
	 * @param T             one-hot target labels, [minibatchSize][nOut]
	 * @param minibatchSize number of examples in this minibatch
	 * @param learningRate  SGD step size
	 */
	public void train(double[][] X, int[][] T, int minibatchSize, double learningRate) {

		// Z[k][n] = output of hidden layer k for example n (= input of layer k+1).
		// Rows are assigned from forward()'s return value, so no inner
		// allocation is needed (the original preallocated [nIn]-wide rows that
		// were immediately discarded, and nIn is wrong for layers > 0 anyway).
		double[][][] Z = new double[nHiddenLayers][minibatchSize][];

		// dZ[k] = delta flowing into hidden layer k; dZ[nHiddenLayers] is the
		// output layer's delta
		double[][][] dZ = new double[nHiddenLayers + 1][][];

		// forward pass through the hidden layers
		for (int n = 0; n < minibatchSize; n++) {
			Z[0][n] = hiddenLayers[0].forward(X[n]); // activate input units
			for (int k = 1; k < nHiddenLayers; k++) {
				Z[k][n] = hiddenLayers[k].forward(Z[k - 1][n]);
			}
		}

		// forward & backward pass of the output layer
		dZ[nHiddenLayers] = logisticLayer.train(Z[nHiddenLayers - 1], T, minibatchSize, learningRate);

		// backpropagate through the hidden layers, last to first
		for (int i = nHiddenLayers - 1; i >= 0; i--) {
			// weights of the layer ABOVE layer i, used to propagate its delta down
			double[][] wAbove = (i == nHiddenLayers - 1) ? logisticLayer.getW() : hiddenLayers[i + 1].getW();
			// the inputs layer i saw during the forward pass
			double[][] zIn = (i == 0) ? X : Z[i - 1];
			dZ[i] = hiddenLayers[i].backward(zIn, Z[i], dZ[i + 1], wAbove, minibatchSize, learningRate);
		}
	}

	/**
	 * Classifies a single input vector.
	 *
	 * @param x input vector of length nIn
	 * @return one-hot prediction produced by the logistic output layer
	 */
	public Integer[] predict(double[] x) {
		double[] z = hiddenLayers[0].output(x);
		for (int i = 1; i < nHiddenLayers; i++) {
			z = hiddenLayers[i].output(z);
		}
		return logisticLayer.predict(z);
	}

	/** Returns the indices 0..n-1 in shuffled order, for SGD minibatch sampling. */
	private static List<Integer> shuffledIndices(int n, Random rng) {
		List<Integer> index = new ArrayList<>(n);
		for (int i = 0; i < n; i++) {
			index.add(i);
		}
		Collections.shuffle(index, rng);
		return index;
	}

	/**
	 * Computes and prints overall accuracy plus per-class precision/recall from
	 * one-hot predicted vs. actual labels, via a confusion matrix.
	 *
	 * @param predicted_T one-hot predictions, [test_N][patterns]
	 * @param actual_T    one-hot ground truth, [test_N][patterns]
	 * @param patterns    number of classes
	 */
	private static void evaluate(Integer[][] predicted_T, Integer[][] actual_T, int patterns) {
		int test_N = actual_T.length;
		int[][] confusionMatrix = new int[patterns][patterns];
		double accuracy = 0.;
		double[] precision = new double[patterns];
		double[] recall = new double[patterns];

		for (int i = 0; i < test_N; i++) {
			int predicted_ = Arrays.asList(predicted_T[i]).indexOf(1);
			int actual_ = Arrays.asList(actual_T[i]).indexOf(1);
			// indexOf returns -1 when no unit fired; skip such rows instead of
			// crashing with an ArrayIndexOutOfBoundsException
			if (predicted_ >= 0 && actual_ >= 0) {
				confusionMatrix[actual_][predicted_] += 1;
			}
		}

		for (int i = 0; i < patterns; i++) {
			double col_ = 0.; // everything predicted as class i
			double row_ = 0.; // everything actually class i
			for (int j = 0; j < patterns; j++) {
				if (i == j) {
					accuracy += confusionMatrix[i][j];
				}
				col_ += confusionMatrix[j][i];
				row_ += confusionMatrix[i][j];
			}
			// guard empty rows/columns, which would otherwise yield NaN
			precision[i] = col_ > 0 ? confusionMatrix[i][i] / col_ : 0.;
			recall[i] = row_ > 0 ? confusionMatrix[i][i] / row_ : 0.;
		}

		accuracy /= test_N;

		System.out.println("--------------------");
		System.out.println("MLP model evaluation");
		System.out.println("------------------------------------");
		System.out.println("Class\t\tAccuracy\tPrecision\tRecall");
		for (int i = 0; i < patterns; i++) {
			System.out.printf("Class%d\t\t%.1f\t\t%.1f\t\t%.1f %%\n",
					i + 1,
					accuracy * 100,
					precision[i] * 100,
					recall[i] * 100);
		}
	}

	/** Trains the MLP on MNIST and prints an evaluation report. */
	public static void main(String[] args) throws Exception {

		final Random rng = new Random(123); // seed random for reproducibility
		final int[] nHiddens = new int[] { 20 };

		SimpleDateFormat myFmt = new SimpleDateFormat("HH:mm:ss.SSS");
		System.out.println("Preparing Data...");
		// NOTE(review): hard-coded local path — adjust to your MNIST location
		MnistDataSet ds = new MnistDataSet("E:\\ai.projects\\mnist");
		System.out.println("Loading MNist:" + myFmt.format(new Date()));
		ds.load_mnist();

		System.out.println("Formatting DataSet for MLP classifier:" + myFmt.format(new Date()));
		final int patterns = ds.getLabels_N(); // number of classes
		final int nIn = ds.getImageDataLen(); // input units = pixels per image
		final int nOut = patterns;

		final int train_N = ds.getTrain_N();
		final int test_N = ds.getTest_N();

		double[][] train_X = ds.getTrainDoubleImages();
		int[][] train_T = ds.getCrossEntropyTrainLabels(); // one-hot training labels

		double[][] test_X = ds.getTestDoubleImages();
		Integer[][] test_T = ds.getCrossEntropyTestLabels(); // one-hot test labels
		Integer[][] predicted_T = new Integer[test_N][nOut];

		int epochs = 20;
		double learningRate = 0.9;

		System.out.println("Creating minibatch:" + myFmt.format(new Date()));
		int minibatchSize = 1000; // number of data in each minibatch
		int minibatch_N = train_N / minibatchSize; // number of minibatches

		double[][][] train_X_minibatch = new double[minibatch_N][minibatchSize][nIn];
		int[][][] train_T_minibatch = new int[minibatch_N][minibatchSize][nOut];

		// create minibatches by drawing examples in shuffled order (SGD)
		System.out.println("Create minibatches:" + myFmt.format(new Date()));
		List<Integer> minibatchIndex = shuffledIndices(train_N, rng);
		for (int i = 0; i < minibatch_N; i++) {
			for (int j = 0; j < minibatchSize; j++) {
				int src = minibatchIndex.get(i * minibatchSize + j);
				train_X_minibatch[i][j] = train_X[src];
				train_T_minibatch[i][j] = train_T[src];
			}
		}

		//
		// Build Multi-Layer Perceptrons model
		//
		System.out.println("Construct classifier:" + myFmt.format(new Date()));
		MultiLayerPerceptrons classifier = new MultiLayerPerceptrons(nIn, nHiddens, nOut, "tanh", rng);

		// train, printing one progress dot per epoch (wrapped every 100 epochs)
		long begin = System.currentTimeMillis();
		System.out.println("Training:" + myFmt.format(new Date()));
		for (int epoch = 0; epoch < epochs; epoch++) {
			if (epoch % 100 == 0 && epoch > 0) {
				System.out.println("");
			}
			System.out.print(".");
			for (int batch = 0; batch < minibatch_N; batch++) {
				classifier.train(train_X_minibatch[batch], train_T_minibatch[batch], minibatchSize, learningRate);
			}
		}
		System.out.println("");
		long duration = System.currentTimeMillis() - begin;

		// test
		for (int i = 0; i < test_N; i++) {
			predicted_T[i] = classifier.predict(test_X[i]);
		}

		System.out.printf("Time consumed by training: %d ms\n", duration);
		System.out.printf("Epoch: %d\n", epochs);

		// evaluate the model
		evaluate(predicted_T, test_T, patterns);
	}
}
