package singleLayerNeuralNetWorks;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Random;

import DataSet.MnistDataSet;
import util.ActivationFunction;
import util.GaussianDistribution;
//import util.GaussianDistribution;
import util.Utils;

/**
 * Multiclass logistic-regression (softmax) classifier trained with
 * minibatch stochastic gradient descent.
 *
 * Model: y = softmax(W x + b), trained with the cross-entropy loss, whose
 * gradient w.r.t. the pre-activation is simply (y - t).
 */
public class LogisticRegression {
	private int nIn, nOut;   // number of input features / output classes
	private double[][] w;    // weight matrix, shape [nOut][nIn], zero-initialized
	private double[] b;      // bias vector, length nOut, zero-initialized

	/**
	 * Builds a classifier with zero-initialized weights and biases.
	 *
	 * @param nIn  number of input features
	 * @param nOut number of output classes
	 */
	public LogisticRegression(int nIn, int nOut) {
		this.nIn = nIn;
		this.nOut = nOut;
		this.w = new double[nOut][nIn];
		this.b = new double[nOut];
	}

	/**
	 * Performs one SGD step over a minibatch.
	 *
	 * @param x             minibatch inputs, shape [minibatchSize][nIn]
	 * @param t             one-hot targets, shape [minibatchSize][nOut]
	 * @param minibatchSize number of samples in the minibatch
	 * @param learningRate  SGD step size
	 * @return per-sample output error dY = y - t, shape [minibatchSize][nOut]
	 */
	public double[][] train(double[][] x, int[][] t, int minibatchSize, double learningRate) {
		double[][] grad_w = new double[nOut][nIn];
		double[] grad_b = new double[nOut];
		double[][] dY = new double[minibatchSize][nOut];

		// 1. Accumulate gradients of W and b over the minibatch.
		for (int n = 0; n < minibatchSize; n++) {

			double[] predicted_Y_ = output(x[n]);

			for (int k = 0; k < nOut; k++) {
				// Gradient of cross-entropy w.r.t. the softmax pre-activation.
				dY[n][k] = predicted_Y_[k] - t[n][k];

				for (int i = 0; i < nIn; i++) {
					grad_w[k][i] += dY[n][k] * x[n][i];
				}

				grad_b[k] += dY[n][k];
			}
		}
		// 2. Update parameters with the minibatch-averaged gradient.
		for (int k = 0; k < nOut; k++) {
			for (int i = 0; i < nIn; i++) {
				w[k][i] -= learningRate * grad_w[k][i] / minibatchSize;
			}
			b[k] -= learningRate * grad_b[k] / minibatchSize;
		}

		return dY;
	}

	/**
	 * Forward pass: softmax(W x + b).
	 *
	 * @param x input vector of length nIn
	 * @return class-probability vector of length nOut
	 */
	public double[] output(double[] x) {
		double[] preActivation = new double[nOut];
		for (int k = 0; k < nOut; k++) {
			for (int i = 0; i < nIn; i++) {
				preActivation[k] += w[k][i] * x[i];
			}
			preActivation[k] += b[k];
		}

		return ActivationFunction.softmax(preActivation, nOut);
	}

	/**
	 * Predicts the class of {@code x} as a one-hot vector.
	 *
	 * @param x input vector of length nIn
	 * @return one-hot label of length nOut with exactly one element set to 1
	 */
	public Integer[] predict(double[] x) {

		double[] y = output(x); // class probabilities from the learned model
		Integer[] t = new Integer[nOut]; // one-hot label derived from the probabilities

		// Argmax over the probabilities. Starting from index 0 guarantees a
		// valid label even if every probability ties (e.g. underflows to 0);
		// previously argmax could stay -1, producing an all-zero row that
		// broke indexOf(1)-based evaluation downstream.
		int argmax = 0;
		double max = y[0];

		for (int i = 1; i < nOut; i++) {
			if (y[i] > max) {
				max = y[i];
				argmax = i;
			}
		}

		for (int i = 0; i < nOut; i++) {
			t[i] = (i == argmax) ? 1 : 0;
		}

		return t;
	}

	/** @return the full weight matrix (internal array, not a copy) */
	public double[][] getW() {
		return w;
	}

	/** @return the full bias vector (internal array, not a copy) */
	public double[] getB() {
		return b;
	}

	/** @return the weight connecting input {@code i} to output {@code k} */
	public double getW(int k, int i) {
		return w[k][i];
	}

	/** @return the bias of output {@code k} */
	public double getB(int k) {
		return b[k];
	}

	/**
	 * Demo: trains the classifier on MNIST with minibatch SGD and prints a
	 * confusion-matrix-based evaluation plus the learned coefficients.
	 */
	public static void main(String[] args) throws Exception {

		final Random rng = new Random(1234); // fixed seed for reproducible shuffling

		//
		// Declare variables and constants
		//
		SimpleDateFormat myFmt = new SimpleDateFormat("HH:mm:ss.SSS");
		System.out.println("Preparing Data...");
		// NOTE(review): hard-coded local path — adjust for your environment.
		MnistDataSet ds = new MnistDataSet("E:\\ai.projects\\mnist");
		System.out.println("Loading MNist:" + myFmt.format(new Date()));
		ds.load_mnist();

		System.out.println("Formatting DataSet for logisticRegression classifier:" + myFmt.format(new Date()));
		final int patterns = ds.getLabels_N(); // number of classes
		final int nIn = ds.getRows() * ds.getCols(); // one feature per pixel
		final int nOut = patterns;

		final int train_N = ds.getTrain_N();
		final int test_N = ds.getTest_N();

		double[][] train_X = ds.getTrainDoubleImages();
		int[][] train_T = ds.getCrossEntropyTrainLabels();

		double[][] test_X = ds.getTestDoubleImages();
		Integer[][] test_T = ds.getCrossEntropyTestLabels();
		Integer[][] predicted_T = new Integer[test_N][nOut];

		int epochs = 400;
		double learningRate = 0.45;

		System.out.println("Creating minibatch:" + myFmt.format(new Date()));
		int minibatchSize = 600; // number of data in each minibatch
		int minibatch_N = train_N / minibatchSize; // number of minibatches

		double[][][] train_X_minibatch = new double[minibatch_N][minibatchSize][nIn]; // minibatches of training data
		int[][][] train_T_minibatch = new int[minibatch_N][minibatchSize][nOut]; // minibatches of output data for
		                                                                         // training
		// Shuffled index so each minibatch is a random sample of the training set.
		List<Integer> minibatchIndex = new ArrayList<>();
		for (int i = 0; i < train_N; i++) {
			minibatchIndex.add(i);
		}
		Collections.shuffle(minibatchIndex, rng); // shuffle data index for SGD

		// create minibatches with training data
		for (int i = 0; i < minibatch_N; i++) {
			for (int j = 0; j < minibatchSize; j++) {
				train_X_minibatch[i][j] = train_X[minibatchIndex.get(i * minibatchSize + j)];
				train_T_minibatch[i][j] = train_T[minibatchIndex.get(i * minibatchSize + j)];
			}
		}

		//
		// Build Logistic Regression model
		//

		// construct logistic regression
		LogisticRegression classifier = new LogisticRegression(nIn, nOut);

		// train
		Date begin = new Date();
		System.out.println("Training:" + myFmt.format(new Date()));

		for (int epoch = 0; epoch < epochs; epoch++) {
			if (epoch % 100 == 0 && epoch > 0)
				System.out.println("");
			System.out.print(".");
			for (int batch = 0; batch < minibatch_N; batch++) {
				classifier.train(train_X_minibatch[batch], train_T_minibatch[batch], minibatchSize, learningRate);
			}
			learningRate *= 0.95; // exponential learning-rate decay per epoch
		}
		System.out.println("");
		long duration = new Date().getTime() - begin.getTime();

		// test
		System.out.println("Testing:" + myFmt.format(new Date()));

		for (int i = 0; i < test_N; i++) {
			predicted_T[i] = classifier.predict(test_X[i]);
		}

		//
		// Evaluate the model: confusion matrix, accuracy, per-class precision/recall
		//

		int[][] confusionMatrix = new int[patterns][patterns];
		double accuracy = 0.;
		double[] precision = new double[patterns];
		double[] recall = new double[patterns];

		for (int i = 0; i < test_N; i++) {
			int predicted_ = Arrays.asList(predicted_T[i]).indexOf(1);
			int actual_ = Arrays.asList(test_T[i]).indexOf(1);

			confusionMatrix[actual_][predicted_] += 1;
		}

		for (int i = 0; i < patterns; i++) {
			double col_ = 0.; // total predicted as class i (precision denominator)
			double row_ = 0.; // total actually class i (recall denominator)

			for (int j = 0; j < patterns; j++) {

				if (i == j) {
					accuracy += confusionMatrix[i][j];
					precision[i] += confusionMatrix[j][i];
					recall[i] += confusionMatrix[i][j];
				}

				col_ += confusionMatrix[j][i];
				row_ += confusionMatrix[i][j];
			}
			// Guard against 0/0 (class never predicted / absent from the test
			// set), which previously produced NaN.
			precision[i] = col_ > 0 ? precision[i] / col_ : 0.;
			recall[i] = row_ > 0 ? recall[i] / row_ : 0.;
		}

		accuracy /= test_N;
		System.out.printf("Time consumed by training: %d ms\n", duration);
		System.out.printf("Epoch: %d\n", epochs);

		System.out.println("------------------------------------");
		System.out.println("Logistic Regression model evaluation");
		System.out.println("------------------------------------");
		System.out.println("Class\t\tAccuracy\tPrecision\tRecall");
		for (int i = 0; i < patterns; i++) {
			System.out.printf("Class%d\t\t%.1f\t\t%.1f\t\t%.1f %%\n", i + 1, accuracy * 100, precision[i] * 100,
			        recall[i] * 100);
		}

		System.out.println("\nCoefficients:");
		System.out.print("Class\t\t");
		for (int m = 0; m < nIn; m++) {
			System.out.printf("w%d\t", m + 1);
		}
		System.out.println("d");
		System.out.println(Utils.repeatedString("-", (nIn + 2) * 8 + 4));
		for (int i = 0; i < nOut; i++) {
			System.out.printf("Class%d\t\t", i + 1);
			for (int m = 0; m < nIn; m++) {
				System.out.printf("%.1f\t", classifier.getW(i, m));
			}
			System.out.printf("%.1f\n", classifier.getB(i));
		}
	}
}
