package MultiLayerNeuralNetWorks;

import java.util.Random;
import java.util.function.DoubleFunction;

import static util.RandomGenerator.*;

import static util.ActivationFunction.*;

/**
 * Fully-connected hidden layer of a multi-layer neural network.
 *
 * <p>Holds a weight matrix {@code w[nOut][nIn]} and a bias vector {@code b[nOut]},
 * applies a configurable activation ("sigmoid" or "tanh"), and supports
 * mini-batch SGD training via {@link #backward}.
 */
public class HiddenLayer {

	private int nIn;   // number of input units
	private int nOut;  // number of output units
	private double[][] w; // weights, indexed [out][in]
	private double[] b;   // biases, one per output unit

	// Activation function and its derivative, selected at construction time.
	private DoubleFunction<Double> activation;
	private DoubleFunction<Double> dactivation;

	/**
	 * Constructs a hidden layer.
	 *
	 * @param nIn        number of input units
	 * @param nOut       number of output units
	 * @param w          weight matrix [nOut][nIn]; if {@code null}, initialized
	 *                   uniformly in [-1/nIn, 1/nIn]
	 * @param b          bias vector [nOut]; if {@code null}, initialized to zeros
	 * @param rng        random source; if {@code null}, a fixed-seed
	 *                   {@code Random(1234)} is used (reproducible init)
	 * @param activation activation name: "sigmoid" (also the default when
	 *                   {@code null}) or "tanh"
	 * @throws IllegalArgumentException if the activation name is not supported
	 */
	public HiddenLayer(int nIn, int nOut, double[][] w, double[] b, Random rng, String activation) {

		if (rng == null)
			rng = new Random(1234); // fixed seed for reproducible initialization

		if (w == null) {

			w = new double[nOut][nIn];
			double w_ = 1. / nIn; // scale shrinks with fan-in to keep pre-activations bounded

			for (int j = 0; j < nOut; j++) {
				for (int i = 0; i < nIn; i++) {
					w[j][i] = uniform(-w_, w_, rng); // initialize w with uniform distribution
				}
			}

		}

		if (b == null)
			b = new double[nOut]; // Java zero-initializes: biases start at 0

		this.nIn = nIn;
		this.nOut = nOut;
		this.w = w;
		this.b = b;

		// BUG FIX: compare String contents with equals(), not ==. Reference
		// comparison only matched interned literals; a runtime-built String
		// (e.g. read from configuration) was wrongly rejected. The literal is
		// placed on the left so the comparison is null-safe.
		if (activation == null || "sigmoid".equals(activation)) {

			this.activation = (double x) -> sigmoid(x);
			this.dactivation = (double x) -> dsigmoid(x);

		} else if ("tanh".equals(activation)) {

			this.activation = (double x) -> tanh(x);
			this.dactivation = (double x) -> dtanh(x);

		} else {
			throw new IllegalArgumentException("activation function not supported");
		}

	}

	/**
	 * Computes the layer's activated output for a single input vector.
	 *
	 * @param x input vector of length {@code nIn}
	 * @return activation(w·x + b), length {@code nOut}
	 */
	public double[] output(double[] x) {

		double[] y = new double[nOut];

		for (int j = 0; j < nOut; j++) {
			double preActivation_ = 0.;

			for (int i = 0; i < nIn; i++) {
				preActivation_ += w[j][i] * x[i];
			}
			preActivation_ += b[j];

			y[j] = activation.apply(preActivation_);
		}

		return y;
	}

	/**
	 * Computes the layer output and samples a binary value per unit, treating
	 * each activated output as a Bernoulli probability (used e.g. for dropout-
	 * style stochastic layers).
	 *
	 * @param x   integer input vector (cast to double before the forward pass)
	 * @param rng random source for the Bernoulli draws
	 * @return binary vector of length {@code nOut}
	 */
	public int[] outputBinomial(int[] x, Random rng) {

		int[] y = new int[nOut];

		double[] xCast = new double[x.length];
		for (int i = 0; i < xCast.length; i++) {
			xCast[i] = (double) x[i];
		}

		double[] out = output(xCast);

		for (int j = 0; j < nOut; j++) {
			y[j] = binomial(1, out[j], rng); // one Bernoulli trial with p = out[j]
		}

		return y;
	}

	/** Forward pass; alias for {@link #output}. */
	public double[] forward(double[] x) {
		return output(x);
	}

	/**
	 * Backpropagates the error from the next layer through this one and updates
	 * {@code w} and {@code b} with one mini-batch SGD step.
	 *
	 * <p>NOTE(review): {@code dactivation} is applied to {@code Z[n][j]}, this
	 * layer's activated output — this assumes dsigmoid/dtanh are expressed in
	 * terms of the activation's output value (e.g. y*(1-y) for sigmoid); confirm
	 * against util.ActivationFunction.
	 *
	 * @param X             mini-batch of inputs to this layer [minibatchSize][nIn]
	 * @param Z             this layer's outputs for the batch [minibatchSize][nOut]
	 * @param dY            error terms from the next layer [minibatchSize][nOutNext]
	 * @param wprev         weight matrix of the next layer [nOutNext][nOut]
	 * @param minibatchSize number of examples in the batch
	 * @param learningRate  SGD step size
	 * @return this layer's error terms dZ [minibatchSize][nOut], for further
	 *         backpropagation
	 */
	public double[][] backward(double[][] X, double[][] Z, double[][] dY, double[][] wprev, int minibatchSize,
			double learningRate) {

		double[][] dZ = new double[minibatchSize][nOut]; // backpropagation error

		double[][] grad_w = new double[nOut][nIn];
		double[] grad_b = new double[nOut];

		// Accumulate gradients of w and b over the mini-batch.
		for (int n = 0; n < minibatchSize; n++) {

			for (int j = 0; j < nOut; j++) {

				// Chain rule: sum the next layer's errors weighted by its weights
				// into unit j ( k ranges over nOut of the next layer ).
				for (int k = 0; k < dY[0].length; k++) {
					dZ[n][j] += wprev[k][j] * dY[n][k];
				}
				dZ[n][j] *= dactivation.apply(Z[n][j]);

				for (int i = 0; i < nIn; i++) {
					grad_w[j][i] += dZ[n][j] * X[n][i];
				}

				grad_b[j] += dZ[n][j];
			}
		}

		// Apply one SGD step with gradients averaged over the mini-batch.
		for (int j = 0; j < nOut; j++) {
			for (int i = 0; i < nIn; i++) {
				w[j][i] -= learningRate * grad_w[j][i] / minibatchSize;
			}
			b[j] -= learningRate * grad_b[j] / minibatchSize;
		}

		return dZ;
	}

	public double[][] getW() {
		return w;
	}

	public double[] getB() {
		return b;
	}

	public int getIn() {
		return nIn;
	}

	public void setIn(int nIn) {
		this.nIn = nIn;
	}

	public int getOut() {
		return nOut;
	}

	public void setOut(int nOut) {
		this.nOut = nOut;
	}
}
