package controler.algorithms.nnbp;

import java.util.Random;

/** 
 * @file NeuralNetwork.java  
 * 
 **/

/**
 *    Implementacja interfejsu sieci neuronowej.
 *  Klasa implementujaca siec neuronowa.
 */

public class NeuralNetwork implements INeuralNetwork {

    /** Layer sizes: element i is the number of neurons in layer i (layer 0 is the input layer). */
    private final int[] layersAndNeurons;
    /** Learning rate applied to every weight correction. */
    private double learningFactor;
    /** Momentum coefficient applied to the previous step's weight deltas. */
    private final double momentum;
    /** outputs[i][j] — activation of neuron j in layer i, filled by the forward pass. */
    private final double[][] outputs;
    /**
     * weights[i][j][k] — weight from neuron k of layer i-1 to neuron j of layer i.
     * The extra trailing slot weights[i][j][layersAndNeurons[i-1]] holds the bias.
     */
    private final double[][][] weights;
    /** errors[i][j] — back-propagated error term (delta) of neuron j in layer i; layers 1..n-1 only. */
    private final double[][] errors;
    /** previousWeights[i][j][k] — weight delta applied in the previous training step (momentum memory). */
    private final double[][][] previousWeights;

    private final Random r = new Random();

    /**
     * Builds the network with the given topology and training parameters.
     *
     * @param lan layer sizes: lan[i] is the neuron count of layer i
     *            (lan[0] = inputs, lan[lan.length - 1] = outputs); copied defensively.
     * @param lf  learning rate.
     * @param m   momentum coefficient.
     */
    public NeuralNetwork(int[] lan, double lf, double m)
    {
        // Defensive copy: later mutation of the caller's array must not corrupt the topology.
        this.layersAndNeurons = lan.clone();
        this.learningFactor = lf;
        this.momentum = m;

        outputs = new double[layersAndNeurons.length][];
        for (int i = 0; i < layersAndNeurons.length; i++)
            outputs[i] = new double[layersAndNeurons[i]];

        errors = new double[layersAndNeurons.length][];
        weights = new double[layersAndNeurons.length][][];
        previousWeights = new double[layersAndNeurons.length][][];

        // Only layers 1..n-1 carry incoming weights; each neuron gets one weight per
        // predecessor neuron plus one trailing bias slot.
        for (int i = 1; i < layersAndNeurons.length; i++)
        {
            errors[i] = new double[layersAndNeurons[i]];
            weights[i] = new double[layersAndNeurons[i]][];
            previousWeights[i] = new double[layersAndNeurons[i]][];

            for (int j = 0; j < layersAndNeurons[i]; j++)
            {
                weights[i][j] = new double[layersAndNeurons[i - 1] + 1];
                // Java zero-initializes new arrays, so previousWeights starts at 0.0
                // without an explicit fill loop.
                previousWeights[i][j] = new double[layersAndNeurons[i - 1] + 1];

                // Random initial weights uniformly in [-0.5, 0.5).
                for (int k = 0; k < weights[i][j].length; k++)
                    weights[i][j][k] = r.nextDouble() - 0.5;
            }
        }
    }

    /**
     * Returns the current learning rate.
     *
     * @return the learning rate used for weight updates.
     */
    public double getLearningRate()
    {
        return this.learningFactor;
    }

    /**
     * Sets the learning rate.
     *
     * @param lf the new learning rate.
     */
    public void setLearningRate(double lf)
    {
        this.learningFactor = lf;
    }

    /** Logistic sigmoid activation: 1 / (1 + e^-x). */
    private double sigmoid(double x)
    {
        return 1.0 / (1.0 + Math.exp(-x));
    }

    /**
     * Forward pass: propagates the input vector through the network and stores
     * every neuron's activation in {@code outputs}.
     *
     * @param in input vector; must have at least layersAndNeurons[0] elements.
     */
    public void execution(double[] in)
    {
        // Layer 0 simply copies the input values.
        for (int i = 0; i < layersAndNeurons[0]; i++)
            outputs[0][i] = in[i];

        for (int i = 1; i < layersAndNeurons.length; i++)
        {
            for (int j = 0; j < layersAndNeurons[i]; j++)
            {
                double active = 0.0;

                for (int k = 0; k < layersAndNeurons[i - 1]; k++)
                    active += outputs[i - 1][k] * weights[i][j][k];

                // Bias lives in the extra trailing slot of the weight row.
                active += weights[i][j][layersAndNeurons[i - 1]];

                outputs[i][j] = sigmoid(active);
            }
        }
    }

    /**
     * One training step of back-propagation with momentum: runs a forward pass,
     * computes the per-neuron error terms, then updates the weights in place.
     *
     * @param in              input vector.
     * @param expectedOutputs target output vector for this input.
     */
    public void propagation(double[] in, double[] expectedOutputs)
    {
        execution(in);

        final int last = layersAndNeurons.length - 1;

        // Output-layer delta: f'(net) * (target - output), where f'(net) = y * (1 - y)
        // for the sigmoid.
        for (int i = 0; i < layersAndNeurons[last]; i++)
            errors[last][i] = outputs[last][i] * (1 - outputs[last][i]) * (expectedOutputs[i] - outputs[last][i]);

        // Hidden-layer deltas, propagated backwards through the next layer's weights.
        for (int i = last - 1; i > 0; i--)
        {
            for (int j = 0; j < layersAndNeurons[i]; j++)
            {
                double w = 0.0;

                for (int k = 0; k < layersAndNeurons[i + 1]; k++)
                    w += errors[i + 1][k] * weights[i + 1][k][j];

                errors[i][j] = outputs[i][j] * (1 - outputs[i][j]) * w;
            }
        }

        // Momentum term: re-apply a fraction of the previous step's deltas
        // (including the bias slot) before computing the new ones.
        for (int i = 1; i < layersAndNeurons.length; i++)
        {
            for (int j = 0; j < layersAndNeurons[i]; j++)
            {
                for (int k = 0; k < layersAndNeurons[i - 1]; k++)
                    weights[i][j][k] += momentum * previousWeights[i][j][k];

                weights[i][j][layersAndNeurons[i - 1]] += momentum * previousWeights[i][j][layersAndNeurons[i - 1]];
            }
        }

        // Gradient step: compute this step's deltas, apply them, and remember them
        // for the next step's momentum term. The bias delta omits the input factor
        // (bias input is implicitly 1).
        for (int i = 1; i < layersAndNeurons.length; i++)
        {
            for (int j = 0; j < layersAndNeurons[i]; j++)
            {
                for (int k = 0; k < layersAndNeurons[i - 1]; k++)
                {
                    previousWeights[i][j][k] = learningFactor * errors[i][j] * outputs[i - 1][k];
                    weights[i][j][k] += previousWeights[i][j][k];
                }

                previousWeights[i][j][layersAndNeurons[i - 1]] = learningFactor * errors[i][j];
                weights[i][j][layersAndNeurons[i - 1]] += previousWeights[i][j][layersAndNeurons[i - 1]];
            }
        }
    }

    /**
     * Mean squared error of the last forward pass against the expected outputs.
     * Call {@link #execution(double[])} (or {@link #propagation}) first.
     *
     * @param expectedOutputs target output vector.
     * @return average of the squared per-output differences.
     */
    public double mse(double[] expectedOutputs)
    {
        final int last = layersAndNeurons.length - 1;
        double mse = 0.0;

        for (int i = 0; i < layersAndNeurons[last]; i++)
        {
            double diff = expectedOutputs[i] - outputs[last][i];
            mse += diff * diff;
        }

        return mse / layersAndNeurons[last];
    }

    /**
     * Returns one output-layer activation from the last forward pass.
     *
     * @param number index of the output neuron.
     * @return activation of that output neuron.
     */
    public double getOutput(int number)
    {
        return outputs[layersAndNeurons.length - 1][number];
    }

    /**
     * Returns the output-layer activations from the last forward pass.
     * NOTE(review): this exposes the live internal array (kept for backward
     * compatibility) — callers must not modify it.
     *
     * @return the output layer's activation array.
     */
    public double[] getOutputs()
    {
        return outputs[layersAndNeurons.length - 1];
    }

}
