/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package pl.edu.agh.student.nn.core.bp;

import pl.edu.agh.student.nn.core.*;

/**
 * A neural-network layer that trains its incoming connection weights with the
 * backpropagation (delta rule) algorithm, including a momentum term. Layers are
 * chained via {@link #setNextLayer}; the output layer has no next layer.
 *
 * @author janburkot
 */
public class BPLayer extends Layer {

    /** The following layer in the forward direction; {@code null} marks the output layer. */
    private BPLayer nextLayer;

    public BPLayer(int size, LayerType type) {
        super(size, type);
    }

    /**
     * Performs one backpropagation step over this layer: for every neuron the
     * error term (delta) is computed first, and the neuron's incoming weights
     * (and bias) are adjusted immediately afterwards. The interleaving is safe
     * because a neuron's delta depends only on the next layer's state, not on
     * this layer's input weights.
     *
     * @param currentIteration training iteration index; at iteration 0 the
     *                         momentum term is skipped (no previous update exists)
     * @param trainingSet      supplies the expected outputs for the output layer
     */
    public void backPropagation(int currentIteration, TrainingSet trainingSet) {
        double learningRate = getDoubleParam(LayerParameter.ALPHA, currentIteration);
        double momentum = getDoubleParam(LayerParameter.MOMENTUM, currentIteration);
        int neuronCount = getNeurons().size();
        for (int k = 0; k < neuronCount; k++) {
            calculateBpError(k, trainingSet);
            adjustWeightsBp(currentIteration, k, learningRate, momentum);
        }
    }

    /**
     * Applies the delta-rule weight update (with momentum) to every incoming
     * connection of the given neuron, and to its bias if present.
     * <p>
     * The momentum term {@code (w - wOld) * momentum} reuses the previous
     * update's weight change — this assumes {@code Connection.setWeight} and
     * {@code BpNeuron.setBias} record the prior value as the "old" one;
     * NOTE(review): confirm against those classes.
     *
     * @param currentIteration current training iteration; momentum is only
     *                         added after the first iteration
     * @param neuronIndex_k_m  index of the neuron within this layer
     * @param learningRate     step size (alpha) for this iteration
     * @param momentum         momentum coefficient for this iteration
     */
    private void adjustWeightsBp(int currentIteration, int neuronIndex_k_m,
                                 double learningRate, double momentum) {
        BpNeuron neuron_k_m = (BpNeuron) getNeurons().get(neuronIndex_k_m);

        for (Connection connection_i_k : neuron_k_m.getInputConnections()) {
            double x_i_m = connection_i_k.getFromNeuron().getOutput();
            double oldWeight_i_k_m = connection_i_k.getWeight();
            // Delta rule: w += alpha * delta * input
            double newWeight_i_k_m = oldWeight_i_k_m + learningRate * neuron_k_m.getError() * x_i_m;
            if (currentIteration > 0) {
                newWeight_i_k_m += (oldWeight_i_k_m - connection_i_k.getOldWeight()) * momentum;
            }
            connection_i_k.setWeight(newWeight_i_k_m);
        }

        if (neuron_k_m.hasBias()) {
            double oldBiasWeight_k_m = neuron_k_m.getBias();
            // Bias behaves like a weight on a constant input of 1.0.
            double newBiasWeight_k_m = oldBiasWeight_k_m + learningRate * neuron_k_m.getError();
            if (currentIteration > 0) {
                newBiasWeight_k_m += (oldBiasWeight_k_m - neuron_k_m.getOldBias()) * momentum;
            }
            neuron_k_m.setBias(newBiasWeight_k_m);
        }
    }

    /**
     * Computes and stores the error term (delta) of the given neuron.
     * <p>
     * Output layer ({@code nextLayer == null}): delta = f'(net) * (expected - actual).
     * Hidden layers: delta = f'(net) * sum over next-layer neurons of
     * (delta * weight). The next layer's {@code getOldWeight()} is used because
     * that layer's weights were presumably already updated earlier in this
     * pass — NOTE(review): confirm the layer processing order in the trainer.
     *
     * @param neuronIndex_k_m index of the neuron within this layer
     * @param trainingSet     supplies the expected output for the output layer
     */
    private void calculateBpError(int neuronIndex_k_m, TrainingSet trainingSet) {
        BpNeuron neuron_k_m = (BpNeuron) getNeurons().get(neuronIndex_k_m);

        double delta_k_m;
        double functionDerivative_k_m = neuron_k_m.getActivationFunction().d(neuron_k_m.sumInputs());

        if (nextLayer == null) {
            // Output layer: error is the difference to the training target.
            double expectedResult_k = trainingSet.getExpected()[neuronIndex_k_m];
            delta_k_m = functionDerivative_k_m * (expectedResult_k - neuron_k_m.getOutput());
        } else {
            // Hidden layers: backpropagate the next layer's errors through
            // the connection at this neuron's index.
            double sum = 0.0;
            for (Neuron neuron : nextLayer.getNeurons()) {
                BpNeuron neuron_k_m1 = (BpNeuron) neuron;
                double delta_k_m1 = neuron_k_m1.getError();
                double weight_w_k_m1 = neuron_k_m1.getInputConnections().get(neuronIndex_k_m).getOldWeight();
                sum += weight_w_k_m1 * delta_k_m1;
            }
            delta_k_m = functionDerivative_k_m * sum;
        }

        neuron_k_m.setError(delta_k_m);
    }

    /** @return the following layer, or {@code null} if this is the output layer */
    public BPLayer getNextLayer() {
        return nextLayer;
    }

    /** @param nextLayer the following layer; pass {@code null} for the output layer */
    public void setNextLayer(BPLayer nextLayer) {
        this.nextLayer = nextLayer;
    }
}
