package pl.edu.agh.student.nn.core;

import pl.edu.agh.student.nn.core.bp.BPLayer;

import java.util.ArrayList;
import java.util.List;

/**
 * Prosta siec nauronowa
 */
/**
 * A simple feed-forward neural network composed of an ordered list of
 * {@link Layer}s. The first layer is treated as the input layer and the
 * last as the output layer.
 *
 * <p>Supports two training modes: a generic per-layer {@link #learn()} step
 * and classic error back-propagation via {@link #backPropagation(int)}.</p>
 */
public class NeuralNetwork {
    /** Layers in evaluation order; index 0 is the input layer. */
    protected List<Layer> layers = new ArrayList<>();
    /** Training examples. Field name kept ("lerning") to match the public accessors. */
    private List<TrainingSet> lerningData = new ArrayList<>();
    /** Number of completed training epochs. */
    private int currentIteration = 0;

    /**
     * Computes the network output by evaluating every layer in order.
     * Input-neuron values must be set beforehand via {@link #setInput(double[])}.
     */
    public void calculate() {
        for (Layer layer : layers) {
            layer.calculate();
        }
    }

    /**
     * Appends a layer at the end of the network.
     *
     * @return {@code true} (as specified by {@link List#add})
     */
    public boolean addLayer(Layer layer) {
        return layers.add(layer);
    }

    /**
     * Removes the given layer from the network.
     *
     * @return {@code true} if the layer was present and removed
     */
    public boolean removeLayer(Layer layer) {
        return layers.remove(layer);
    }

    public List<Layer> getLayers() {
        return layers;
    }

    public void setLayers(List<Layer> layers) {
        this.layers = layers;
    }

    /**
     * Returns the last layer of the network.
     *
     * @throws IndexOutOfBoundsException if the network has no layers
     */
    public Layer getOutputLayer() {
        return layers.get(layers.size() - 1);
    }

    /**
     * Runs one training epoch: for every training set, feeds the input
     * forward and lets each layer apply its own learning rule.
     */
    public void learn() {
        for (TrainingSet trainingSet : lerningData) {
            setInput(trainingSet.getElements());
            calculate();
            for (Layer layer : layers) {
                layer.learn(currentIteration, trainingSet);
            }
        }
        currentIteration++;
    }

    /**
     * Runs one epoch of error back-propagation over all training sets.
     * Errors are propagated from the output layer back towards the input
     * layer (the input layer itself, index 0, is skipped).
     *
     * <p>Every layer above index 0 is assumed to be a {@link BPLayer};
     * a {@link ClassCastException} is thrown otherwise.</p>
     *
     * @param displayErrorStep print the per-set RMS errors and their mean
     *                         every this many epochs; {@code <= 0} disables
     *                         the printout entirely
     */
    public void backPropagation(int displayErrorStep) {
        // StringBuilder instead of repeated String concatenation in the loop.
        StringBuilder rmsErrorDisplay = new StringBuilder();
        double rmsMean = 0.0;
        for (TrainingSet trainingSet : lerningData) {
            setInput(trainingSet.getElements());
            calculate();
            // Propagate errors backwards; layer 0 (input) has no weights to adjust.
            for (int i = layers.size() - 1; i > 0; i--) {
                BPLayer bpLayer = (BPLayer) layers.get(i);
                bpLayer.backPropagation(currentIteration, trainingSet);
            }
            if (displayErrorStep > 0 && (currentIteration + 1) % displayErrorStep == 0) {
                double rmsError = calculateRmsError(trainingSet);
                rmsMean += rmsError;
                rmsErrorDisplay.append(String.format("%.5f  ", rmsError));
            }
        }
        currentIteration++;

        // Guard against 0/0 = NaN when there is no training data.
        if (!lerningData.isEmpty()) {
            rmsMean /= lerningData.size();
        }
        if (rmsErrorDisplay.length() > 0) {
            System.out.println(String.format("[%6d] RMS=%s, mean=%.5f",
                    currentIteration, rmsErrorDisplay, rmsMean));
        }
    }

    /**
     * Recomputes the network output for the currently-set input and returns
     * the root-mean-square error against the training set's expected values.
     *
     * <p>Note: this calls {@link #calculate()} again so the error reflects
     * the weights as they stand after any just-applied update.</p>
     *
     * @param trainingSet source of the expected output values
     * @return sqrt(mean((expected - actual)^2)) over all output neurons
     */
    public double calculateRmsError(TrainingSet trainingSet) {
        calculate();
        double sum = 0.0;
        double[] output = getOutput();
        for (int i = 0; i < output.length; i++) {
            double diff = trainingSet.getExpected()[i] - output[i];
            sum += diff * diff;
        }
        return Math.sqrt(sum / output.length);
    }

    /**
     * Returns the current output values of the last (output) layer,
     * one entry per output neuron.
     */
    public double[] getOutput() {
        List<Neuron> outputNeurons = getOutputLayer().getNeurons();
        double[] v = new double[outputNeurons.size()];
        for (int i = 0; i < v.length; i++) {
            v[i] = outputNeurons.get(i).getOutput();
        }
        return v;
    }

    /**
     * Sets the network input by writing {@code v[i]} into the i-th neuron of
     * the first layer. All neurons in the input layer are assumed to be
     * {@link InputNeuron}s; a {@link ClassCastException} is thrown otherwise.
     *
     * @param v input vector; must not be longer than the input layer
     */
    public void setInput(double[] v) {
        Layer inputLayer = getLayers().get(0);
        for (int i = 0; i < v.length; i++) {
            ((InputNeuron) inputLayer.getNeurons().get(i)).setInput(v[i]);
        }
    }

    /**
     * Prints the network structure to standard output: the input-layer size,
     * the hidden-layer sizes (if any), the output-layer size, then each
     * layer's own {@code display()} text.
     */
    public void display() {
        StringBuilder sb = new StringBuilder();
        sb.append(getLayers().get(0).getNeurons().size()).append("\n");
        if (getLayers().size() - 2 > 0) {
            for (int i = 1; i <= getLayers().size() - 2; i++) {
                sb.append(getLayers().get(i).getNeurons().size()).append(" ");
            }
            sb.append("\n");
        }
        sb.append(getOutputLayer().getNeurons().size()).append("\n");
        for (Layer layer : layers) {
            sb.append(layer.display());
        }
        System.out.println(sb.toString());
    }

    /**
     * Prints the computed network response (one output value per line)
     * to standard output.
     */
    public void showOutput() {
        for (double d : getOutput()) {
            System.out.println(d);
        }
    }

    /**
     * Prints the computed value of every neuron, one line per layer.
     * For {@link InputOutputNeuron}s the raw input sum is printed;
     * for other neurons the activated output.
     */
    public void showFullOutput() {
        for (Layer l : getLayers()) {
            for (Neuron n : l.getNeurons()) {
                System.out.print((n instanceof InputOutputNeuron
                        ? ((InputOutputNeuron) n).sumInputs() : n.getOutput()) + " ");
            }
            System.out.println();
        }
    }

    /** Returns the training data (accessor name kept for backward compatibility). */
    public List<TrainingSet> getLerningData() {
        return lerningData;
    }

    /** Replaces the training data (accessor name kept for backward compatibility). */
    public void setLerningData(List<TrainingSet> lerningData) {
        this.lerningData = lerningData;
    }

    /** Returns the number of completed training epochs. */
    public int getCurrentIteration() {
        return currentIteration;
    }

    /**
     * Prints each output neuron's bias and input weights, {@code m} weights
     * per row. Output-layer neurons are assumed to be
     * {@link InputOutputNeuron}s; a {@link ClassCastException} is thrown
     * otherwise.
     *
     * @param m number of weights per printed row; must be {@code > 0}
     */
    public void displayMxX(int m) {
        for (Neuron neuron : getOutputLayer().getNeurons()) {
            InputOutputNeuron ioNeuron = (InputOutputNeuron) neuron;
            int i = 1;
            System.out.printf("bias=%.2f\n", ioNeuron.getBias());
            for (Connection c : ioNeuron.getInputConnections()) {
                System.out.printf("%.2f ", c.getWeight());
                if (i++ % m == 0) {
                    System.out.println();
                }
            }
            System.out.println("=======================");
        }
    }
}
