package archive;

import java.io.FileWriter;
import java.io.IOException;
import java.util.Scanner;

import neuralnetworks.NeuralNetwork;


public class MultiLayerPerceptronJaap implements NeuralNetwork
{
    NeuronLayer [] d_layers;          // hidden + output layers; the input layer is implicit
    int            d_inputlayersize;  // expected length of input vectors
    int            d_outputlayersize; // length of the vector produced by the last layer
    double         d_learningrate;    // step size used when adjusting weights

    /**
     * Constructs an MLP from a layer specification.
     *
     * @param layersetup     sizes of the hidden/output layers, in order
     * @param inputlayersize length of the input vectors fed to the network
     * @param learningrate   step size used during weight updates
     */
    public MultiLayerPerceptronJaap(int [] layersetup, int inputlayersize, double learningrate)
    {
        d_learningrate   = learningrate;
        d_layers = new NeuronLayer[layersetup.length];
        d_inputlayersize  = inputlayersize;
        // The input layer is implied but does not exist as a NeuronLayer;
        // it only determines the fan-in of the first real layer.
        int lastlayersize = inputlayersize;
        for(int index = 0; index < d_layers.length; ++index)
        {
            d_layers[index] = new NeuronLayer(layersetup[index], lastlayersize);
            lastlayersize = layersetup[index];
        }
        d_outputlayersize = d_layers[d_layers.length - 1].d_activations.length;
    }

    /**
     * Constructs an MLP by reading a network previously saved with writeToFile.
     *
     * @param scanner source positioned at data in the writeToFile format
     */
    public MultiLayerPerceptronJaap(Scanner scanner)
    {
        d_learningrate   = scanner.nextDouble();
        d_layers = new NeuronLayer[scanner.nextInt()];
        d_inputlayersize = scanner.nextInt();
        for(int index = 0; index < d_layers.length; ++index)
            d_layers[index] = new NeuronLayer(scanner);
        d_outputlayersize = d_layers[d_layers.length - 1].d_activations.length;
    }

    /**
     * Runs a forward pass through all layers.
     *
     * @param input input vector; should be at least d_inputlayersize long
     *              (a warning is printed otherwise, but processing continues)
     * @return the activation vector of the last layer
     */
    public double [] process(double [] input)
    {
        if (d_inputlayersize > input.length)
            System.out.println("Warning inputvector too short");

        double [] output = input.clone();
        for(int index = 0; index < d_layers.length; ++index)
            output = d_layers[index].forwardPass(output);
        return output;
    }

    /**
     * Trains the network towards a single scalar reward value
     * (treated as a one-element target vector).
     *
     * @param input  input vector
     * @param reward desired scalar output
     */
    public void train(double [] input, double reward)
    {
        double [] target = new double[1];
        target[0] = reward;
        // BUG FIX: the target vector was built but never used, so this
        // overload silently performed no training at all.
        train(input, target);
    }

    /**
     * Trains the network on one input/target pair: forward pass,
     * output-error computation, then backpropagation with a weight update.
     *
     * @param input  input vector
     * @param target desired output vector (same length as the output layer)
     */
    public void train(double [] input, double [] target)
    {
        double [] output      = process(input);
        double [] outputerror = substract(target, output);
        backProbagate(outputerror);
    }

    /**
     * Trains the network on one labelled example.
     *
     * @param example supplies the input and target vectors
     */
    public void train(Example example)
    {
        train(example.getInput(), example.getTarget());
    }

    /**
     * Propagates the output error backwards through the layers, then lets
     * every layer adjust its weights with the current learning rate.
     *
     * @param outputerror error vector at the output layer (target - output)
     */
    private void backProbagate(double[] outputerror)
    {
        double [] layererror = outputerror.clone();
        // Backpropagation of the error, from the output layer towards the input.
        for(int index = d_layers.length - 1; index >= 0; --index)
            layererror = d_layers[index].assignError(layererror);
        // Adjusting the weights once every layer knows its error.
        for(int index = 0; index < d_layers.length; ++index)
            d_layers[index].changeWeigths(d_learningrate);
    }

    /**
     * Writes the network (including weights) in the format read by the
     * MultiLayerPerceptronJaap(Scanner) constructor.
     *
     * @param writer destination; not closed by this method
     * @throws IOException if writing fails
     */
    public void writeToFile(FileWriter writer) throws IOException
    {
        writer.write(Double.toString(d_learningrate) + " ");
        writer.write(Integer.toString(d_layers.length) + " ");
        writer.write(Integer.toString(d_inputlayersize) + " ");
        for(int index = 0; index < d_layers.length; ++index)
            d_layers[index].writeToFile(writer);
    }

    /**
     * Returns the element-wise difference array1 - array2.
     *
     * @param array1 minuend vector
     * @param array2 subtrahend vector, same length as array1
     * @return a newly allocated result vector
     * @throws IllegalArgumentException if the lengths differ
     *         (previously returned null, which only surfaced later as an NPE
     *         inside backProbagate)
     */
    public double [] substract(double [] array1, double [] array2)
    {
        if(array1.length != array2.length)
            throw new IllegalArgumentException(
                "array length mismatch: " + array1.length + " != " + array2.length);
        double [] result = new double[array1.length];
        for(int index = 0; index < array1.length; ++index)
            result[index] = array1[index] - array2[index];
        return result;
    }

    /**
     * Sets the learning rate used for subsequent weight updates.
     *
     * @param learningrate new step size
     */
    public void setLearingRate(double learningrate)
    {
        d_learningrate = learningrate;
    }

    /** @return the expected input vector length */
    public int getInputLayerSize()
    {
        return d_inputlayersize;
    }

    /** @return the output vector length */
    public int getOutputLayerSize()
    {
        return d_outputlayersize;
    }
}

