package made.ann;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.List;
import java.util.Random;
import java.util.ArrayList;
import java.util.Scanner;

import made.util.Config;
import made.util.Helpers;
import made.util.Logger;

/**
 * Multi-layer feed-forward neural network trained with back-propagation,
 * optionally using momentum and batch (per-epoch) weight updates.
 * Implements Config to inherit shared constants (e.g. filepath,
 * MPM_DEBUG_LEVEL, ANN_USE_MOMENTUM, BATCH_UPDATE) — the constant-interface
 * pattern; NOTE(review): static imports would be the modern alternative.
 */
public class NeuralNetwork implements Config
{
    protected int _numLayers; // Total number of layers
    protected int[] _neuronsPerLayer; // Number of neurons in each layer
    protected List<Neuron>[] _neurons; // Neurons, one list per layer
    protected List<Float>[] _biasWeights; // Bias weights
    protected List<Float>[] _accBiasWeights; // Accumulated bias weight deltas
    protected List<Float>[] _biasWeightGradients; // Bias weight error gradients
    protected List<Float>[] _previousBiasWeightDeltas; // Previous bias weight
                                                       // deltas
    protected Float _sharpness; // Sigmoid sharpness
    protected Random _rand; // Random number generator
    protected ActivationFunctions _activation; // Activation function library
    protected Data _data; // Training data set (inputs, outputs, learning rate)
    protected String _name; // Base name used to build data/config file paths
    protected float _bias; // Constant bias input fed to every non-input neuron
    protected float _momentumCoefficient; // Scales the previous weight delta

    /**
     * Creates a network with randomly initialized weights and loads the
     * training data set from "&lt;filepath&gt;&lt;name&gt;_train.txt".
     *
     * @param neuronsPerLayer neuron count per layer, input through output
     * @param name            base name used to locate the data files
     * @param learningRate    back-propagation learning rate
     * @param momentum        momentum coefficient for weight updates
     * @param maxIterations   iteration cap stored with the training data
     */
    public NeuralNetwork(int[] neuronsPerLayer, String name, float learningRate, float momentum, int maxIterations)
    {
        _name = name;
        _neuronsPerLayer = neuronsPerLayer;
        _momentumCoefficient = momentum;
        _sharpness = 1.0f;
        _activation = new ActivationFunctions();
        _rand = new Random();

        initDataStructures();
        initWeightsRandom();

        String trainingFile = filepath + name + "_train.txt";
        _data = new Data(trainingFile, learningRate, momentum, maxIterations);
    }

    /**
     * Creates a network whose weights are restored from
     * "&lt;filepath&gt;&lt;name&gt;_config.txt" (as written by saveWeights()) and loads
     * the training data set from "&lt;filepath&gt;&lt;name&gt;_train.txt".
     *
     * Bug fix: _rand, _activation and _sharpness were previously left null,
     * which made feedForward() (and therefore getOutput()) throw a
     * NullPointerException on any network restored from file. They are now
     * initialized exactly as in the training constructor.
     *
     * @param neuronsPerLayer neuron count per layer, input through output
     * @param name            base name used to locate the data/config files
     */
    public NeuralNetwork(int[] neuronsPerLayer, String name)
    {
        _neuronsPerLayer = neuronsPerLayer;
        _name = name;
        _rand = new Random();
        _activation = new ActivationFunctions();
        _sharpness = 1.0f; // same sigmoid sharpness as the training constructor
        String datafilename = filepath + name + "_train.txt";
        _data = new Data(datafilename);

        initDataStructures();
        String configFilename = filepath + name + "_config.txt";
        initWeightsFromFile(configFilename);
    }

    /**
     * Allocates the per-layer neuron lists and the bias-weight bookkeeping
     * lists (current, accumulated, gradient, previous-delta), then creates
     * the input-layer neurons. Hidden/output neurons are created later by
     * initWeightsRandom() or initWeightsFromFile().
     */
    @SuppressWarnings("unchecked")
    protected void initDataStructures()
    {
        _bias = 1.0f;
        _numLayers = _neuronsPerLayer.length;

        _neurons = new ArrayList[_numLayers];
        for (int layer = 0; layer < _numLayers; layer++)
            _neurons[layer] = new ArrayList<Neuron>();

        // One bias-weight list per non-input layer.
        int biasLayers = _numLayers - 1;
        _biasWeights = new ArrayList[biasLayers];
        _accBiasWeights = new ArrayList[biasLayers];
        _biasWeightGradients = new ArrayList[biasLayers];
        _previousBiasWeightDeltas = new ArrayList[biasLayers];

        for (int layer = 0; layer < biasLayers; layer++)
        {
            int size = _neuronsPerLayer[layer + 1];
            _biasWeights[layer] = new ArrayList<Float>(size);
            _accBiasWeights[layer] = new ArrayList<Float>(size);
            _biasWeightGradients[layer] = new ArrayList<Float>(size);
            _previousBiasWeightDeltas[layer] = new ArrayList<Float>(size);

            for (int n = 0; n < size; n++)
            {
                _biasWeights[layer].add(0.0f);
                _accBiasWeights[layer].add(0.0f);
                _biasWeightGradients[layer].add(0.0f);
                _previousBiasWeightDeltas[layer].add(0.0f);
            }
        }

        // Input-layer neurons carry no incoming weights.
        for (int n = 0; n < _neuronsPerLayer[0]; n++)
            _neurons[0].add(new Neuron());
    }

    /**
     * Restores neuron and bias weights from a config file previously written
     * by saveWeights(): one space-separated line of neuron weights per
     * non-input layer, followed by one line of bias weights per non-input
     * layer. Creates the hidden/output-layer neurons as a side effect.
     *
     * On a missing file or any read/parse failure the process terminates via
     * System.exit. NOTE(review): exit status 0 signals success to the OS even
     * on error — consider a non-zero status.
     *
     * Fix: "ocurred" typo corrected in the error message.
     *
     * @param filename path of the config file to read
     */
    protected void initWeightsFromFile(String filename)
    {
        // if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
        Logger.logLine("* Initializing neural network from file *\n");

        File file = new File(filename);
        String nextLine = "";
        Scanner fileScanner;

        try
        {
            fileScanner = new Scanner(new FileReader(file));

            try
            {
                // One line of incoming weights per non-input layer.
                for (int i = 1; i < _neuronsPerLayer.length; i++)
                {
                    nextLine = fileScanner.nextLine();
                    Scanner lineScanner = new Scanner(nextLine);
                    lineScanner.useDelimiter(" ");

                    for (int j = 0; j < _neuronsPerLayer[i]; j++)
                    {
                        _neurons[i].add(j, new Neuron(_neuronsPerLayer[i - 1]));

                        for (int k = 0; k < _neuronsPerLayer[i - 1]; k++)
                        {
                            _neurons[i].get(j).setWeight(k, Float.parseFloat(lineScanner.next()));

                            if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                                Logger.logLine("Neuron[" + i + "][" + j + "], input " + k + ", weight = " + _neurons[i].get(j).getWeight(k));
                        }

                        if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                            System.out.println("");
                    }

                    lineScanner.close();
                }

                // One line of bias weights per non-input layer; reset the
                // accumulated deltas at the same time.
                for (int i = 1; i < _numLayers; i++)
                {
                    nextLine = fileScanner.nextLine();
                    Scanner lineScanner = new Scanner(nextLine);
                    lineScanner.useDelimiter(" ");

                    for (int j = 0; j < _neuronsPerLayer[i]; j++)
                    {
                        _biasWeights[i - 1].set(j, Float.parseFloat(lineScanner.next()));
                        _accBiasWeights[i - 1].set(j, 0.0f);

                        if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                            Logger.logLine("biasWeight[" + i + "][" + j + "] = " + _biasWeights[i - 1].get(j));
                    }

                    lineScanner.close();

                    if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                        System.out.println("");
                }
            } catch (Exception e)
            {
                Logger.logLine("An error occurred while reading the file '" + filename + "'");
                Logger.logLine("Exiting...");
                System.exit(0);
            } finally
            {
                fileScanner.close();
            }
        } catch (FileNotFoundException e)
        {
            Logger.logLine("Could not find the file '" + filename + "'");
            Logger.logLine("Exiting...");
            System.exit(0);
        }
    }

    /**
     * Populates the hidden and output layers with neurons whose incoming
     * weights and bias weights are drawn uniformly from [0, 1); accumulated
     * bias deltas are reset to zero.
     */
    protected void initWeightsRandom()
    {
        if (MPM_DEBUG_LEVEL.compareTo(DEBUG_LEVEL.NORMAL) > 0)
            Logger.logLine("* Initializing neural network with random weights *\n");

        // Create neurons for every non-input layer and randomize weights.
        for (int layer = 1; layer < _neurons.length; layer++)
        {
            int fanIn = _neuronsPerLayer[layer - 1];

            for (int n = 0; n < _neuronsPerLayer[layer]; n++)
            {
                Neuron neuron = new Neuron(fanIn);
                _neurons[layer].add(neuron);

                for (int w = 0; w < fanIn; w++)
                {
                    float weight = _rand.nextFloat();
                    neuron.setWeight(w, weight);

                    if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                        Logger.logLine("Neuron[" + layer + "][" + n + "], input " + w + ", weight = " + weight);
                }

                if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                    System.out.println("");
            }
        }

        // Randomize bias weights; clear the accumulated deltas.
        for (int layer = 1; layer < _numLayers; layer++)
        {
            for (int n = 0; n < _neuronsPerLayer[layer]; n++)
            {
                float weight = _rand.nextFloat();
                _biasWeights[layer - 1].set(n, weight);
                _accBiasWeights[layer - 1].set(n, 0.0f);

                if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                    Logger.logLine("biasWeight[" + layer + "][" + n + "] = " + weight);
            }

            if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                System.out.println("");
        }

        // initXORTest();
        // initLiapisTest();
    }

    /**
     * Propagates an input pattern through the network: copies the pattern
     * into the input layer, then computes each subsequent layer's neuron
     * outputs as sigmoid(weighted sum of the previous layer plus bias).
     *
     * @param input one value per input-layer neuron
     */
    protected void feedForward(List<Float> input)
    {
        if (MPM_DEBUG_LEVEL.compareTo(DEBUG_LEVEL.NORMAL) > 0)
            Logger.logLine("\t* Feed forward *\n");

        if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
        {
            Logger.logMethod();
            Logger.log("Input -");
        }

        // Copy the pattern into the input layer.
        for (int n = 0; n < _neurons[0].size(); n++)
        {
            if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                Logger.log(" " + input.get(n));

            _neurons[0].get(n).setOutput(input.get(n));
        }

        if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
        {
            Logger.logLine();
            Logger.logLine();
        }

        // Propagate layer by layer through the network.
        for (int layer = 1; layer < _neurons.length; layer++)
        {
            if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
            {
                Logger.logLine("------------------------------------------------------------");
                Logger.logLine("Layer " + layer);
                Logger.logLine("------------------------------------------------------------");
            }

            List<Neuron> previous = _neurons[layer - 1];

            for (int n = 0; n < _neurons[layer].size(); n++)
            {
                Neuron neuron = _neurons[layer].get(n);

                // Weighted sum of the previous layer's outputs.
                float sum = 0.0f;
                for (int w = 0; w < _neuronsPerLayer[layer - 1]; w++)
                    sum += previous.get(w).getOutput() * neuron.getWeight(w);

                // Add the constant bias input scaled by its weight.
                sum += _bias * _biasWeights[layer - 1].get(n);

                neuron.setOutput(_activation.sigmoid(sum, _sharpness));

                if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                    Logger.logLine("Neuron[" + layer + "][" + n + "], sum = " + sum + ", output = " + neuron.getOutput());
            }

            if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                Logger.logLine();
        }
    }

    /**
     * Returns the half sum of squared deviations (0.5 * SSD) between the
     * network's current output-layer values and the desired outputs of the
     * given training pattern. feedForward() must have run for the same
     * pattern beforehand.
     *
     * @param trainSetNum index of the training pattern to compare against
     * @return 0.5 * sum over outputs of (desired - actual)^2
     */
    protected float computeError(int trainSetNum)
    {
        if (MPM_DEBUG_LEVEL.compareTo(DEBUG_LEVEL.NORMAL) > 0)
            Logger.logLine("\t* Compute error *\n");

        List<Float> desired = _data.getOutputs().get(trainSetNum);
        List<Neuron> outputLayer = _neurons[_numLayers - 1];
        float ssd = 0.0f;

        for (int n = 0; n < outputLayer.size(); n++)
        {
            float deviation = desired.get(n) - outputLayer.get(n).getOutput();
            ssd += deviation * deviation;
        }

        ssd *= 0.5f;

        if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
            Logger.logLine("Total SSD = " + ssd + "\n");

        return ssd;
    }

    /**
     * Back-propagates the error for one training pattern: computes the sum
     * gradients and per-weight error gradients for the output layer, then
     * walks the hidden layers from back to front, storing gradients on the
     * neurons and in _biasWeightGradients for the subsequent weight update.
     * feedForward() must have run for the same pattern first.
     *
     * NOTE(review): the output-layer gradient includes the _sharpness factor
     * but the hidden-layer gradients do not; harmless while _sharpness is
     * 1.0f, but worth confirming against the intended sigmoid derivative.
     *
     * @param trainSet index of the training pattern whose desired outputs
     *                 are compared against the current network outputs
     */
    protected void backPropagate(int trainSet)
    {
        if (MPM_DEBUG_LEVEL.compareTo(DEBUG_LEVEL.NORMAL) > 0)
            Logger.logLine("* Back propagation *\n");

        float gradient = 0.0f;

        // Calculate gradients for output layer neurons
        if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
        {
            Logger.logLine("------------------------------------------------------------");
            Logger.logLine("Layer " + (_neurons.length - 1));
            Logger.logLine("------------------------------------------------------------");
        }

        for (int i = 0; i < _neurons[_numLayers - 1].size(); i++)
        {
            // Compute sum error gradient: -(desired - actual) * f'(sum),
            // with f'(sum) = sharpness * out * (1 - out) for the sigmoid.
            float actualOutput = _neurons[_numLayers - 1].get(i).getOutput();
            gradient = -(_data.getOutputs().get(trainSet).get(i) - actualOutput) * _sharpness * actualOutput * (1 - actualOutput);
            _neurons[_numLayers - 1].get(i).setSumGradient(gradient);

            if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                Logger.logLine("Neuron[" + (_numLayers - 1) + "][" + i + "], sumGradient = " + _neurons[_numLayers - 1].get(i).getSumGradient());

            // Compute weight error gradients (gradient * upstream output).
            float weightGradient = 0.0f;

            for (int j = 0; j < _neurons[_numLayers - 2].size(); j++)
            {
                weightGradient = gradient * _neurons[_numLayers - 2].get(j).getOutput();
                _neurons[_numLayers - 1].get(i).setWeightGradient(j, weightGradient);

                if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                    Logger.logLine("Neuron[" + (_numLayers - 1) + "][" + i + "], weightGradient(" + j + ") = "
                            + _neurons[_numLayers - 1].get(i).getWeightGradient(j));
            }

            // Bias input is the constant _bias, so its gradient uses it
            // in place of an upstream neuron output.
            weightGradient = gradient * _bias;
            _biasWeightGradients[_biasWeightGradients.length - 1].set(i, weightGradient);

            if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                Logger.logLine("biasWeightGradients[" + i + "], weightGradient  = " + _biasWeightGradients[_biasWeightGradients.length - 1].get(i));
        }

        if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
            Logger.logLine();

        // Calculate gradients for hidden layer neurons, back to front so
        // each layer can use the sum gradients already stored in layer i+1.
        for (int i = _numLayers - 2; i > 0; i--)
        {
            if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
            {
                Logger.logLine("------------------------------------------------------------");
                Logger.logLine("Layer " + i);
                Logger.logLine("------------------------------------------------------------");
            }

            for (int j = 0; j < _neurons[i].size(); j++)
            {
                float actualOutput = _neurons[i].get(j).getOutput();
                float factor = actualOutput * (1 - actualOutput);
                float sumOfGradients = 0.0f;
                float weightGradient = 0.0f;

                // Sum of downstream gradients weighted by the connections
                // from this neuron into layer i+1.
                for (int k = 0; k < _neuronsPerLayer[i + 1]; k++)
                {
                    sumOfGradients += _neurons[i + 1].get(k).getSumGradient() * _neurons[i + 1].get(k).getWeight(j);

                    // if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                    // {
                    // Logger.logLine("i = " + i + ", j = " + j + ", k = " + k);
                    // Logger.logLine("Sum gradient = " + _neurons[i +
                    // 1].get(k).getSumGradient());
                    // Logger.logLine("Weight to output = " + _neurons[i +
                    // 1].get(k).getWeight(j));
                    // }
                }

                for (int k = 0; k < _neuronsPerLayer[i - 1]; k++)
                {
                    float prevNeuronOutput = _neurons[i - 1].get(k).getOutput();
                    weightGradient = factor * prevNeuronOutput * sumOfGradients;
                    _neurons[i].get(j).setWeightGradient(k, weightGradient);

                    if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                        Logger.logLine("Neuron[" + i + "][" + j + "], weightGradient(" + k + ")  = " + _neurons[i].get(j).getWeightGradient(k));
                }

                weightGradient = factor * _bias * sumOfGradients;
                _biasWeightGradients[i - 1].set(j, weightGradient);

                if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                    Logger.logLine("biasWeightGradients[" + (i - 1) + "], weightGradient  = " + _biasWeightGradients[i - 1].get(j));
            }
        }

        if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
            Logger.logLine();
    }

    /**
     * Batch-mode accumulation step: converts the gradients produced by
     * backPropagate() into weight deltas (delta = -learningRate * gradient,
     * plus a momentum term when ANN_USE_MOMENTUM is set) and adds them to
     * the per-weight accumulators. The weights themselves are only changed
     * later, by updateWeightsBatch() at the end of the epoch.
     *
     * NOTE(review): the stored "previous weight delta" includes the momentum
     * contribution (deltaWeight + momentum), so momentum compounds across
     * steps — confirm this matches the intended momentum formulation.
     */
    protected void computeDelta()
    {
        if (MPM_DEBUG_LEVEL.compareTo(DEBUG_LEVEL.NORMAL) > 0)
            Logger.logLine("* Compute weight deltas *\n");

        for (int i = 1; i < _neurons.length; i++)
        {
            if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
            {
                Logger.logLine("------------------------------------------------------------");
                Logger.logLine("Layer " + (i + 1));
                Logger.logLine("------------------------------------------------------------");
            }

            for (int j = 0; j < _neurons[i].size(); j++)
            {
                for (int k = 0; k < _neuronsPerLayer[i - 1]; k++)
                {
                    // Gradient-descent step for this single weight.
                    float deltaWeight = -_data.getLearningRate() * _neurons[i].get(j).getWeightGradient(k);
                    float accWeight = 0.0f;

                    if (ANN_USE_MOMENTUM)
                    {
                        float momentum = _momentumCoefficient * _neurons[i].get(j).getPreviousWeightDelta(k);
                        accWeight = _neurons[i].get(j).getAccWeight(k) + deltaWeight + momentum;
                        _neurons[i].get(j).setPreviousWeightDelta(k, deltaWeight + momentum);

                        if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                            Logger.logLine("momentum = " + momentum);
                    } else
                    {
                        accWeight = _neurons[i].get(j).getAccWeight(k) + deltaWeight;
                    }

                    _neurons[i].get(j).setAccWeight(k, accWeight);

                    if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                    {
                        Logger.logLine("gradient = " + _neurons[i].get(j).getSumGradient());
                        Logger.logLine("delta = " + deltaWeight);
                        Logger.logLine("Neuron[" + i + "][" + j + "], accumulated weight[" + k + "] = " + accWeight);
                    }
                }
            }

            if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                Logger.logLine();
        }

        if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
        {
            Logger.logLine("------------------------------------------------------------");
            Logger.logLine("Bias weights");
            Logger.logLine("------------------------------------------------------------");
        }

        // Same accumulation for the bias weights.
        for (int i = 0; i < _biasWeights.length; i++)
        {
            for (int j = 0; j < _biasWeights[i].size(); j++)
            {
                float deltaWeight = -_data.getLearningRate() * _biasWeightGradients[i].get(j);
                float accWeight = 0.0f;

                if (ANN_USE_MOMENTUM)
                {
                    float momentum = _momentumCoefficient * _previousBiasWeightDeltas[i].get(j);
                    accWeight = _accBiasWeights[i].get(j) + deltaWeight + momentum;
                    _previousBiasWeightDeltas[i].set(j, deltaWeight + momentum);

                    if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                        Logger.logLine("momentum = " + momentum);
                } else
                {
                    accWeight = _accBiasWeights[i].get(j) + deltaWeight;
                }

                _accBiasWeights[i].set(j, accWeight);

                if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                {
                    Logger.logLine("gradient = " + _neurons[i + 1].get(j).getSumGradient());
                    Logger.logLine("delta = " + deltaWeight);
                    Logger.logLine("accBiasWeights[" + (i + 1) + "][" + j + "] = " + accWeight);
                }
            }
        }

        if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
            Logger.logLine();
    }

    /**
     * Online (per-pattern) weight update: applies
     * delta = -learningRate * gradient (plus a momentum term when
     * ANN_USE_MOMENTUM is set) directly to every neuron weight and bias
     * weight, using the gradients stored by backPropagate().
     *
     * NOTE(review): as in computeDelta(), the stored "previous weight delta"
     * includes the momentum contribution, so momentum compounds across
     * updates — confirm this matches the intended formulation.
     */
    protected void updateWeightsNonBatch()
    {
        if (MPM_DEBUG_LEVEL.compareTo(DEBUG_LEVEL.NORMAL) > 0)
            Logger.logLine("* Update weights (non-batch) *\n");

        float deltaWeight;
        float newWeight;

        for (int i = 1; i < _neurons.length; i++)
        {
            deltaWeight = 0.0f;
            newWeight = 0.0f;

            if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
            {
                Logger.logLine("------------------------------------------------------------");
                Logger.logLine("Layer " + i);
                Logger.logLine("------------------------------------------------------------");
            }

            for (int j = 0; j < _neurons[i].size(); j++)
            {
                for (int k = 0; k < _neuronsPerLayer[i - 1]; k++)
                {
                    // Gradient-descent step for this single weight.
                    deltaWeight = -_data.getLearningRate() * _neurons[i].get(j).getWeightGradient(k);

                    if (ANN_USE_MOMENTUM)
                    {
                        float momentum = _momentumCoefficient * _neurons[i].get(j).getPreviousWeightDelta(k);
                        newWeight = _neurons[i].get(j).getWeight(k) + deltaWeight + momentum;
                        _neurons[i].get(j).setPreviousWeightDelta(k, deltaWeight + momentum);

                        if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                            Logger.logLine("momentum = " + momentum);
                    } else
                    {
                        newWeight = _neurons[i].get(j).getWeight(k) + deltaWeight;
                    }

                    _neurons[i].get(j).setWeight(k, newWeight);

                    if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                    {
                        // Logger.logLine("gradient = " +
                        // _neurons[i].get(j).getSumGradient());
                        Logger.logLine("delta = " + deltaWeight);
                        Logger.logLine("Neuron[" + i + "][" + j + "], weight[" + k + "] = " + newWeight);
                    }
                }
            }

            if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                Logger.logLine();
        }

        if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
        {
            Logger.logLine("------------------------------------------------------------");
            Logger.logLine("Bias weights");
            Logger.logLine("------------------------------------------------------------");
        }

        // Same update for the bias weights.
        for (int i = 0; i < _biasWeights.length; i++)
        {
            for (int j = 0; j < _biasWeights[i].size(); j++)
            {
                deltaWeight = -_data.getLearningRate() * _biasWeightGradients[i].get(j);
                float newbiasWeight = 0.0f;

                if (ANN_USE_MOMENTUM)
                {
                    float momentum = _momentumCoefficient * _previousBiasWeightDeltas[i].get(j);
                    newbiasWeight = _biasWeights[i].get(j) + deltaWeight + momentum;
                    _previousBiasWeightDeltas[i].set(j, deltaWeight + momentum);

                    if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                        Logger.logLine("momentum = " + momentum);
                } else
                {
                    newbiasWeight = _biasWeights[i].get(j) + deltaWeight;
                }

                _biasWeights[i].set(j, newbiasWeight);

                if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                {
                    // Logger.logLine("gradient = " + _neurons[i +
                    // 1].get(j).getSumGradient());
                    Logger.logLine("delta = " + deltaWeight);
                    Logger.logLine("biasWeights[" + (i + 1) + "][" + j + "] = " + newbiasWeight);
                }
            }
        }

        if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
            Logger.logLine();
    }

    /**
     * Batch-mode weight update: adds each accumulated delta (built up by
     * computeDelta() over the epoch) to the corresponding weight, then
     * resets the accumulator to zero — for neuron and bias weights alike.
     */
    protected void updateWeightsBatch()
    {
        if (MPM_DEBUG_LEVEL.compareTo(DEBUG_LEVEL.NORMAL) > 0)
            Logger.logLine("* Update weights (batch) *\n");

        // Neuron weights.
        for (int layer = 1; layer < _neurons.length; layer++)
        {
            if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
            {
                Logger.logLine("------------------------------------------------------------");
                Logger.logLine("Layer " + (layer + 1));
                Logger.logLine("------------------------------------------------------------");
            }

            for (int n = 0; n < _neurons[layer].size(); n++)
            {
                Neuron neuron = _neurons[layer].get(n);

                for (int w = 0; w < _neuronsPerLayer[layer - 1]; w++)
                {
                    float updated = neuron.getWeight(w) + neuron.getAccWeight(w);
                    neuron.setWeight(w, updated);

                    if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                    {
                        Logger.logLine("accDelta = " + neuron.getAccWeight(w));
                        Logger.logLine("Neuron[" + layer + "][" + n + "], weight[" + w + "] = " + updated);
                    }

                    // Start the next epoch's accumulation from zero.
                    neuron.setAccWeight(w, 0.0f);
                }
            }

            if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                Logger.logLine();
        }

        if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
        {
            Logger.logLine("------------------------------------------------------------");
            Logger.logLine("Bias weights");
            Logger.logLine("------------------------------------------------------------");
        }

        // Bias weights.
        for (int layer = 0; layer < _biasWeights.length; layer++)
        {
            for (int n = 0; n < _biasWeights[layer].size(); n++)
            {
                float updated = _biasWeights[layer].get(n) + _accBiasWeights[layer].get(n);
                _biasWeights[layer].set(n, updated);

                if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                {
                    Logger.logLine("accDelta = " + _accBiasWeights[layer].get(n));
                    Logger.logLine("biasWeights[" + (layer + 1) + "][" + n + "] = " + updated);
                }

                // Start the next epoch's accumulation from zero.
                _accBiasWeights[layer].set(n, 0.0f);
            }
        }

        if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
            Logger.logLine();
    }

    /**
     * Writes the current weights to "&lt;filepath&gt;&lt;name&gt;_config.txt" in the
     * format expected by initWeightsFromFile(): one space-separated line of
     * neuron weights per non-input layer, followed by one line of bias
     * weights per non-input layer. Any existing file is overwritten.
     *
     * Improvements: try-with-resources replaces the manual flush/close
     * finally block; StringBuilder replaces String concatenation in the
     * loops; the redundant FileNotFoundException catch is folded into
     * IOException (its superclass); the copy-pasted "Initializing neural
     * network from file" debug message is corrected.
     */
    protected void saveWeights()
    {
        if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
            Logger.logLine("* Saving neural network weights to file *\n");

        try (BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(filepath + _name + "_config.txt", false)))
        {
            // One line of incoming weights per non-input layer.
            for (int i = 1; i < _neuronsPerLayer.length; i++)
            {
                StringBuilder line = new StringBuilder();

                for (int j = 0; j < _neuronsPerLayer[i]; j++)
                    for (int k = 0; k < _neuronsPerLayer[i - 1]; k++)
                        line.append(_neurons[i].get(j).getWeight(k)).append(' ');

                if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                    Logger.logLine(line.toString());

                bufferedWriter.write(line.toString());
                bufferedWriter.newLine();
            }

            // One line of bias weights per non-input layer.
            for (int i = 1; i < _numLayers; i++)
            {
                StringBuilder line = new StringBuilder();

                for (int j = 0; j < _neuronsPerLayer[i]; j++)
                    line.append(_biasWeights[i - 1].get(j)).append(' ');

                if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                    Logger.logLine(line.toString());

                bufferedWriter.write(line.toString());
                bufferedWriter.newLine();
            }
        } catch (IOException ex)
        {
            ex.printStackTrace();
        }
    }

    /**
     * Trains the network on the loaded data set. Each epoch feeds every
     * pattern forward, accumulates the mean half-SSD error, back-propagates,
     * and updates weights either per pattern (online) or once per epoch
     * (BATCH_UPDATE). When the mean epoch error drops below 0.0001 the
     * weights are saved and training stops.
     *
     * NOTE(review): the maxIterations stop condition is commented out, so
     * this loops forever if the error never converges below the threshold.
     */
    public void train()
    {
        if (MPM_DEBUG_LEVEL.compareTo(DEBUG_LEVEL.NORMAL) > 0)
            Logger.logLine("######## Training neural network ########\n");

        int iterations = 0;
        float previousError = 0.0f;

        // while (iterations < 1)
        while (true)
        {
            long start = System.currentTimeMillis();

            if (MPM_DEBUG_LEVEL.compareTo(DEBUG_LEVEL.NORMAL) > 0)
            {
                if (iterations % EPOCH_PRINT_CYCLE == 0)
                    Logger.logLine("######## EPOCH " + iterations + " ########\n");
            }

            float error = 0.0f;
            int dataSize = _data.getInputs().size();

            // One pass over the whole training set.
            for (int i = 0; i < dataSize; i++)
            {
                feedForward(_data.getInputs().get(i));

                error += computeError(i);

                if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
                {
                    Logger.logMethod();
                    Logger.log("Output for pattern " + i + " = ");

                    for (int j = 0; j < _neurons[_numLayers - 1].size(); j++)
                    {
                        Logger.log(" " + _neurons[_numLayers - 1].get(j).getOutput());
                    }

                    Logger.logLine();
                    Logger.logLine("Error after pattern " + i + " = " + error + "\n");
                }

                backPropagate(i);

                // Batch mode only accumulates deltas here; online mode
                // applies them immediately.
                if (BATCH_UPDATE)
                    computeDelta();
                else
                {
                    updateWeightsNonBatch();
                }
            }

            // Mean error over the epoch.
            error = error / _data.getInputs().size();

            // if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
            // Logger.logLine("Error sum = " + error);

            if (error < 0.0001f)
            // if (error < 0.001f || iterations == _data.getMaxIterations())
            {
                saveWeights();
                Logger.logLine("Training concluded after " + iterations + " iterations");
                Logger.logLine();
                return;
            }

            if (iterations % EPOCH_PRINT_CYCLE == 0 && MPM_DEBUG_LEVEL.compareTo(DEBUG_LEVEL.SILENT) > 0)
            {
                Logger.logLine("Epoch error = " + error);
                Logger.logLine("Epoch error delta = " + (error - previousError));
                previousError = error;
                long epochTime = (System.currentTimeMillis() - start);
                Logger.logLine("Epoch cycle time = " + epochTime);
                Logger.logLine();
            }

            if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.SILENT && MPM_MODE == MODE.TRAIN)
            {
                Logger.log(iterations + " " + error + "\n");
                Helpers.appendStringToFile("ann_convergence", iterations + " " + error, true);
            }

            // Batch mode applies the accumulated deltas once per epoch.
            if (BATCH_UPDATE)
            {
                updateWeightsBatch();
            }

            iterations++;
        }
    }

    /**
     * Runs one forward pass for the given input pattern and returns the
     * activations of the output layer as a plain float array.
     *
     * @param pattern input values, one per input-layer neuron
     * @return the output-layer activations, in neuron order
     */
    public float[] getOutput(List<Float> pattern)
    {
        final int outputLayer = _neuronsPerLayer.length - 1;

        if (MPM_DEBUG_LEVEL.compareTo(DEBUG_LEVEL.NORMAL) > 0)
        {
            Logger.logLine("#### Get output ####\n");
            Logger.logMethod();
            Logger.log("Input is: ");

            for (Float value : pattern)
            {
                Logger.log(value + " ");
            }

            Logger.logLine();
            Logger.logLine();
        }

        float[] output = new float[_neuronsPerLayer[outputLayer]];

        feedForward(pattern);

        int outputCount = _neurons[outputLayer].size();

        for (int n = 0; n < outputCount; n++)
        {
            output[n] = _neurons[outputLayer].get(n).getOutput();
        }

        if (MPM_DEBUG_LEVEL.compareTo(DEBUG_LEVEL.NORMAL) > 0)
        {
            Logger.logMethod();
            Logger.log("Output is: ");

            for (float value : output)
            {
                Logger.log(value + " ");
            }

            Logger.logLine();
            Logger.logLine();
        }

        return output;
    }

    /** Returns the data set currently attached to this network. */
    public Data getData()
    {
        return _data;
    }

    /**
     * Replaces the data set used for training/testing.
     *
     * @param data the new data set (stored as-is, not copied)
     */
    public void setData(Data data)
    {
        _data = data;
    }

    // ////////////////////////////// XOR TEST
    /**
     * Seeds the network with the fixed weights and inputs of the classic
     * two-input XOR worked example (2-2-1 topology), then optionally dumps
     * every weight and bias when running at VERBOSE debug level.
     */
    protected void initXORTest()
    {
        if (MPM_DEBUG_LEVEL.compareTo(DEBUG_LEVEL.NORMAL) > 0)
        {
            Logger.logLine("* XOR test initialization *\n");
        }

        // Fixed initial state for the XOR example.
        _neurons[0].get(0).setOutput(1.0f); // input 0
        _neurons[0].get(1).setOutput(1.0f); // input 1

        _neurons[1].get(0).setWeight(0, 0.5f); // w13
        _neurons[1].get(0).setWeight(1, 0.4f); // w23

        _neurons[1].get(1).setWeight(0, 0.9f); // w14
        _neurons[1].get(1).setWeight(1, 1.0f); // w24

        _neurons[2].get(0).setWeight(0, -1.2f); // w35
        _neurons[2].get(0).setWeight(1, 1.1f); // w45

        _biasWeights[0].set(0, 0.8f); // θ3
        _biasWeights[0].set(1, -0.1f); // θ4
        _biasWeights[1].set(0, 0.3f); // θ5

        if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
        {
            // Dump every connection weight, layer by layer.
            for (int layer = 1; layer < _numLayers; layer++)
            {
                Logger.logLine("------------------------------------------------------------");
                Logger.logLine("Layer " + (layer + 1));
                Logger.logLine("------------------------------------------------------------");

                for (int neuron = 0; neuron < _neuronsPerLayer[layer]; neuron++)
                {
                    for (int input = 0; input < _neuronsPerLayer[layer - 1]; input++)
                    {
                        Logger.logLine("Neuron[" + layer + "][" + neuron + "], input " + input + ", weight = " + _neurons[layer].get(neuron).getWeight(input));
                    }
                }

                Logger.logLine();
            }

            Logger.logLine();

            // Dump the bias weights for each layer after the input layer.
            for (int layer = 0; layer < _biasWeights.length; layer++)
            {
                for (int neuron = 0; neuron < _biasWeights[layer].size(); neuron++)
                {
                    Logger.logLine("biasWeights[" + layer + "][" + neuron + "], weight = " + _biasWeights[layer].get(neuron));
                }
            }

            Logger.logLine();
        }
    }

    // ////////////////////////////// LIAPIS TEST
    /**
     * Seeds the network with the fixed weights of the Liapis worked example
     * (one input, two hidden neurons, one output), then optionally dumps
     * every weight and bias when running at VERBOSE debug level.
     */
    protected void initLiapisTest()
    {
        if (MPM_DEBUG_LEVEL.compareTo(DEBUG_LEVEL.NORMAL) > 0)
        {
            Logger.logLine("* Liapis test initialization *\n");
        }

        // Fixed initial state for the Liapis example.
        _neurons[1].get(0).setWeight(0, 0.5f); // w13

        _neurons[1].get(1).setWeight(0, -0.7f); // w14

        _neurons[2].get(0).setWeight(0, 1.0f); // w35
        _neurons[2].get(0).setWeight(1, 0.8f); // w45

        _biasWeights[0].set(0, -1.0f); // θ3
        _biasWeights[0].set(1, 0.2f); // θ4
        _biasWeights[1].set(0, -0.1f); // θ5

        if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
        {
            // Dump every connection weight, layer by layer.
            for (int layer = 1; layer < _numLayers; layer++)
            {
                Logger.logLine("------------------------------------------------------------");
                Logger.logLine("Layer " + (layer + 1));
                Logger.logLine("------------------------------------------------------------");

                for (int neuron = 0; neuron < _neuronsPerLayer[layer]; neuron++)
                {
                    for (int input = 0; input < _neuronsPerLayer[layer - 1]; input++)
                    {
                        Logger.logLine("Neuron[" + layer + "][" + neuron + "], input " + input + ", weight = " + _neurons[layer].get(neuron).getWeight(input));
                    }
                }

                Logger.logLine();
            }

            Logger.logLine();

            // Dump the bias weights for each layer after the input layer.
            for (int layer = 0; layer < _biasWeights.length; layer++)
            {
                for (int neuron = 0; neuron < _biasWeights[layer].size(); neuron++)
                {
                    Logger.logLine("biasWeights[" + layer + "][" + neuron + "], weight = " + _biasWeights[layer].get(neuron));
                }
            }

            Logger.logLine();
        }
    }

    /**
     * Feeds every pattern of the attached data set through the network once
     * (no weight updates) and reports the mean error per pattern.
     *
     * @return the mean error over the data set, or 0 when the set is empty
     */
    public float testOnData()
    {
        int dataSize = _data.getInputs().size();

        // Guard: averaging over an empty data set would divide 0 by 0 (NaN).
        if (dataSize == 0)
        {
            Logger.logLine("Data set is empty; nothing to test\n");
            return 0.0f;
        }

        float error = 0.0f;

        for (int i = 0; i < dataSize; i++)
        {
            feedForward(_data.getInputs().get(i));
            error += computeError(i);

            if (MPM_DEBUG_LEVEL == DEBUG_LEVEL.VERBOSE)
            {
                Logger.logMethod();
                Logger.log("Output for pattern " + i + " = ");

                for (int j = 0; j < _neurons[_numLayers - 1].size(); j++)
                {
                    Logger.log(" " + _neurons[_numLayers - 1].get(j).getOutput());
                }

                Logger.logLine();
            }
        }

        // Mean error per pattern (reuse the size computed above).
        error = error / dataSize;

        Logger.logLine("Total error for data set is: " + error + "\n");

        if (error < 0.001f)
            Logger.logLine("Network passed test on data set");
        else
            Logger.logLine("Network failed test on data set");

        return error;
    }
}