#include "rna/backpropagation.h"

#include <time.h>

#include <algorithm>
#include <cstdlib>
#include <fstream>
#include <iostream>
#include <limits>
#include <random>
#include <utility>
#include <vector>

#include "network.h"

// Builds the per-layer scratch buffers used during training.
// `network` is borrowed, not owned; it must outlive this trainer.
Backpropagation::Backpropagation(Network *network, data_t learningRate, data_t momentum) {
    _network = network;

    size_t layersCount = _network->layersCount(),
            inputCount = _network->neuronsCount(0),
            neuronsCount;

    _learningRate = learningRate;
    _momentum = momentum;
    _iterations = 1000000;   // default epoch cap; see setMaxIterations()
    _errorThresh = 0.00001;  // default stop threshold; see setErrorThresh()

    _accumulators = new data_t* [layersCount];
    _outputs = new data_t* [layersCount];
    _errors = new data_t* [layersCount];
    _changes = new data_t** [layersCount];
    _biasChanges = new data_t* [layersCount];

    // Layer 0 is the input layer: it has no weights, biases or errors, only a
    // copy of the input vector. Null the unused slots so that any per-layer
    // teardown that walks index 0 does not free garbage pointers.
    _accumulators[0] = NULL;
    _errors[0] = NULL;
    _changes[0] = NULL;
    _biasChanges[0] = NULL;
    _outputs[0] = new data_t [inputCount];

    for ( size_t l = 1; l < layersCount; l++ ) {
        neuronsCount = _network->neuronsCount(l);
        _biasChanges[l] = new data_t[neuronsCount];
        _accumulators[l] = new data_t [neuronsCount];
        _outputs[l] = new data_t [neuronsCount];
        _errors[l] = new data_t [neuronsCount];
        _changes[l] = new data_t* [neuronsCount];
        for ( size_t n = 0; n < neuronsCount; n++ ) {
            // One momentum slot per incoming synapse, all starting at zero.
            _changes[l][n] = new data_t [inputCount];
            _biasChanges[l][n] = 0;
            for ( size_t s = 0; s < inputCount; s++ )
                _changes[l][n][s] = 0;
        }
        inputCount = neuronsCount;  // this layer feeds the next one
    }
}

// Releases every buffer allocated by the constructor exactly once.
//
// The previous version freed `_changes` three times (an unmatched
// `delete _changes` on a `new[]` pointer, plus two overlapping
// MatrixUtils::destroy calls over the same inner rows) and asked
// MatrixUtils::destroy to walk layer 0 of arrays whose index-0 slot
// was never initialized. Each allocation now has a single owner here.
Backpropagation::~Backpropagation() {
    size_t layersCount = _network->layersCount();

    // Layer 0 only owns the input copy in _outputs[0].
    delete [] _outputs[0];

    for ( size_t l = 1; l < layersCount; l++ ) {
        // Frees each neuron's change row and the row-pointer array itself.
        MatrixUtils::destroy(_changes[l], _network->neuronsCount(l));
        delete [] _biasChanges[l];
        delete [] _errors[l];
        delete [] _accumulators[l];
        delete [] _outputs[l];
    }

    delete [] _changes;
    delete [] _biasChanges;
    delete [] _errors;
    delete [] _accumulators;
    delete [] _outputs;
}

// Loads a training set from `filename`, trains on it, and frees it.
// Returns the number of epochs actually run.
// Throws Backpropagation::FileNotFound (propagated from load()) when the
// file cannot be opened.
size_t Backpropagation::train(const std::string &filename) {
    std::vector<const input_t*> data = this->load(filename);

    size_t iterations;
    try {
        iterations = train(data);
    } catch (...) {
        // Don't leak the loaded samples if training throws.
        this->destroy(data);
        throw;
    }

    this->destroy(data);
    return iterations;
}

// Reads a whitespace-separated training-set file: first the sample count,
// then, per sample, inputSize input values followed by outputSize target
// values. Ownership of the returned pairs (and their arrays) passes to the
// caller; release them with destroy().
// Throws Backpropagation::FileNotFound when the file cannot be opened.
std::vector<const input_t*> Backpropagation::load(const std::string &filename) {
    std::ifstream stream(filename.c_str());

    // Input width comes from layer 0, output width from the last layer.
    size_t inputSize = _network->neuronsCount(0),
            outputSize = _network->neuronsCount(_network->layersCount()-1),
            length;

    data_t *input, *output;

    if ( !stream.is_open() )
        throw Backpropagation::FileNotFound;

    stream >> length;
    std::vector< const input_t* > data(length);

    // NOTE(review): extraction failures are not checked — a short or
    // malformed file leaves some array entries uninitialized. Assumes the
    // file is well-formed; TODO confirm with the file-format producer.
    for ( size_t i = 0; i < length; i++ ) {
        input = new data_t[inputSize];
        output = new data_t[outputSize];

        for ( size_t j = 0; j < inputSize; j++ )
            stream >> input[j];
        for ( size_t j = 0; j < outputSize; j++ )
            stream >> output[j];

        data[i] = new std::pair<data_t*, data_t*> (input, output);
    }

    stream.close();
    return data;
}

// Frees every sample produced by load() and empties the vector.
//
// Fixes two defects in the previous version: the iterator was never
// advanced, so the loop deleted the same pair forever (double free /
// crash), and the input/output arrays were allocated with new[] but
// released with scalar delete.
void Backpropagation::destroy(std::vector<const input_t*>& data) {
    for ( size_t i = 0; i < data.size(); i++ ) {
        const input_t *pair = data[i];
        delete [] pair->first;   // allocated as new data_t[inputSize] in load()
        delete [] pair->second;  // allocated as new data_t[outputSize] in load()
        delete pair;
    }

    data.clear();
}

// Runs epochs over `data` (shuffled each epoch) until the mean per-sample
// error drops below _errorThresh or _iterations epochs have run.
// Returns the number of epochs executed.
size_t Backpropagation::train(const std::vector<const input_t*>& data) {
    size_t iterations = this->_iterations,
            size = data.size(),
            i = 0;

    // Guard: an empty set would divide by zero below; nothing to train on.
    if ( size == 0 )
        return 0;

    std::vector< const input_t* > d(data);

    // std::random_shuffle was removed in C++17; std::shuffle with an
    // explicit engine is its replacement.
    std::mt19937 rng((std::random_device())());

    data_t errorThresh = _errorThresh,
            error;

    std::vector< const input_t* >::const_iterator begin, end;

    do {
        error = 0;
        std::shuffle(d.begin(), d.end(), rng);
        begin = d.begin();
        end = d.end();
        while ( begin != end ) {
            error += train(**begin);
            ++begin;
        }
        error /= size;  // mean error for this epoch
        i++;
    } while ( i < iterations && error >= errorThresh );

    return i;
}

// Trains on one (input, target) sample: forward pass, backward pass,
// then commit the accumulated weight/bias changes to the network.
// Returns the sample's error as computed by backward().
data_t Backpropagation::train(const std::pair<data_t*, data_t*> &data) {
    forward(data.first);
    const data_t sampleError = backward(data.second);
    _network->applyChages(_changes, _biasChanges);
    return sampleError;
}

void Backpropagation::forward(const data_t *input) {
    size_t layersCount = _network->layersCount(),
            inputCount = _network->neuronsCount(0),
            count;

    data_t ***synapses = _network->synapses(),
            **biases = _network->biases(),
            accumulator;

    std::copy(input, input+inputCount, _outputs[0]);

    for ( size_t l = 1; l < layersCount; l++ ) {
        count = _network->neuronsCount(l);
        for ( size_t n = 0; n < count ; n++ ) {
            accumulator = biases[l][n];
            for ( size_t s = 0; s < inputCount; s++ ) {
                accumulator += synapses[l][n][s] * _outputs[l-1][s];
            }
            _accumulators[l][n] = accumulator;
            _outputs[l][n] = _network->activation(l)->function(accumulator);
        }
        inputCount = count;
    }
}

// Backward pass: computes per-neuron error deltas against `target`, then
// converts them into pending weight/bias changes. Returns the output error.
data_t Backpropagation::backward(data_t *target) {
    const data_t error = calculateErrors(target);
    calculateChanges();
    return error;
}

// Training stops once the mean epoch error drops below this threshold.
void Backpropagation::setErrorThresh(data_t errorThresh) {
    this->_errorThresh = errorThresh;
}

void Backpropagation::setMaxIterations(size_t iterations) {
    _iterations = iterations;
}

void Backpropagation::calculateChanges() {

    size_t inputCount = _network->neuronsCount(0),
            layersCount = _network->layersCount(),
            count;

    data_t learningrate = _learningRate,
            momentum = _momentum, e;

    for ( size_t l = 1; l < layersCount; l++ ) {
        count = _network->neuronsCount(l);
        for ( size_t n = 0; n < count; n++ ) {
            e = _errors[l][n];
            for ( size_t s = 0; s < inputCount; s++ ) {
                _changes[l][n][s] = ( momentum * _changes[l][n][s] )
                        + ( learningrate * e * _outputs[l-1][s] );
            }
            _biasChanges[l][n] = momentum * _biasChanges[l][n] + learningrate * e;
        }
        inputCount = count;
    }
}

// Computes the error delta for every neuron (stored in _errors) and
// returns the mean squared error of the output layer against `target`.
//
// Fixes three defects in the previous version:
//  - `error /= l` divided the summed squared error by the layer INDEX
//    instead of the number of output neurons;
//  - `outputCount` was a floating-point local read uninitialized on the
//    first hidden-layer pass; it is now a size_t seeded with the output
//    layer's neuron count before the hidden-layer loop;
//  - the hidden-layer sum indexed `weights[l][s][n]`, which is out of
//    bounds: forward() uses synapses[l][n][s] = (layer l, neuron n,
//    input s from layer l-1), so the weight feeding neuron s of layer
//    l+1 from neuron n of layer l is weights[l+1][s][n].
data_t Backpropagation::calculateErrors(const data_t *target) {
    size_t l = _network->layersCount()-1,
            count, outputCount;

    data_t ***weights = _network->synapses(),
            error, neuronOutput, e;

    // Output layer: delta = f'(accumulator) * (target - output).
    count = _network->neuronsCount(l);
    error = 0;
    for ( size_t n = 0; n < count; n++ ) {
        neuronOutput = _outputs[l][n];
        e = target[n] - neuronOutput;
        _errors[l][n] = _network->activation(l)->derivative(_accumulators[l][n], neuronOutput) * e;
        error += e * e;  // e*e instead of pow(e, 2): same value, no libm call
    }
    error /= count;  // mean over output neurons

    // Hidden layers: propagate deltas back through the weights of the
    // layer above. `outputCount` is always the neuron count of layer l+1.
    outputCount = count;
    l--;
    for (; l > 0; l-- ) {
        count = _network->neuronsCount(l);
        for ( size_t n = 0; n < count; n++ ) {
            e = 0;
            for ( size_t s = 0; s < outputCount; s++ ) {
                e += _errors[l+1][s] * weights[l+1][s][n];
            }
            _errors[l][n] = _network->activation(l)->derivative(_accumulators[l][n], _outputs[l][n]) * e;
        }
        outputCount = count;
    }
    return error;
}

// Reseeds rand() from the clock and fills every synapse weight and bias
// with a uniform value in [0, 1].
void Backpropagation::randomize() {
    srand(time(NULL));

    data_t ***synapses = _network->synapses();
    data_t **biases = _network->biases();

    size_t layersCount = _network->layersCount();
    size_t prevCount = _network->neuronsCount(0);

    for ( size_t layer = 1; layer < layersCount; layer++ ) {
        size_t neurons = _network->neuronsCount(layer);
        for ( size_t neuron = 0; neuron < neurons; neuron++ ) {
            // Weights first, then the bias — preserves the rand() sequence.
            for ( size_t prev = 0; prev < prevCount; prev++ )
                synapses[layer][neuron][prev] = ((data_t)rand()) / RAND_MAX;
            biases[layer][neuron] = ((data_t)rand()) / RAND_MAX;
        }
        prevCount = neurons;
    }
}
