#include "neuralnetwork.h"

#include "utils/myMath.h"

//*************************************************************************************************
// Constructor
//
// Initializes every member so that clear()/setWeight()/runNetwork() are safe
// to call before createNetwork(): previously only _layers was set, leaving
// _neuronsPerLayer, _weights and _neurons indeterminate.
//*************************************************************************************************
NeuralNetwork::NeuralNetwork()
{
    _layers = 0;
    _neurons = 0;
    _neuronsPerLayer = nullptr;
    _weights = nullptr;
}

//*************************************************************************************************
// Destructor
//
// Releases the network's dynamically allocated tables by delegating to clear().
//*************************************************************************************************
NeuralNetwork::~NeuralNetwork()
{
    clear();
}

//*************************************************************************************************
// Function clear
//*************************************************************************************************
void NeuralNetwork::clear()
{
    if (_layers > 0)
    {
        delete [] _neuronsPerLayer;

        for (int i=0; i<_layers; i++)
            delete [] _weights[i];
        delete [] _weights;

        _layers = 0;
    }
}


//*************************************************************************************************
// Function createNetwork
//
// Allocates the layer table and a dense _neurons x _neurons weight matrix.
// All weights start at 0.0 so that getValue() never reads indeterminate
// memory when the caller sets only a subset of them via setWeight().
/// @param layers The number of layers
/// @param neuronsPerLayer The number of neuron on each layer
//*************************************************************************************************
void NeuralNetwork::createNetwork(int layers, int* neuronsPerLayer)
{
    // Delete the old network
    clear();

    _layers = layers;
    _neuronsPerLayer = new int[layers];
    _neurons = 0;

    // Create the layers
    for (int i=0; i<layers; i++)
    {
        _neuronsPerLayer[i] = neuronsPerLayer[i];
        _neurons += neuronsPerLayer[i];
    }

    // Create the weights table.
    // The trailing "()" value-initializes every row to 0.0 — the old code
    // left the weights indeterminate, so evaluating the network before
    // every weight was set read uninitialized doubles (UB).
    _weights = new double*[_neurons];
    for (int i=0; i<_neurons; i++)
        _weights[i] = new double[_neurons]();
}

//*************************************************************************************************
// Function createNetwork
/// @param from The source neuron
/// @param to The target neuron
/// @param weight The Synaptic weight
//*************************************************************************************************
void NeuralNetwork::setWeight(int from, int to, double weight)
{
    if (_layers > 0)
        _weights[from][to] = weight;
}

//*************************************************************************************************
// Function getValue
//
// Recursively evaluates one neuron. Layer 0 neurons echo the corresponding
// input; deeper neurons compute tanh of the weighted sum of ALL neurons in
// the preceding layers (the weight matrix is dense over neuron indices, so
// unset weights contribute 0).
//
// NOTE(review): the recursion re-evaluates shared ancestors once per caller,
// so cost grows quickly with depth — consider memoizing per-neuron values.
/// @param inputs The inputs
/// @param neuron The neuron id
/// @return The value of the neuron
//*************************************************************************************************
double NeuralNetwork::getValue(double* inputs, int neuron)
{
    double val = 0;

    // Find the concerned layer: walk the cumulative neuron counts until
    // 'neuron' falls below the running total. After the loop, 'count' is the
    // cumulative neuron count up to and including the neuron's layer.
    int layer = 0;
    int count;
    for (count=_neuronsPerLayer[layer]; neuron>=count; count+=_neuronsPerLayer[layer])
        layer++;

    // Set the bias: the LAST neuron of each layer acts as a constant bias
    // unit with value 1.0 — except the very last neuron of the network
    // (presumably an output neuron, so it must carry a computed value;
    // TODO confirm this convention against the network builder).
    if (neuron == count-1 && neuron != _neurons-1)
        return 1.0;

    if (layer == 0)
        return inputs[neuron];
    else
    {
        // Calculate the neuron offset: index of the first neuron in this
        // layer, i.e. the number of neurons in all preceding layers.
        int offset = 0;
        for (int i=0; i<layer; i++)
            offset += _neuronsPerLayer[i];

        // Calculate the total value of the neuron: weighted sum over every
        // neuron in the preceding layers (indices 0..offset-1). Weights to
        // non-adjacent layers are simply left at their default, so only
        // connections the caller set via setWeight() contribute.
        for (int i=0; i<offset; i++)
            val += _weights[i][neuron] * getValue(inputs, i); 

        // Squash through the activation function (project-provided tanh).
        double val2 = Math::dtanh(val);

        return val2;
    }
}


//*************************************************************************************************
// Function runNetwork
//
// Evaluates the whole network: each output slot receives the value of the
// corresponding neuron in the final layer.
//
// At this moment, only manages connections between adjacent layers
/// @param[in,out] inputs The input of the network
/// @param[in,out] outputs The output of the network
/// @return A boolean that says if successful or not
//*************************************************************************************************
bool NeuralNetwork::runNetwork(double* inputs, double* outputs)
{
    // Nothing to evaluate without a network.
    if (_layers == 0)
        return false;

    // The output neurons occupy the tail of the neuron index space.
    const int outputCount = _neuronsPerLayer[_layers-1];
    const int firstOutput = _neurons - outputCount;

    for (int i=0; i<outputCount; i++)
        outputs[i] = getValue(inputs, firstOutput + i);

    return true;
}
