/* 
 * File:   Network.cpp
 * Author: André
 * 
 * Created on November 30, 2012, 9:09 PM
 */

#include "Network.h"
#include <stdlib.h>
#include <math.h>
#include <conio.h>
#include <stdio.h>

#define DEBUG

// Builds the network topology described by the given data set: allocates the
// input/output/hidden neuron arrays and the single bias neuron, then creates
// and initializes all inter-layer connections.
Network :: Network(DataSet* _data)
{
    // basic dimensions and training parameters from the data set
    numberOfInputNeurons  = _data->getNumInputs();
    numberOfOutputNeurons = _data->getNumOutputs();

    learningRate   = _data->getLearningRate();
    errorTolerance = _data->getErrorTolerance();

    ////////////// creating neuron structures //////////////
    inputNeuronArray  = new Neuron[numberOfInputNeurons];       // one neuron per input
    outputNeuronArray = new Neuron[numberOfOutputNeurons];      // one neuron per output

    // a single bias neuron feeds every non-input layer; its value is fixed at 1
    biasNeuron = new Neuron();
    biasNeuron->setValue(1);

    numberOfHiddenLayers          = _data->getNumHiddenLayers();            // how many hidden layers
    numberOfNeuronsPerHiddenLayer = _data->getNumOfNeuronsPerHiddenLayer(); // per-layer neuron counts

    // one neuron array per hidden layer, sized from the data set
    hiddenNeuronArray = new Neuron*[numberOfHiddenLayers];

    for (int layer = 0; layer < numberOfHiddenLayers; layer++)
        hiddenNeuronArray[layer] = new Neuron[numberOfNeuronsPerHiddenLayer[layer]];

    createConnections();    // allocate the connection matrices
    initializeWeights();    // wire neurons together and randomize weights

    data = _data;
}

void Network :: createConnections()
{
    // create connection matrix
    // index 1 - layer of outbound connection
    // index 2 - neuron of outbound connection
    // index 3 - neuron of inbound connection
    // e.g. connectionArray[2][3][2] - connection leaving from 3rd layer, from 4th node to 3rd node on 4th layer
    //                     [x][y][z] - connection leaving from layer x-1, from y-1th node to z-1th node on x+1th layer
    
    connectionArray = new Connection**[numberOfConnections()];                  // for n layers, there will be n-1 set of connections (between layers)
    
    for (int i = 0; i < numberOfConnections(); i++)
    {
        if (i == 0)                                                             // when in the first layer
        {
            connectionArray[i] = new Connection*[numberOfInputNeurons];

            for (int j = 0; j < numberOfInputNeurons; j++)
                connectionArray[i][j] = new Connection[numberOfNeuronsPerHiddenLayer[i]];  

        }

        else

        if (i == numberOfConnections() - 1)                                     // when in second to last layer
        {
            connectionArray[i] = new Connection*[numberOfNeuronsPerHiddenLayer[numberOfHiddenLayers -1]];

            for (int j = 0; j < numberOfNeuronsPerHiddenLayer[numberOfHiddenLayers - 1]; j++)
                connectionArray[i][j] = new Connection[numberOfOutputNeurons];
        }

        else                                                                    // every other layer in the middle
        {
            connectionArray[i] = new Connection*[numberOfNeuronsPerHiddenLayer[i-1]];

            for (int j = 0; j < numberOfNeuronsPerHiddenLayer[i-1]; j++)
                connectionArray[i][j] = new Connection[numberOfNeuronsPerHiddenLayer[i]];   
        }

    }

    // create bias connection arrays
    biasConnections = new Connection*[numberOfConnections()];                   // biases connect to every layer BUT the input layer

    for (int i = 0; i < numberOfConnections(); i++)
    {
        if (i == numberOfConnections() - 1)                                     // when connecting to the output layer
            biasConnections[i] = new Connection[numberOfOutputNeurons]; 
        else
            biasConnections[i] = new Connection[numberOfNeuronsPerHiddenLayer[i]]; // when connecting to every other hidden layer
    }
    
}

// Releases every allocation made by the constructor and createConnections().
// The destructor was previously empty, leaking all neuron and connection
// arrays. Deallocation mirrors the allocation structure exactly; note that
// numberOfNeuronsPerHiddenLayer is owned by the DataSet and is NOT freed here.
Network :: ~Network()
{
    const int connectionSets = numberOfConnections();

    for (int set = 0; set < connectionSets; set++)
    {
        // number of source neurons for this connection set (same formula
        // as in createConnections: input layer for set 0, otherwise the
        // preceding hidden layer)
        int fromCount;
        if (set == 0)
            fromCount = numberOfInputNeurons;
        else
            fromCount = numberOfNeuronsPerHiddenLayer[set - 1];

        for (int from = 0; from < fromCount; from++)
            delete[] connectionArray[set][from];

        delete[] connectionArray[set];
        delete[] biasConnections[set];
    }

    delete[] connectionArray;
    delete[] biasConnections;

    for (int layer = 0; layer < numberOfHiddenLayers; layer++)
        delete[] hiddenNeuronArray[layer];
    delete[] hiddenNeuronArray;

    delete[] inputNeuronArray;
    delete[] outputNeuronArray;
    delete biasNeuron;
}

// Logistic activation function: 1 / (1 + e^-value).
// Uses exp() directly rather than pow(M_E, -value): pow goes through a
// general x^y implementation, which is slower and can be less accurate,
// and M_E is a non-standard extension of <math.h>.
double Network :: sigmoid(double value)
{
    return 1.0 / (1.0 + exp(-value));
}

// Total layer count: the input layer, every hidden layer, and the output layer.
int Network :: totalNumberOfLayers()
{
    return numberOfHiddenLayers + 2;
}

// n layers are joined by n-1 inter-layer connection sets.
int Network :: numberOfConnections()
{
    return totalNumberOfLayers() - 1;
}


// Loads one training record from the given bin of the data set: copies its
// input values onto the input neurons and its expected output values onto
// the output neurons.
void Network :: initializeNeurons(int bin, int record)
{
    double* inputValues  = data->getInputEntryFromBin(bin, record);
    double* outputValues = data->getOutputEntryFromBin(bin, record);

    for (int n = 0; n < numberOfInputNeurons; n++)
    {
        inputNeuronArray[n].setValue(inputValues[n]);
#ifdef DEBUG
        cout << inputNeuronArray[n].getValue() << " ";
#endif
    }

    for (int n = 0; n < numberOfOutputNeurons; n++)
    {
        outputNeuronArray[n].setValue(outputValues[n]);
#ifdef DEBUG
        cout << outputNeuronArray[n].getValue() << " " << endl;
#endif
    }
}

// Assigns a random initial weight to every inter-layer connection, wires each
// connection to its source ("from") and destination ("to") neurons, and
// finally connects the single bias neuron -- with its own random weights --
// to every hidden layer and to the output layer.
// Must run after createConnections() has allocated the connection matrices.
void Network :: initializeWeights()
{
    
#ifdef DEBUG
    cout << "\n\n--- ASSIGNING WEIGHT TO CONNECTIONS ---" << endl;
#endif

   ////////////// populate weights  //////////////
    // Connection set i joins layer i to layer i+1 (layer 0 = input layer);
    // each branch mirrors the dimensions allocated in createConnections().
    for (int i = 0; i < numberOfConnections(); i++)
    {
        if (i == 0)                                                             // input layer -> first hidden layer
        {
            for (int j = 0; j < numberOfInputNeurons; j++)
                for (int k = 0; k < numberOfNeuronsPerHiddenLayer[i]; k++)
                {
                    connectionArray[i][j][k].setWeight(randomWeight());
#ifdef DEBUG
        cout << "Connection from layer " << i << " -> " << i+1 << " : Neuron " << j << " ->  " << k << " was assigned weight = "  << connectionArray[i][j][k].getWeight() << endl;
#endif
                }
        }
        
        else
        
            if (i == numberOfConnections() - 1)                                 // last hidden layer -> output layer
            {
                for (int j = 0; j < numberOfNeuronsPerHiddenLayer[numberOfHiddenLayers - 1]; j++)
                    for (int k = 0; k < numberOfOutputNeurons; k++)
                    {
                        connectionArray[i][j][k].setWeight(randomWeight());
#ifdef DEBUG
        cout << "Connection from layer " << i << " -> " << i+1 << " : Neuron " << j << " ->  " << k << " was assigned weight = "  << connectionArray[i][j][k].getWeight() << endl;
#endif
                    }
            }
        
        else                                                                    // hidden layer -> hidden layer
            
            for (int j = 0; j < numberOfNeuronsPerHiddenLayer[i-1]; j++)
                for (int k = 0; k < numberOfNeuronsPerHiddenLayer[i]; k++)
                {
                    connectionArray[i][j][k].setWeight(randomWeight());
#ifdef DEBUG
        cout << "Connection from layer " << i << " -> " << i+1 << " : Neuron " << j << " ->  " << k << " was assigned weight = "  << connectionArray[i][j][k].getWeight() << endl;
#endif
                }     
    }
 
#ifdef DEBUG
    cout << "\n\n--- CONNECTING NEURONS BETWEEN LAYERS ---" << endl;
#endif
   
    
    //////////////// connect input layer to first hidden layer ////////////////
        
        for (int i = 0; i < numberOfInputNeurons; i++)
            for (int j = 0; j < numberOfNeuronsPerHiddenLayer[0]; j++)
            {
                connectionArray[0][i][j].connect(&inputNeuronArray[i],&hiddenNeuronArray[0][j]);
#ifdef DEBUG
        cout << "Connected, layer 0 -> 1 : Neuron " << i << " -> " << j << endl;
#endif
            }
    
    //////////////// connect remaining hidden layers  ////////////////
    
  if (numberOfHiddenLayers > 1) // connect inner hidden layers
    {
    for (int i = 1; i < numberOfHiddenLayers; i++)                                                  // iterate over all hidden layers
        for (int j = 0; j < numberOfNeuronsPerHiddenLayer[i-1]; j++)                                      // connect each node on outbound layer
            for (int k = 0; k < numberOfNeuronsPerHiddenLayer[i]; k++)                                //            to every node on the next layer (inbound) 
            {
                connectionArray[i][j][k].connect(&hiddenNeuronArray[i-1][j], &hiddenNeuronArray[i][k]);
#ifdef DEBUG
        cout << "Connected, layer " << i << " -> " << i+1 << " : Neuron " << j << " -> " << k << endl;
#endif
            }
    }
    
    //////////////// connect last hidden layer to output layer ////////////////
    
    // connection set index numberOfHiddenLayers is the last one (== numberOfConnections() - 1)
    for (int i = 0; i < numberOfNeuronsPerHiddenLayer[numberOfHiddenLayers-1]; i++)
        for (int j = 0; j < numberOfOutputNeurons; j++)
        {
           connectionArray[numberOfHiddenLayers][i][j].connect(&hiddenNeuronArray[numberOfHiddenLayers-1][i],&outputNeuronArray[j] );
#ifdef DEBUG
        cout << "Connected, layer " << numberOfHiddenLayers << " -> " << numberOfHiddenLayers+1 << " : Neuron " << i << " -> " << j << endl;
#endif
        }
    
    //////////////// set weights and connect bias neuron to hidden layers AND output layer  ////////////////

#ifdef DEBUG
    cout << "\n\n--- INITIALIZING AND CONNECTING BIAS NEURON TO OTHER NEURONS ---" << endl;
#endif
    
    // biasConnections[i] feeds layer i+1: the last set feeds the output
    // layer, every other set feeds hidden layer i
    for (int i = 0; i < numberOfConnections(); i++)
    {
        if (i == numberOfConnections() - 1)
            for (int j = 0; j < numberOfOutputNeurons; j++)
            {
                biasConnections[i][j].setWeight(randomWeight());
                biasConnections[i][j].connect(biasNeuron, &outputNeuronArray[j]);
                
#ifdef DEBUG
        cout << "Bias neuron connected to layer " << i+1 << "(output) Neuron #" << j << " : has weight " << biasConnections[i][j].getWeight() << endl;        
#endif
               
            }
        else
            for (int j = 0; j < numberOfNeuronsPerHiddenLayer[i]; j++)
            {
                biasConnections[i][j].setWeight(randomWeight());
                biasConnections[i][j].connect(biasNeuron, &hiddenNeuronArray[i][j]);
                
#ifdef DEBUG
        cout << "Bias neuron connected to layer " << i+1 << " Neuron #" << j << " : has weight " << biasConnections[i][j].getWeight() << endl;        
#endif
            }
    }
    
    
}

/**
 * Propagates the values currently loaded on the input neurons forward through
 * the network, layer by layer, then returns the mean absolute error between
 * the (scaled) network outputs and the expected outputs of the given entry.
 *
 * Input values feed the first hidden layer raw; every later layer applies the
 * sigmoid to its inputs' values before weighting.
 *
 * Fix: the per-output errors were stored in a `new double[...]` array that
 * was never freed, leaking memory on every call. The errors only feed a sum,
 * so they are now accumulated directly in a local; the expected-output lookup
 * is also hoisted out of the error loop.
 */
double Network :: feedForward(int bin, int entry)
{
    double sum = 0;                                                             // running weighted sum for the neuron being updated
    double curWeight;                                                           // weight of the connection being inspected
    double curValue;                                                            // value of the connection's source neuron
    
    // -------------------------------------------------------------------------------- //
    /* --- 1st hidden layer [0] gets updated from the input neurons AND BIAS NEURON --- */
    // -------------------------------------------------------------------------------- //
    
    for (int i = 0; i < numberOfNeuronsPerHiddenLayer[0]; i++)                  // for every neuron on the first hidden layer
    {
        for (int j = 0; j < numberOfInputNeurons; j++)                          // go over every neuron on the input layer
        {
            curWeight = connectionArray[0][j][i].getWeight();
            curValue = connectionArray[0][j][i].getFromNeuron()->getValue();
            sum += curWeight * curValue;                                        // input values are NOT run through the sigmoid
#ifdef DEBUG
            cout << "Updating Layer #1" << " Neuron #" << i << " from Input #" << j << " v = " << curValue << " sig(v) = " << sigmoid(curValue) << " w = " << curWeight << " Sum = " << sum << endl;
#endif
        }
        
        curWeight = biasConnections[0][i].getWeight();                          // bias contribution (bias value is 1)
        curValue = biasNeuron->getValue();
        sum += curWeight * curValue;
#ifdef DEBUG
        cout << "Updating Layer #1" << " Neuron #" << i << " from bias neuron" << " v = " << curValue << " sig(v) = " << sigmoid(curValue) << " w = " << curWeight << " Sum = " << sum << endl;
#endif
        
        hiddenNeuronArray[0][i].setValue(sum);                                  // neuron stores the raw (pre-activation) sum
#ifdef DEBUG
        cout << "Final value for Neuron #" << i << " on layer #1 is " << sum << endl;
#endif
        sum = 0;                                                                // reset for the next neuron
    }
    
    // ----------------------------------------------------------------------------------------------------------------------- //
    /* --- 2nd and subsequent hidden layers [1 to numHiddenLayers] gets updated from their previous layer AND BIAS NEURON  --- */
    // ----------------------------------------------------------------------------------------------------------------------- //
    
    if (numberOfHiddenLayers > 1)                                               // only when the network has 2+ hidden layers
    {
        for (int i = 1; i < numberOfHiddenLayers; i++)                          // for every hidden layer after the first
        {
            for (int j = 0; j < numberOfNeuronsPerHiddenLayer[i]; j++)          // neuron being updated
            {
                for (int k = 0; k < numberOfNeuronsPerHiddenLayer[i-1]; k++)    // inputs come from the previous hidden layer
                {
                    curWeight = connectionArray[i][k][j].getWeight();
                    curValue = connectionArray[i][k][j].getFromNeuron()->getValue();
                    sum += curWeight * sigmoid(curValue);                       // hidden outputs are activated before weighting
#ifdef DEBUG
            cout << "Updating Layer #" << i+1 << " Neuron #" << j << " from Neuron #" << k << "on Layer #" << i-1 << " v = " << curValue << " sig(v) = " << sigmoid(curValue) << " w = " << curWeight << " Sum = " << sum << endl;
#endif
                }

                curWeight = biasConnections[i][j].getWeight();                  // bias contribution for this layer
                curValue = biasNeuron->getValue();
                sum += curWeight * sigmoid(curValue);
#ifdef DEBUG
            cout << "Updating Layer #" << i+1 << " Neuron #" << j << " from bias neuron" << " v = " << curValue << " sig(v) = " << sigmoid(curValue) << " w = " << curWeight << " Sum = " << sum << endl;
#endif

                hiddenNeuronArray[i][j].setValue(sum) ;                         // store the raw sum
#ifdef DEBUG
        cout << "Final value for Neuron #" << j << " on layer #" << i+1 << " is " << sum << endl;
#endif
                sum = 0;
            }
        }
    }
    
    // ------------------------------------------------------------------------------------------------- //
    /* --- output layer gets updated from the last hidden layer [numHiddenLayers-1] AND BIAS NEURON  --- */
    // ------------------------------------------------------------------------------------------------- //
   
    for (int i = 0; i < numberOfOutputNeurons; i++)
    {
        for (int j = 0; j < numberOfNeuronsPerHiddenLayer[numberOfHiddenLayers-1]; j++)
        {
            curWeight = connectionArray[numberOfHiddenLayers][j][i].getWeight();
            curValue = connectionArray[numberOfHiddenLayers][j][i].getFromNeuron()->getValue();
            sum += curWeight * sigmoid(curValue);                               // activate before weighting
            
#ifdef DEBUG
            cout << "Updating Layer #" << numberOfHiddenLayers+1 << " Neuron #" << i << " from Neuron #" << j << "on Layer #" << numberOfHiddenLayers << " v = " << curValue << " sig(v) = " << sigmoid(curValue) << " w = " << curWeight << " Sum = " << sum << endl;
#endif
        }

        curWeight = biasConnections[numberOfHiddenLayers][i].getWeight();       // bias contribution for the output layer
        curValue = biasNeuron->getValue();
        sum += curWeight * sigmoid(curValue);
        
#ifdef DEBUG
            cout << "Updating Layer #" << numberOfHiddenLayers+1 << " Neuron #" << i << " from bias neuron" << " v = " << curValue << " sig(v) = " << sigmoid(curValue) << " w = " << curWeight << " Sum = " << sum << endl;
#endif
        outputNeuronArray[i].setValue(sum);                                     // store the raw sum
        
#ifdef DEBUG
        cout << "Final value for Neuron #" << i << " on layer #" << numberOfHiddenLayers+1 << " is " << sigmoid(sum) << endl;
#endif
    
        sum = 0;
    }
    
    // ------------------------------------------------------------------------------------------------------- //
    /* --- calculate the mean absolute error between scaled outputs and the entry's expected output values --- */
    // ------------------------------------------------------------------------------------------------------- //
    
    double* expected = data->getOutputEntryFromBin(bin, entry);                 // hoisted: one lookup instead of one per output
    double avgErrorOverOutputs = 0;
    
    for (int i = 0; i < data->getNumOutputs(); i++)                             // scale each activated output up before comparing
        avgErrorOverOutputs += fabs( (sigmoid(outputNeuronArray[i].getValue()) * data->getScalingFactor()) - expected[i] );

    avgErrorOverOutputs /= data->getNumOutputs();                               // mean over all outputs
    
    return avgErrorOverOutputs;
}

void Network :: backPropagateWithDerivative(int bin, int entry)
{
    double obtainedValue, expectedValue, obtainedScaled, sigmoidScaling, error, outError, weightAdjustment, accumulator;
    
    #ifdef DEBUG
      cout << "\n\n--- BACKPROPAGATING ---" << endl;
      cout << "Bin: " << bin+1 << " Entry #" << entry+1 << endl;
#endif 
    // calculate errors on output layer
    
    for (int i = 0; i < numberOfOutputNeurons; i++)
    {
        expectedValue = data->getOutputEntryFromBin(bin, entry)[i];              // get the real expected value from data set
        obtainedValue = sigmoid(outputNeuronArray[i].getValue());               // run the output through sigmoid to get the real calculated value
        obtainedScaled = obtainedValue * data->getScalingFactor();
        
        outError = fabs((expectedValue - obtainedScaled)/expectedValue);
        //sigmoidScaling = obtainedValue * (1 - obtainedValue);                   // some math to level out the output
        error = sigmoidPrime(obtainedValue) * (expectedValue - obtainedScaled);               // the error of the obtained value in contrast to the expected value is the scaling times difference
        outputNeuronArray->setDelta(error);                                          // save this value locally in the neuron
        
        
#ifdef DEBUG
    cout << "At output layer, output neuron #" << i << " act'(value): " << sigmoidPrime(obtainedValue) << " expected: " << expectedValue << " got (scaled): " <<
            obtainedScaled << " output error: " << outError << " error (delta): " << error << endl;
#endif
    }
    
   // correct from here on down
      
      
    // loop from last hidden layer to first hidden layer
    //    update weights departing from layer
    //    calculate error for neurons (so previous layer can adjust weights)
    
    if (numberOfHiddenLayers >= 1)                                              // when dealing with a network with at least ONE hidden layer (most common, there will rarely be a network with no hidden layers/neurons)
    {      
        for (int i = numberOfHiddenLayers - 1; i >= 0; i--)                     // loop through hidden layers
        {
          // update weights departing from hidden layer i
            for (int j = 0; j < numberOfNeuronsPerHiddenLayer[i]; j++)          // loop through neurons in a layer
            {
                if (i == (numberOfHiddenLayers - 1))                            // if in the last hidden layer, get deltas from output layer
                {
                    for (int k = 0; k < numberOfOutputNeurons; k ++)            // loop over output neurons
                    {
                        weightAdjustment = learningRate * hiddenNeuronArray[i][j].getValue() * outputNeuronArray[k].getDelta();    // calculate adjustment using deltas from output layer
                        connectionArray[i+1][j][k].setWeight( connectionArray[i+1][j][k].getWeight() + weightAdjustment  );                 // add adjustment to current weight
#ifdef DEBUG
    cout << "At layer #" << i+1 << " neuron " << j << " -> " << k << " weight adjustment: " << weightAdjustment << " new weight: " << connectionArray[i+1][j][k].getWeight() << endl; 
#endif 
    
                        weightAdjustment = learningRate * biasNeuron->getValue() * outputNeuronArray[k].getDelta();                // prepare weight adjustment for bias connection
                        biasConnections[i+1][k].setWeight( biasConnections[i+1][k].getWeight() + weightAdjustment);
#ifdef DEBUG
    cout << "At layer #" << i+1 << " bias connected to neuron " << k << " weight adjustment: " << weightAdjustment << " new weight: " << biasConnections[i+1][k].getWeight() << endl;
#endif
                    }   
                }
                else                                                            // when subsequent layer is a hidden layer
                {
                    for (int k = 0; k < numberOfNeuronsPerHiddenLayer[i+1]; k++)// loop over neurons in next layer
                    {
                        weightAdjustment = learningRate * hiddenNeuronArray[i][j].getValue() * hiddenNeuronArray[i+1][k].getDelta();
                        connectionArray[i+1][j][k].setWeight( connectionArray[i+1][j][k].getWeight() + weightAdjustment );
                        
#ifdef DEBUG
    cout << "At layer #" << i+1 << " neuron " << j << " -> " << k << " weight adjustment: " << weightAdjustment << " new weight: " << connectionArray[i+1][j][k].getWeight() << endl; 
#endif 
                        weightAdjustment = learningRate * biasNeuron->getValue() * hiddenNeuronArray[i+1][k].getDelta();                   // prepare weight adjustment for bias connection
                        biasConnections[i+1][k].setWeight( biasConnections[i+1][k].getWeight() + weightAdjustment);
                        
#ifdef DEBUG
    cout << "At layer #" << i+1 << " bias connected to neuron " << k << " weight adjustment: " << weightAdjustment << " new weight: " << biasConnections[i+1][k].getWeight() << endl;
#endif
                    }
                }
                
            }                                                                   // done updating weights for current layer
            
          //with updated weights, now calculate deltas for current layer
            for (int j = 0; j < numberOfNeuronsPerHiddenLayer[i]; j++)
            {
                obtainedValue = sigmoid(hiddenNeuronArray[i][j].getValue());
                //sigmoidScaling = obtainedValue * (1 - obtainedValue);
                
                if (i == numberOfHiddenLayers - 1)                              // if located on the last (or only) hidden layer, sum over outputs
                   for (int k = 0; k < numberOfOutputNeurons; k++)                    
                        accumulator += connectionArray[i+1][j][k].getWeight() * outputNeuronArray[k].getDelta();        // sum gets updated with product of weights and deltas on the output layer
                else
                    for (int k = 0; k < numberOfNeuronsPerHiddenLayer[i+1]; k++)
                        accumulator += connectionArray[i+1][j][k].getWeight() * hiddenNeuronArray[i+1][k].getDelta();
                
                error = sigmoidPrime(obtainedValue) * accumulator;
                
#ifdef DEBUG
    cout << "At layer #" << i+1 << " neuron #" << j << " act'(value): " << sigmoidPrime(obtainedValue) << " calculated error: " << error << endl;
#endif               
                hiddenNeuronArray[i][j].setDelta(error);
            }  
        }
        
        // weights for all hidden layer connections and deltas up to the first hidden layer calculated
        // last step is update weights departing from input layer to first hidden layer
        
        for (int j = 0; j < numberOfInputNeurons; j++)
            for (int k = 0; k < numberOfNeuronsPerHiddenLayer[0]; k++)
            {
                weightAdjustment = learningRate * inputNeuronArray[j].getValue() * hiddenNeuronArray[0][k].getDelta();
                connectionArray[0][j][k].setWeight( connectionArray[0][j][k].getWeight() + weightAdjustment);
                
#ifdef DEBUG
                cout << "At input layer, neuron #" << j << " -> " << k << " weight adjustment: " << weightAdjustment << " new weight: " << connectionArray[0][j][k].getWeight() << endl;
#endif
                
                weightAdjustment = learningRate * biasNeuron->getValue() * hiddenNeuronArray[0][k].getDelta();
                biasConnections[0][k].setWeight( biasConnections[0][k].getWeight() + weightAdjustment);
                
#ifdef DEBUG
    cout << "At layer #1 bias connected to neuron " << k << " weight adjustment: " << weightAdjustment << " new weight: " << biasConnections[0][k].getWeight() << endl;
#endif
            }
        
    }
    
      
}

void Network :: backPropagate(int bin, int entry)
{
    double obtainedValue, expectedValue, obtainedScaled, sigmoidScaling, error, outError, weightAdjustment, accumulator;
    
#ifdef DEBUG
      cout << "\n\n--- BACKPROPAGATING ---" << endl;
      cout << "Bin: " << bin+1 << " Entry #" << entry+1 << endl;
#endif 
    // calculate errors on output layer
    
    for (int i = 0; i < numberOfOutputNeurons; i++)
    {
        expectedValue = data->getOutputEntryFromBin(bin, entry)[i];              // get the real expected value from data set
        obtainedValue = sigmoid(outputNeuronArray[i].getValue());               // run the output through sigmoid to get the real calculated value
        obtainedScaled = obtainedValue * data->getScalingFactor();
        
        outError = fabs((expectedValue - obtainedScaled)/expectedValue);
        sigmoidScaling = obtainedValue * (1 - obtainedValue);                   // some math to level out the output
        error = sigmoidScaling * (expectedValue - obtainedScaled);               // the error of the obtained value in contrast to the expected value is the scaling times difference
        outputNeuronArray->setDelta(error);                                          // save this value locally in the neuron
        
        
#ifdef DEBUG
    cout << "At output layer, output neuron #" << i << " sigmoid scaling: " << sigmoidScaling << " expected: " << expectedValue << " got (scaled): " <<
            obtainedScaled << " output error: " << outError << " error (delta): " << error << endl;
#endif
    }
    
    // loop from last hidden layer to first hidden layer
    //    update weights departing from layer
    //    calculate error for neurons (so previous layer can adjust weights)
    
    if (numberOfHiddenLayers >= 1)                                              // when dealing with a network with at least ONE hidden layer (most common, there will rarely be a network with no hidden layers/neurons)
    {      
        for (int i = numberOfHiddenLayers - 1; i >= 0; i--)                     // loop through hidden layers
        {
          // update weights departing from hidden layer i
            for (int j = 0; j < numberOfNeuronsPerHiddenLayer[i]; j++)          // loop through neurons in a layer
            {
                if (i == (numberOfHiddenLayers - 1))                            // if in the last hidden layer, get deltas from output layer
                {
                    for (int k = 0; k < numberOfOutputNeurons; k ++)            // loop over output neurons
                    {
                        weightAdjustment = learningRate * hiddenNeuronArray[i][j].getValue() * outputNeuronArray[k].getDelta();    // calculate adjustment using deltas from output layer
                        connectionArray[i+1][j][k].setWeight( connectionArray[i+1][j][k].getWeight() + weightAdjustment  );                 // add adjustment to current weight
#ifdef DEBUG
    cout << "At layer #" << i+1 << " neuron " << j << " -> " << k << " weight adjustment: " << weightAdjustment << " new weight: " << connectionArray[i+1][j][k].getWeight() << endl; 
#endif 
    
                        weightAdjustment = learningRate * biasNeuron->getValue() * outputNeuronArray[k].getDelta();                // prepare weight adjustment for bias connection
                        biasConnections[i+1][k].setWeight( biasConnections[i+1][k].getWeight() + weightAdjustment);
#ifdef DEBUG
    cout << "At layer #" << i+1 << " bias connected to neuron " << k << " weight adjustment: " << weightAdjustment << " new weight: " << biasConnections[i+1][k].getWeight() << endl;
#endif
                    }   
                }
                else                                                            // when subsequent layer is a hidden layer
                {
                    for (int k = 0; k < numberOfNeuronsPerHiddenLayer[i+1]; k++)// loop over neurons in next layer
                    {
                        weightAdjustment = learningRate * hiddenNeuronArray[i][j].getValue() * hiddenNeuronArray[i+1][k].getDelta();
                        connectionArray[i+1][j][k].setWeight( connectionArray[i+1][j][k].getWeight() + weightAdjustment );
                        
#ifdef DEBUG
    cout << "At layer #" << i+1 << " neuron " << j << " -> " << k << " weight adjustment: " << weightAdjustment << " new weight: " << connectionArray[i+1][j][k].getWeight() << endl; 
#endif 
                        weightAdjustment = learningRate * biasNeuron->getValue() * hiddenNeuronArray[i+1][k].getDelta();                   // prepare weight adjustment for bias connection
                        biasConnections[i+1][k].setWeight( biasConnections[i+1][k].getWeight() + weightAdjustment);
                        
#ifdef DEBUG
    cout << "At layer #" << i+1 << " bias connected to neuron " << k << " weight adjustment: " << weightAdjustment << " new weight: " << biasConnections[i+1][k].getWeight() << endl;
#endif
                    }
                }
                
            }                                                                   // done updating weights for current layer
            
          //with updated weights, now calculate deltas for current layer
            for (int j = 0; j < numberOfNeuronsPerHiddenLayer[i]; j++)
            {
                obtainedValue = sigmoid(hiddenNeuronArray[i][j].getValue());
                sigmoidScaling = obtainedValue * (1 - obtainedValue);
                
                if (i == numberOfHiddenLayers - 1)                              // if located on the last (or only) hidden layer, sum over outputs
                   for (int k = 0; k < numberOfOutputNeurons; k++)                    
                        accumulator += connectionArray[i+1][j][k].getWeight() * outputNeuronArray[k].getDelta();        // sum gets updated with product of weights and deltas on the output layer
                else
                    for (int k = 0; k < numberOfNeuronsPerHiddenLayer[i+1]; k++)
                        accumulator += connectionArray[i+1][j][k].getWeight() * hiddenNeuronArray[i+1][k].getDelta();
                
                error = sigmoidScaling * accumulator;
                
#ifdef DEBUG
    cout << "At layer #" << i+1 << " neuron #" << j << " sigmoid scaling: " << sigmoidScaling << " calculated error: " << error << endl;
#endif               
                hiddenNeuronArray[i][j].setDelta(error);
            }  
        }
        
        // weights for all hidden layer connections and deltas up to the first hidden layer calculated
        // last step is update weights departing from input layer to first hidden layer
        
        for (int j = 0; j < numberOfInputNeurons; j++)
            for (int k = 0; k < numberOfNeuronsPerHiddenLayer[0]; k++)
            {
                weightAdjustment = learningRate * inputNeuronArray[j].getValue() * hiddenNeuronArray[0][k].getDelta();
                connectionArray[0][j][k].setWeight( connectionArray[0][j][k].getWeight() + weightAdjustment);
                
#ifdef DEBUG
                cout << "At input layer, neuron #" << j << " -> " << k << " weight adjustment: " << weightAdjustment << " new weight: " << connectionArray[0][j][k].getWeight() << endl;
#endif
                
                weightAdjustment = learningRate * biasNeuron->getValue() * hiddenNeuronArray[0][k].getDelta();
                biasConnections[0][k].setWeight( biasConnections[0][k].getWeight() + weightAdjustment);
                
#ifdef DEBUG
    cout << "At layer #1 bias connected to neuron " << k << " weight adjustment: " << weightAdjustment << " new weight: " << biasConnections[0][k].getWeight() << endl;
#endif
            }
        
    }
    
      getchar();
    
}
        
double Network :: randomWeight()
{
    // Produces a small pseudo-random initial weight near zero.
    // rand()%100+1 yields an integer in [1,100]; dividing by 500 maps it to
    // [0.002, 0.2], and subtracting 0.1 shifts the range to (-0.098, 0.1].
    const int bucket = rand() % 100 + 1;
    return ((double) bucket) / 500.0 - .1;
}

double Network :: sigmoidPrime(double value)
{
    // Derivative of the logistic sigmoid: s'(x) = e^(-x) / (1 + e^(-x))^2.
    // Uses exp(-value) instead of pow(M_E, -value) — same value, but exp() is
    // the idiomatic (and much cheaper) call; squaring by multiplication avoids
    // a second general-purpose pow() invocation.
    double expNeg = exp(-value);
    double denomRoot = 1.0 + expNeg;
    return expNeg / (denomRoot * denomRoot);
}

double Network :: runNetwork()
{
    int numEpochs = 0;
    double error = 1.0;
    
#ifdef DEBUG
    cout << "--- RUNNING NEURAL NETWORK ---" << endl;
#endif
    
    if (data->getNumBins() == 1)                                                // run using whole DataSet till error drops below tolerance
    {
        currentValidationSet = 99999;                                           // different value, because training set = validation set
        
#ifdef DEBUG
    cout << "Number of bins: " << data->getNumBins() << " % error desired: " << errorTolerance << endl;
    cout << endl;
#endif     
        
        while (error > errorTolerance)                                          // until tolerance is met
        {
            trainNetwork();                                                     // perform training
            error = validateNetwork();                                          // validate the network and accumulate error
            numEpochs++;                                                        // increase number of iterations
            
            cout << "Epoch #" << numEpochs+1 << " % error obtained: " << error << endl;
        }
    }
    else                                                                        // k-fold cross validation
    {
        while (error > errorTolerance)
        {
            for (int i = 0; i < data->getNumBins(); i++)                        // run k times
            {
                currentValidationSet = i;                                       // define which bin is validation set
                trainNetwork();                                                 // perform training (the method decides which inputs to feed)
                getchar();
                error = validateNetwork();                                      // validate the network and accumulate error for every run
            }
            cout << "Epoch #" << numEpochs+1 << "% error obtained: " << error << endl;
        }
    }
    
    return numEpochs+1;
}

void Network :: trainNetwork()                                                  // feeds inputs from k-1 bins (all but the currentValidationSet)
{
    for (int bin = 0; bin < data->getNumBins(); bin++)                          // for all bins (1 or more)
    {
        if (bin == currentValidationSet)                                        // hoisted out of the entry loop: skip the validation
            continue;                                                           // bin once instead of re-testing it for every entry

        for (int entry = 0; entry < data->getNumItemsPerBin(); entry++)         // for all items in this bin
        {
#ifdef DEBUG
      cout << "--- INITIALIZING NEURONS ---" << endl;
      cout << "Bin: " << bin+1 << " Entry #" << entry+1 << endl;
#endif      
            initializeNeurons(bin, entry);                                      // load i/o from bin
            feedForward(bin, entry);                                            // do feed forward process (bin, entry variables used for referencing active entry for error calculation)
            backPropagateWithDerivative(bin, entry);                            // back propagate error
            getchar();                                                          // debug pause; blocks for a keypress after each entry
        }
    }
}

double Network :: validateNetwork()
{
    double totalError = 0;
    
    for (int entry = 0; entry < data->getNumItemsPerBin(); entry++)             // for all items in a bin
    {
        initializeNeurons(currentValidationSet, entry);                         // initialize the neurons with the entries in the validation set
        totalError += feedForward(currentValidationSet, entry);                 // accumulate error for all entries (already averaged over outputs)                         
        
        getchar();
        
    }
    
    totalError /= data->getNumItemsPerBin();                                    // divide total error by the number of elements in a bin (if 1, numEntries, if k, numEntries/k)
    
    return totalError;                                                          // error averaged over outputs, then averaged over numEntriesPerBin
    
}

