//
//  neuralnetwork.cpp
//  neuralnetwork
//
//  Created by tianshuai on 7/13/15.
//

#include "neuralnetwork.h"

/* Training hyper-parameters shared by every Node:
   alpha — momentum: fraction of the previous weight delta carried forward,
   eta   — learning rate: scale of the fresh gradient step.
   The 'f' suffix avoids the implicit double->float narrowing conversion
   the original unsuffixed literals incurred. */
float Node::alpha =  0.40f;
float Node::eta   =  0.10f;

/* Construct a node at position idx within its level, with one outgoing
   link per node of the next level. Each link starts with a random weight
   in [0,1] (via rand0to1) and a zero momentum term (DerivWeight). */
Node::Node(int OutCount, int idx)
{
    index = idx;

    for(int link = 0; link < OutCount; ++link)
    {
        Link outgoing;
        outgoing.DerivWeight = 0;
        outgoing.weight = rand0to1();

        OutWeights.push_back(outgoing);
    }
}

void Node::FeedFwd(const std::vector<Node>& prevLevel)
{
    double v = 0.0;
    
    for(int n = 0; n < prevLevel.size(); ++n)
    {
        v += prevLevel[n].getOutValue() * prevLevel[n].OutWeights[index].weight;
    }
    
    setOut(TransFunc(v));
}

/* Output-layer gradient: (target - actual) scaled by the derivative of
   the transfer function evaluated at this node's current output. */
void Node::calcOutGradients(double GoalValue)
{
    gradient = (GoalValue - OutValue) * TransFuncDer(OutValue);
}

double Node::sumDerivWeights(const std::vector<Node>& nextLevel) const
{
    /* Sum errors of the nodes */
    double DerivWeights = 0;
    
    for(int i = 0; i < nextLevel.size() - 1; ++i)
    {
        DerivWeights += OutWeights[i].weight * nextLevel[i].gradient;
    }
    
    return DerivWeights;
}

void Node::calcHiddenGradients(const std::vector<Node>& nextLevel)
{
    double DerivWeights = sumDerivWeights(nextLevel);
    gradient = DerivWeights * TransFuncDer(OutValue);
}

void Node::updateInWeights(std::vector<Node>& prevLevel)
{
    for(int i = 0; i < prevLevel.size(); ++i)
    {
        Node& Node = prevLevel[i];
        double oldDerivWeight = Node.OutWeights[index].DerivWeight;
        double newDerivWeight = eta * Node.OutValue * gradient + alpha * oldDerivWeight;
        
        Node.OutWeights[index].DerivWeight = newDerivWeight;
        Node.OutWeights[index].weight += newDerivWeight;
    }
}


/* Build the network from a topology (node count per level).
   Every level gets one extra node beyond Topol[levelNum]: the bias node,
   whose output is pinned to 1.0.
   Fixes vs. the original:
   - removed a stray `i` token after the inner for-loop's `)` that made
     the file fail to compile;
   - srand() is now called BEFORE any Node is constructed — Node's
     constructor draws its initial weights with rand0to1(), so seeding
     afterwards made the initial weights identical on every run. */
NeuralNetwork::NeuralNetwork(const Topology& Topol)
{
    srand(time(NULL));

    int levelCount = Topol.size();

    for(int levelNum = 0; levelNum < levelCount; ++levelNum)
    {
        levels.push_back(Level());

        /* The output level has no outgoing links. */
        int OutCount = levelNum == levelCount - 1 ? 0 : Topol[levelNum+1];

        Level& currentLevel = levels.back();

        /* "<=" deliberately creates Topol[levelNum] + 1 nodes:
           the extra one is the bias node. */
        for(int n = 0; n <= Topol[levelNum]; ++n)
        {
            currentLevel.push_back(Node(OutCount, n));
        }

        /* The bias node always outputs 1.0. */
        currentLevel.back().setOut(1.0);
    }
}

void NeuralNetwork::FeedForward(const Value& InVals)
{
    for(int i = 0; i < InVals.size(); ++i)
    {
        levels[0][i].setOut(InVals[i]);
    }
    
    for(int levelNum = 1; levelNum < levels.size(); ++levelNum)
    {
        Level& level = levels[levelNum];
        const Level& lastLevel = levels[levelNum - 1];
        
        for(int n = 0; n < level.size() - 1; ++n)
        {
            level[n].FeedFwd(lastLevel);
        }
    }
}

/* One backward pass against the target vector Goal:
   1. compute the RMS error of the output level,
   2. fold it into a smoothed error for display,
   3. compute output-level gradients, then hidden-level gradients
      walking backwards, and finally
   4. update all weights, also walking backwards.
   The step order matters: every gradient must be in place before any
   weight moves. Assumes Goal.size() matches the number of non-bias
   output nodes and is non-zero — TODO confirm at call sites. */
void NeuralNetwork::BackPropagation(const Value& Goal)
{
    /* Calc RMS error */
    Level& OutLevel = levels.back();
    
    error = 0.0;/* initialize */
    
    for(int i = 0; i < Goal.size(); ++i)
    {
        double delta = Goal[i] - OutLevel[i].getOutValue();
        error += delta * delta;
    }
    
    error = std::sqrt(error / Goal.size());
    
    /* current error, smoothed over recent samples for display purposes */
    DisplayError = (DisplayError * DisplaySmoothingFactor + error)  / (DisplaySmoothingFactor + 1.0);
    
    /* output gradient — size() - 1 skips the output level's bias node */
    for(int i = 0; i < OutLevel.size() - 1; ++i)
    {
        OutLevel[i].calcOutGradients(Goal[i]);
    }
    
    /* hidden gradients, from the last hidden level back to the first
       (level 0 holds raw inputs and needs no gradient) */
    for(int i = levels.size() - 2; i > 0; --i)
    {
        Level& level = levels[i];
        Level& nextLevel = levels[i+1];
        
        for(int j = 0; j < level.size(); ++j)
        {
            level[j].calcHiddenGradients(nextLevel);
        }
    }
    
    /* Update weights from the output level back to the first hidden
       level; each non-bias node pulls on the weights feeding into it */
    for(int i = levels.size() - 1; i > 0; i--)
    {
        Level& level     = levels[i];
        Level& prevLevel = levels[i-1];
        
        for(int j = 0; j < level.size() - 1; ++j)
        {
            level[j].updateInWeights(prevLevel);
        }
    }
}

/* Copy the output level's values into results (replacing any previous
   contents). The output level's bias node is excluded. */
void NeuralNetwork::getOutput(Value& results) const
{
    const Level& OutLevel = levels.back();

    results.clear();
    for(std::size_t i = 0; i + 1 < OutLevel.size(); ++i)
    {
        results.push_back(OutLevel[i].getOutValue());
    }
}

/* One supervised training step: forward-propagate In, then
   back-propagate the error against Goal (which also updates weights). */
void NeuralNetwork::train(Value&& In, Value&& Goal)
{
    FeedForward(In);
    BackPropagation(Goal);
}

/* Inference only: forward-propagate In and return the output level's
   values. No error computation, no weight changes. */
Value NeuralNetwork::run(Value&& In)
{
    Value outputs;

    FeedForward(In);
    getOutput(outputs);

    return outputs;
}
