

#include "rtrl.h"

// NOTE(review): file-scope globals. `reservoirState` and `outputState` are never
// referenced in this file; `weights` may duplicate or be shadowed by a class
// member declared in rtrl.h (the member functions below use an unqualified
// `weights`) — verify against the header whether these declarations are dead.
Matrix weights,reservoirState,outputState;

using namespace std;


// Construct the network: num_Sensors input nodes, a fixed 30-node reservoir,
// and num_Motors output nodes. Initialises the weight matrix with small
// uniform random values and allocates the state / sensitivity storage.
RTRL::RTRL(int num_Sensors,int num_Motors) 
{
   //Initialise structure of the network 
    numInputNodes=num_Sensors;
    numReservoirNodes=30;      // fixed reservoir size
    numOutputNodes=num_Motors;
   
    //Set the initial weights within interval [-0.1,0.1] 
    // Rows cover reservoir+output nodes; columns additionally include inputs.
    weights.set(numReservoirNodes+numOutputNodes,numReservoirNodes+numOutputNodes+numInputNodes);
    
     for (int i=0; i<numReservoirNodes+numOutputNodes; i++)
    {
        for (int j=0; j<numReservoirNodes+numOutputNodes+numInputNodes; j++)
        { 
        // BUG FIX: (rand()%100)-50 over 1000 only covered [-0.05,0.049],
        // contradicting the documented [-0.1,0.1] range. (rand()%201)-100
        // is in [-100,100], so dividing by 1000.0 yields [-0.1,0.1].
        double rndNum=(rand()%201)-100;
        double w=rndNum/1000.0;
        weights.val(i,j)=w;
        }
        
        
    }
    
    // Column state vector: outputs, then reservoir (inputs appended later
    // by setState()).
    nodeState.set(weights.getM(),1);
    learningRate=0.03;
  
    // Target vector filled by setDesiredOut().
    desiredSens.set(weights.getM(),1);
    //Create the vector for saving the sensitivity: one dE/dw-sized matrix
    //per (reservoir+output) node, all initialised alike.
    Matrix piFactor(weights.getM(),weights.getN());
    nodeSensitivity.assign(numReservoirNodes+numOutputNodes,piFactor);
}


// Destructor: nothing to release explicitly — members clean up themselves.
RTRL::~RTRL() {
}

// Propagate the current state one step through the network and return the
// resulting pre-activation node values (weights * nodeState); no squashing
// is applied here.
Matrix RTRL::simulateNet()
{
    return weights*nodeState;
}

 // Node activation function: hyperbolic tangent, squashing into (-1, 1).
 double RTRL::squash(double node)
{
    double squashed = tanh(node);
    return squashed;
}

 // Copy the motor values into the input section of the state vector.
 // Grows nodeState by motors.getM() rows first, then writes each motor
 // value after the output and reservoir sections.
 void RTRL::setState(Matrix motors)
 {
    nodeState.addRows(motors.getM(),0);

    // Inputs live after the output and reservoir entries of the state.
    const int inputOffset = numOutputNodes + numReservoirNodes;
    for (unsigned int row = 0; row < motors.getM(); row++)
    {
        nodeState.val(inputOffset + row, 0) = motors.val(row, 0);
    }
 }
 
 // Store the target (future sensor) values in the leading rows of
 // desiredSens; rows beyond fut_sensor.getM() keep their previous contents.
 void RTRL::setDesiredOut (Matrix fut_sensor)
 {
     for (unsigned int row = 0; row < fut_sensor.getM(); row++)
     {
         desiredSens.val(row, 0) = fut_sensor.val(row, 0);
     }
 }
 
 // Delta-rule weight update: accumulate each node's sensitivity matrix
 // weighted by that node's error, then move the weights by learningRate
 // in that direction.
 // NOTE(review): the error spans every row of desiredSens, not just the
 // output nodes — confirm the non-output targets are meant to contribute.
 void RTRL::updateWeights(Matrix output)
 {
     Matrix error=desiredSens-output;

     // Sum e_i * p^i over all nodes that have a sensitivity matrix.
     Matrix weightChange(weights.getM(),weights.getN());
     for (unsigned int node=0; node<error.getM(); node++)
     {
         weightChange+=nodeSensitivity[node]*error.val(node,0);
     }

     weights+=weightChange*learningRate;
 }
 
 // Derivative of the tanh activation: d/dx tanh(x) = 1 - tanh(x)^2.
 double RTRL::squash_derivative(double node)
{
 double k=tanh(node);
    // BUG FIX: previously returned 1.2 - k*k, which is not the tanh
    // derivative. (If the extra 0.2 was a deliberate flat-spot offset
    // a la Fahlman, restore it with an explanatory constant instead.)
    return 1.0 - k*k;
    
}

 
 // Build the linearised model (Jacobian of outputs w.r.t. inputs) by
 // partitioning the weight matrix into its input, recurrent, output and
 // direct-feedthrough sub-blocks and combining them with the activation
 // derivative at the current operating point.
 // NOTE(review): rows()/columns() appear to take inclusive [first,last]
 // indices elsewhere in this file (ends written as ...-1) — the two lines
 // flagged below deviate from that convention; verify against rtrl.h.
 Matrix RTRL::getModelMatrix (Matrix input)
{
    //Partition weights matrix 
    // Input-to-reservoir weights: reservoir rows, input columns.
    // NOTE(review): the row range ends at numOutputNodes+numReservoirNodes
    // (no -1) — one row more than the reservoir if ends are inclusive;
    // off-by-one suspect.
     Matrix inputWeights=weights.rows(numOutputNodes,numOutputNodes+numReservoirNodes);
    inputWeights=inputWeights.columns(numReservoirNodes+numOutputNodes,inputWeights.getN()-1);
     
    // Recurrent (reservoir-to-reservoir/output) weights.
    Matrix ESNWeights =weights.rows(numOutputNodes,weights.getM()-1);
    ESNWeights=ESNWeights.columns(numOutputNodes,numOutputNodes+numReservoirNodes-1);
   
    // Current reservoir state.
    // NOTE(review): the end index numReservoirNodes+numInputNodes-1 only
    // equals the last reservoir row (numOutputNodes+numReservoirNodes-1)
    // when numInputNodes == numOutputNodes — likely a typo; confirm.
    Matrix ESNState= nodeState.rows(numOutputNodes,numReservoirNodes+numInputNodes-1);
    
    // Reservoir-to-output weights.
    Matrix outputWeights=weights.columns(numOutputNodes,numReservoirNodes+numOutputNodes-1);
    outputWeights=outputWeights.rows(0,numOutputNodes-1);
   
   
    
    // Direct input-to-output weights (feedthrough term).
    Matrix outputDirectWeights=weights.columns(numReservoirNodes+numOutputNodes,weights.getN()-1);
    outputDirectWeights=outputDirectWeights.rows(0,numOutputNodes-1);
   
   
    // Output-to-output recurrence (extracted but unused below).
    Matrix selfOutWeights=weights.rows(0,numOutputNodes-1);
    selfOutWeights=selfOutWeights.columns(0,numOutputNodes-1);
    
   //Calculate Jacobian
    // Pre-activation of the reservoir given this input and the ESN state.
    Matrix RNNActivations = inputWeights*input+ESNWeights*ESNState;
  
     
  // Element-wise activation derivative at the operating point.
  const Matrix& g_prime = RNNActivations.map(&squash_derivative);
        
 
  // Chain rule: d(out)/d(in) = W_out * (W_in scaled by g') + W_direct.
  // NOTE(review): operator& semantics come from the Matrix class
  // (presumably element/row-wise scaling) — confirm in rtrl.h.
  return (outputWeights * (inputWeights & g_prime)) + outputDirectWeights;
  
}
 
 // Kronecker delta: 1 when the two indices coincide, 0 otherwise.
 int RTRL::kronneckerDelta (int i, int k)
 {
     return (i == k) ? 1 : 0;
 }
 // The RTRL sensitivity-update rule. TODO: rewrite with matrix operations to save time.
 // Update each node's sensitivity matrix p^node = d(node)/d(w_rowElem,colElem)
 // with the RTRL recurrence:
 //   p^node <- g'(net_node) * ( sum_l w_{node,l} * p^l
 //                              + delta(rowElem,node) * x_colElem )
 // Params:
 //   updatedNodes - pre-activation node values of the new step (g' taken here)
 //   nodesValue   - node/input values feeding the step (the x_colElem factor)
 // NOTE(review): nodeSensitivity[node] is overwritten in place while later
 // nodes still read nodeSensitivity[l] inside their summation, so for l < node
 // they see this step's values rather than the previous step's. The textbook
 // RTRL recurrence uses only p(t) on the right-hand side — confirm whether
 // this in-place variant is intentional.
 void RTRL::getNodeSensitivity(Matrix updatedNodes,Matrix nodesValue)
 {
     // Only the recurrent block (reservoir+output columns) feeds the sum.
     Matrix internalWeights=weights.columns(0,numReservoirNodes+numOutputNodes-1);
     for (int node=0; node<numReservoirNodes+numOutputNodes; node++)
     {
         double nodeDeriv=squash_derivative(updatedNodes.val(node,0));
         for (int rowElem=0; rowElem<numReservoirNodes+numOutputNodes; rowElem++)
         {
             for (int colElem=0; colElem<weights.getN(); colElem++)
             {
                 // Recurrent contribution: sum over incoming internal nodes.
                 double summation=0;
                 for (int l=0; l<numReservoirNodes+numOutputNodes; l++)
                 {
                     summation+=internalWeights.val(node,l)*nodeSensitivity[l].val(rowElem,colElem);
                 }
                 // Direct contribution applies only to node's own weight row.
                 nodeSensitivity[node].val(rowElem,colElem)=nodeDeriv*
                                    (summation+(kronneckerDelta(rowElem,node)*nodesValue.val(colElem,0)));
             }
         }
     }
     
 }
 
// Run one full RTRL step: propagate the state, update the sensitivities and
// weights, commit the squashed activations, and return the output-node values.
// NOTE(review): updateWeights receives the pre-activation values — confirm the
// targets in desiredSens are expressed in the same (unsquashed) scale.
Matrix RTRL::predict()
{
   // Node values before this step (inputs to the sensitivity update)...
   Matrix previousState=nodeState;
   // ...and the freshly propagated, pre-activation values.
   Matrix propagated=simulateNet();

   getNodeSensitivity(propagated,previousState);

   updateWeights(propagated);

   // The squashed activations become the new network state.
   nodeState=propagated.map(&squash);
   // Only the leading rows correspond to output nodes.
   return nodeState.rows(0,numOutputNodes-1);
}
