/* 
 * File:   lwr.cpp
 * Author: Athanasios Polydoros
 * 
 * Created on 09 July 2013, 19:29
 */

#include "lwr.h"

LWR::LWR(int numSensors, int numMotors)
    : inputNum(numMotors),   // motor commands drive the regression input
      outputNum(numSensors)  // sensor readings are what the model predicts
{
    // Size the working matrices. The extra input column (+1) holds the
    // constant bias term of the linear model.
    inputHistory.set(1, inputNum + 1);
    outputHistory.set(1, outputNum);
    desiredOutput.set(1, outputNum);
    currentInput.set(1, inputNum + 1);
    regressionCoeff.set(inputNum + 1, outputNum);
}


// Destructor: nothing to release directly; Matrix members clean up themselves.
LWR::~LWR() {
}
void LWR::addToMemory(Matrix input,Matrix desiredOut)
{
    // Maintain a sliding window of the most recent 2000 input/output samples.
    if (inputHistory.getM() < 2000)
    {
        // Still room in the window: just append the new sample.
        inputHistory.addRows(1, input);
        outputHistory.addRows(1, desiredOut);
    }
    else
    {
        // Window is full: drop the oldest row, then append the new sample
        // to the truncated copy. (The original appended to the full matrix
        // and then overwrote it with the truncated copy, which silently
        // discarded every new sample once the window filled up.)
        Matrix temp;
        temp = inputHistory.rows(1, 1999);
        temp.addRows(1, input);
        inputHistory = temp;

        temp = outputHistory.rows(1, 1999);
        temp.addRows(1, desiredOut);
        outputHistory = temp;
    }
}
 void LWR::setState(Matrix motors)
 {
     // Build the current regression input: a leading 1 for the bias term,
     // followed by the motor command column vector.
     currentInput.val(0, 0) = 1;
     for (int col = 1; col <= inputNum; col++)
     {
         currentInput.val(0, col) = motors.val(col - 1, 0);
     }
 }
 // Euclidean (L2) distance between two row vectors.
 // The original allocated a whole temporary Matrix of squared differences
 // and summed it with elementSum(); a plain scalar accumulator does the
 // same work without the extra allocation.
 double LWR::euclideanDistance(Matrix vector1,Matrix vector2)
 {
     Matrix difference = vector1 - vector2;
     double sum = 0.0;
     for (int i = 0; i < vector1.getN(); i++)
     {
         const double d = difference.val(0, i);
         sum += d * d;
     }
     return sqrt(sum);
 }
    // Store the latest sensor readings as the regression target.
    // '^T' appears to invoke the Matrix transpose operator, turning the
    // caller's column vector into the (1 x outputNum) row that
    // desiredOutput was sized for in the constructor — TODO confirm
    // against the Matrix class.
    void LWR::setDesiredOut(Matrix sensors)
    {
    desiredOutput=sensors^T;
    }
    // Build the diagonal weight matrix: each stored sample gets a Gaussian
    // kernel weight based on its distance to the current input, so nearby
    // samples dominate the local fit.
    void LWR::findWeights()
    {
        const int samples = inputHistory.getM();
        weights.set(samples, samples);
        for (int i = 0; i < samples; i++)
        {
            const double dist = euclideanDistance(inputHistory.row(i), currentInput);
            // Bandwidth term kept exactly as tuned: 2*1.7*1 (a second 1.7
            // factor was left commented out in the original expression).
            const double kernel = exp(-((dist * dist) / (2 * 1.7 * 1)));
            weights.val(i, i) = sqrt(kernel);
        }
    }
// Scale every stored input/output row by its kernel weight so the
// subsequent least-squares solve becomes locally weighted.
// Fix: the original ran ONE inner loop over outputHistory.getN() columns
// for BOTH matrices (patching only the bias column of the inputs
// afterwards), which is correct only when inputNum == outputNum — other
// shapes left input columns unweighted or indexed out of range. Each
// matrix now gets its own column loop. Unused locals tempIn/tempOut removed.
void LWR::weightData()
{
    weightedInputs.set(inputHistory.getM(), inputNum + 1);
    weightedOutputs.set(outputHistory.getM(), outputNum);
    for (int i = 0; i < inputHistory.getM(); i++)
    {
        const double w = weights.val(i, i);
        for (int j = 0; j < inputNum + 1; j++)
        {
            weightedInputs.val(i, j) = inputHistory.val(i, j) * w;
        }
        for (int j = 0; j < outputNum; j++)
        {
            weightedOutputs.val(i, j) = outputHistory.val(i, j) * w;
        }
    }
}
// Return the learned linear model with the bias row stripped off.
// While the history holds fewer than 3 samples the system would be
// singular, so a freshly-constructed (zero) matrix is returned instead.
Matrix LWR::getModelMatrix()
{
    if (inputHistory.getM() < 3)
    {
        Matrix emptyModel(inputNum, outputNum);
        return emptyModel;
    }
    // Rows 1..M-1 skip the bias coefficients in row 0.
    return regressionCoeff.rows(1, regressionCoeff.getM() - 1);
}
// Solve the weighted least-squares problem via the Moore-Penrose
// pseudo-inverse: coeff = pinv(W*X) * (W*Y), where the weighting was
// already applied to both matrices by weightData().
// (Note: the misspelled name matches the declaration and is part of the
// public interface, so it is left as-is.)
void LWR::findRegressionCoefficieents()
{
    regressionCoeff=weightedInputs.pseudoInverse()*weightedOutputs;
    
}
// Predict the sensor output for the current input, then refit the local
// model and record the sample. Returns a zero vector until enough history
// (at least 2 samples) has accumulated to regress on.
Matrix LWR::predict()
{
    Matrix prediction(outputNum, 1);

    if (inputHistory.getM() < 2)
    {
        // Not enough data yet: just remember the sample.
        addToMemory(currentInput, desiredOutput);
        return prediction;
    }

    // Predict with the coefficients fitted on the previous step,
    // transposed back into a column vector.
    prediction = (currentInput * regressionCoeff)^T;

    // Refit the locally weighted model around the current input.
    findWeights();
    weightData();
    findRegressionCoefficieents();

    addToMemory(currentInput, desiredOutput);
    return prediction;
}
