/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */

package nnet2.net;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.Random;

import nnet2.learning.TestMethods;
import nnet2.net.functions.ActivationFunction;
import nnet2.net.functions.Sigmoid;

/**
 *
 * @author kalinskia
 */
public class RecursiveMultiLayerPerceptron {

    private double[][] w1;    // input-to-hidden weights [input index][hidden index]
    private double[] w2;      // hidden-to-output weights
    private InputLayer input; // input layer including recursive memory of past
                              // inputs and past outputs

    private final int inMemorySize;   // how many past inputs are remembered
    private final int outMemorySize;  // how many past outputs are remembered

    private double[] u; // most recent pre-activation values of the hidden layer
    private double g;   // most recent pre-activation value of the output neuron

    // Iteration number (time). NOTE(review): never incremented anywhere in this
    // class, so it stays 0 — confirm whether the teaching loop should advance it.
    private int t = 0;

    // Shared RNG: creating a fresh Random per weight matrix (as before) can
    // yield identically-seeded generators when called in quick succession.
    private static final Random RANDOM = new Random();

    private ActivationFunction activationFunction = new Sigmoid(1);
    private double leaningRate = 0.7; // (sic) spelling kept — exposed via public getter/setter

    // History of output values consumed by the recursive gradient terms.
    // NOTE(review): nothing in this class ever adds to this buffer, so w2at()
    // always resolves to 0 — confirm whether it should be pushed each sample.
    private LinkedList<Double> w2Buffer = new LinkedList<Double>();

    // One entry per teaching sample: the raw (not squared) prediction error.
    private ArrayList<Double> errorLog = new ArrayList<Double>();

    /**
     * Creates a recursive MLP with a single external input.
     *
     * @param inMemory  number of past inputs fed back into the input layer
     * @param outMemory number of past outputs fed back into the input layer
     * @param hidden    number of hidden neurons
     * @param bias      whether the input layer carries a bias term
     */
    public RecursiveMultiLayerPerceptron(int inMemory, int outMemory, int hidden, boolean bias){
        this.inMemorySize = inMemory;
        this.outMemorySize = outMemory;
        input = new InputLayer(1, inMemory, outMemory, bias);
        w1 = randomWeights(input.getSize(), hidden);
        w2 = randomWeights(hidden);
    }

    /**
     * Applies input d, updates the recursive memory, and calculates the
     * prediction for the next time point.
     *
     * @param d current input value
     * @return prediction for the next time point
     */
    public double nextForecast(double d){
        g = calculateAllLayers(d);
        input.pushInput(d);
        input.pushOutput(getCurrentForecast());

        return getCurrentForecast();
    }

    /**
     * Predicts and learns. Takes a sample and the value the net should have
     * returned, performs one gradient-descent step on both weight layers, and
     * returns the prediction error (forecast minus correct response).
     *
     * @param sample          current input value
     * @param correctResponse desired network output for this sample
     * @return the (signed, not squared) prediction error
     */
    public double nextTeachingSample(double sample, double correctResponse){
        g = calculateAllLayers(sample);
        double error = getCurrentForecast() - correctResponse;
        errorLog.add(error);

        // Gradients are evaluated against the pre-update weights, so the new
        // values are accumulated in clones and swapped in afterwards.
        double[][] w1clone = cloneW1();
        double[] w2clone = cloneW2();

        for(int a = 0; a < w2.length; a++)
            w2clone[a] += -this.getLeaningRate()*error*gradientW2(a);

        for(int a = 0; a < w1[0].length; a++)
            for(int b = 0; b < w1.length; b++){
                w1clone[b][a] += -this.getLeaningRate()*error*gradientW1(a, b);
            }

        this.w1 = w1clone;
        this.w2 = w2clone;

        input.pushInput(sample);
        input.pushOutput(getCurrentForecast());
        return error;
    }

    // <editor-fold defaultstate="collapsed" desc="Gradients">

    /**
     * Partial derivative of the output with respect to w1[b][a] (weight from
     * input neuron b to hidden neuron a), evaluated at the last forward pass.
     *
     * NOTE(review): the outer loop starts at i = 1 while gradientW2's starts
     * at i = 0 — confirm which indexing is intended.
     */
    public double gradientW1(int a, int b){
        int K = w1[0].length;          // size of hidden layer
        int P = this.outMemorySize;    // output memory size
        int N = this.inMemorySize + 1; // input memory size + current input
        double d = activationFunction.df(g);
        double innerSum, outerSum = 0;
        for (int i = 1; i < K; i++) {
            innerSum = 0;
            for (int j = 1; j < P; j++) {
                // Recursive contribution of past outputs via the feedback weights.
                innerSum += w1[j+N][i]*w2at(t - P - 1 + j);
            }
            if(i == a) innerSum += input.get(b);
            outerSum += w2[i]*activationFunction.df(u[i])*innerSum;
        }
        d *= outerSum;
        return d;
    }

    /**
     * Partial derivative of the output with respect to w2[a] (weight from
     * hidden neuron a to the output), evaluated at the last forward pass.
     */
    public double gradientW2(int a){
        int K = w1[0].length;          // size of hidden layer
        int P = this.outMemorySize;    // output memory size
        int N = this.inMemorySize + 1; // input memory size + current input
        double d = activationFunction.df(g);
        double innerSum, outerSum = 0;
        for (int i = 0; i < K; i++) {
            innerSum = 0;
            for (int j = 1; j < P; j++) {
                // Recursive contribution of past outputs via the feedback weights.
                innerSum += w1[j+N][i]*w2at(t - P - 1 + j);
            }
            outerSum += w2[i]*activationFunction.df(u[i])*innerSum;
        }
        d *= v(a) + outerSum;

        return d;
    }
    // </editor-fold>

    // <editor-fold defaultstate="collapsed" desc="Layers calculation">

    /** Runs one full forward pass for input d and returns the output pre-activation. */
    private double calculateAllLayers(double d) {
        input.process(new double[]{d});
        u = computeInputHidden();
        g = computeHiddenOutput(u);
        return g;
    }

    /** Weighted sum of the activated hidden layer — the output pre-activation. */
    private double computeHiddenOutput(double[] u) {
        double tempG = 0;
        for (int i = 0; i < w2.length; i++) {
            tempG += w2[i] * getActivationFunction().f(u[i]);
        }
        return tempG;
    }

    /** Pre-activation of each hidden neuron: w1 applied to the input layer. */
    private double[] computeInputHidden() {
        double[] u = new double[w1[0].length];
        for (int i = 0; i < w1[0].length; i++) {
            // for each hidden neuron
            for (int j = 0; j < w1.length; j++) {
                // for each input neuron
                u[i] += w1[j][i] * input.get(j);
            }
        }
        return u;
    }
    // </editor-fold>

    // <editor-fold defaultstate="collapsed" desc="Helper functions">

    /** Deep copy of the input-hidden weight matrix (row-by-row clone). */
    private double[][] cloneW1(){
        double[][] res = new double[w1.length][];
        for(int i = 0; i < w1.length; i++){
            res[i] = w1[i].clone();
        }
        return res;
    }

    /** Shallow copy of the hidden-output weight vector. */
    private double[] cloneW2(){
        return Arrays.copyOf(w2, w2.length);
    }

    /**
     * Output history at time t; out-of-range indices contribute 0.
     * The upper bound check fixes a latent IndexOutOfBoundsException: the
     * original indexed w2Buffer unconditionally for t >= 1, but the buffer is
     * never populated, so any non-negative index would have thrown.
     */
    private double w2at(int t){
        if (t < 1 || t >= w2Buffer.size())
            return 0;
        return w2Buffer.get(t);
    }

    /** Activation of hidden neuron i from the last forward pass. */
    private double v(int i){
        return this.activationFunction.f(u[i]);
    }

    /** n-by-k matrix of small random weights in [0, 0.2). */
    private double[][] randomWeights(int n, int k) {
        double[][] w = new double[n][k];
        for (int i = 0; i < n; i++) {
            for (int j = 0; j < k; j++) {
                w[i][j] = RANDOM.nextDouble() * 0.2;
            }
        }
        return w;
    }

    /** Vector of n small random weights in [0, 0.2). */
    private double[] randomWeights(int n) {
        double[] w = new double[n];
        for (int i = 0; i < n; i++) {
            w[i] = RANDOM.nextDouble() * 0.2;
        }
        return w;
    }
    // </editor-fold>

    // <editor-fold defaultstate="collapsed" desc="Getters ans setters">

    public ActivationFunction getActivationFunction() {
        return activationFunction;
    }

    public void setActivationFunction(ActivationFunction activationFunction) {
        this.activationFunction = activationFunction;
    }

    /**
     * @return the leaningRate
     */
    public double getLeaningRate() {
        return leaningRate;
    }

    /**
     * @param leaningRate the leaningRate to set
     */
    public void setLeaningRate(double leaningRate) {
        this.leaningRate = leaningRate;
    }

    /** Activated network output from the last forward pass. */
    public double getCurrentForecast() {
        return activationFunction.f(this.g);
    }

    /**
     * Mean squared error over the last n teaching samples, or over the whole
     * log when n is non-positive or larger than the log.
     *
     * @param n window size (last n samples); values &lt; 1 mean "all"
     * @return the MSE, or 0 when no samples have been seen yet
     *         (the original divided by zero and returned NaN)
     */
    public double getMSE(int n){
        if (errorLog.isEmpty())
            return 0;
        double sum = 0;
        if(n < 1 || errorLog.size() < n){
            for(Double d: errorLog)
                sum += d*d;
            return sum/errorLog.size();
        }
        else{
            for(int i = errorLog.size()-n; i < errorLog.size(); i++){
                sum += errorLog.get(i)*errorLog.get(i);
            }
            return sum/n;
        }
    }
    // </editor-fold>

    /**
     * Demo: trains the net online on a distorted sine wave, printing a CSV
     * trace (expected;predicted;error) and the overall MSE.
     */
    public static void main(String[] args) {
        RecursiveMultiLayerPerceptron rmlp = new RecursiveMultiLayerPerceptron(4, 4, 5, true);
        ArrayList<Double> test = TestMethods.sinWithDistortion(5000, 2, 5, 0.1, 1);
        System.out.println("Expected;predicted;error");
        double sum = 0;
        for (Double d: test){
            double err = rmlp.nextTeachingSample(d, d);
            System.out.format("%f;%f;%f\n", d, rmlp.getCurrentForecast(), err);
            sum += err*err;
        }
        System.out.format("MSE: %f", sum/test.size());
    }
}
