package nnet2.net;

import java.util.ArrayList;
import java.util.Arrays;

import nnet2.gui.PlotPanel;
import nnet2.learning.TestMethods;
import nnet2.net.functions.ActivationFunction;
import nnet2.net.functions.Sigmoid;

public class RMLP {

    /** Activation shared by the hidden layer and the output neuron. */
    private ActivationFunction func = new Sigmoid(1f);
    private double learningRate = 0.04f;
    private double momentum = 0.01f;
    private InputLayer x;       // input vector after adding the recursion taps and the bias term
    private double[] u;         // hidden-layer net inputs (before the activation function)
    private double[] v;         // hidden-layer outputs (after the activation function)
    private double g;           // net input of the single output neuron
    private double y;           // network output
    private double[][] w1;      // weights between the input layer and the hidden layer
    private double[][] w2;      // weights between the hidden layer and the output; only column 0 is used
    private int hiddenSize;
    @SuppressWarnings("unused")
    private int inputSize;      // kept for completeness; x.getSize() is used instead
    private int outputSize;
    private int inputRecursionSize;
    private int outputRecursionSize;
    // Output-layer gradient vector for every time step of the current training
    // pass; read back by the recurrent part of the gradient formulas. Rebuilt
    // from scratch on each train() call (see train), which fixes the unbounded
    // growth the old add(t, ...) insertion caused across epochs.
    private ArrayList<double[]> w2gradients = new ArrayList<double[]>();

    /**
     * Builds a recurrent MLP.
     *
     * @param inputSize           number of plain (non-recursive) inputs
     * @param hiddenSize          number of hidden neurons
     * @param outputSize          number of outputs (the gradient math below supports only 1)
     * @param recursiveInputSize  number of delayed input values fed back into the input layer
     * @param recursiveOutputSize number of delayed output values fed back into the input layer
     * @param bias                whether the input layer carries a bias term
     */
    public RMLP(int inputSize, int hiddenSize, int outputSize, int recursiveInputSize, int recursiveOutputSize, boolean bias) {
        initialize(inputSize, hiddenSize, outputSize, recursiveInputSize, recursiveOutputSize, bias);
    }

    /** Same as the full constructor with the bias term enabled. */
    public RMLP(int inputSize, int hiddenSize, int outputSize, int recursiveInputSize, int recursiveOutputSize) {
        initialize(inputSize, hiddenSize, outputSize, recursiveInputSize, recursiveOutputSize, true);
    }

    /** Allocates the layers and weight matrices. Called only from the constructors. */
    private void initialize(int inputSize, int hiddenSize, int outputSize, int recursiveInputSize, int recursiveOutputSize, boolean bias) {
        this.inputSize = inputSize;
        this.hiddenSize = hiddenSize;
        this.outputSize = outputSize;
        this.inputRecursionSize = recursiveInputSize;
        this.outputRecursionSize = recursiveOutputSize;

        x = new InputLayer(inputSize, recursiveInputSize, recursiveOutputSize, bias);
        u = new double[hiddenSize];
        v = new double[hiddenSize];

        // x.getSize() already accounts for the recursion taps and the bias.
        w1 = randomWeights(x.getSize(), hiddenSize);
        w2 = randomWeights(hiddenSize, outputSize);
    }

    /**
     * Runs one forward pass and advances the recurrent state.
     *
     * @param inputs current input sample (only inputs[0] is fed into the recursion)
     * @return a one-element array holding the network output
     */
    public double[] process(double[] inputs) {
        calculateValues(inputs);
        recursiveStep(inputs);
        return new double[]{y};
    }

    /**
     * Runs one on-line training pass over the series: at each step t the net
     * predicts set.get(t + 1) from set.get(t) and the weights are adjusted with
     * a momentum term.
     *
     * @param set time series to learn one-step-ahead prediction on
     * @param err intended early-stop threshold; currently unused (kept for API compatibility)
     * @return mean squared error of the pass (divided by set.size() as before,
     *         although only set.size() - 1 predictions are made)
     */
    public double train(ArrayList<Double> set, double err) {
        // Nothing to predict from fewer than two samples; also avoids 0/0 = NaN below.
        if (set.size() < 2) {
            return 0;
        }

        double error, errSqr;
        // Previous-step weight updates, one slot per weight, read by the momentum term.
        double[] delta2hist = new double[this.hiddenSize];
        double[][] delta1hist = new double[this.x.getSize()][this.hiddenSize];
        double mse = 0f;

        // getGradientW2atTime is only ever called with an index < t, i.e. an
        // entry appended earlier in THIS pass (negative indices yield zeros),
        // so the history can be rebuilt each pass. This fixes the memory leak
        // the old add(t, ...) insertion caused across repeated train() calls.
        this.w2gradients.clear();

        for (int t = 0; t < set.size() - 1; t++) {
            double[] in = new double[]{set.get(t)};
            calculateValues(in); // forward pass; result lands in y

            // One-step-ahead prediction error.
            // NOTE(review): with error = y - target, "w += lr * error * grad"
            // below performs gradient ASCENT on the squared error unless
            // ActivationFunction.df compensates the sign — verify.
            error = y - set.get(t + 1);
            errSqr = error * error / 2;

            double[] gradW2 = new double[this.hiddenSize];
            double delta;
            // Output-layer weights.
            for (int a = 0; a < this.hiddenSize; a++) {
                gradW2[a] = gradientW2(a, t);
                delta = getLearningRate() * error * gradW2[a] + this.getMomentum() * delta2hist[a];
                // BUG FIX: was delta2hist[a + 1] = delta, so the momentum term
                // read the same-step delta of neuron a-1 instead of this
                // neuron's previous-step delta (and neuron 0 never had any).
                delta2hist[a] = delta;
                this.w2[a][0] += delta;
            }
            this.w2gradients.add(gradW2); // index t, since the list was cleared above

            // Hidden-layer weights.
            for (int a = 0; a < this.x.getSize(); a++) {
                for (int b = 0; b < this.hiddenSize; b++) {
                    delta = getLearningRate() * error * gradientW1(b, a, t) + this.getMomentum() * delta1hist[a][b];
                    // BUG FIX: was delta1hist[a + 1][b + 1] = delta — the same
                    // off-by-one as in the output layer above.
                    delta1hist[a][b] = delta;
                    w1[a][b] += delta;
                }
            }

            recursiveStep(in);
            mse += errSqr;
        }
        // Kept dividing by set.size() (not size - 1) for backward compatibility
        // of the returned value.
        mse = mse / set.size();
        return mse;
    }

    /**
     * Partial derivative of the network output with respect to hidden-layer
     * weight w1[b][a] at time t, including the recurrent contribution through
     * the delayed outputs fed back into the input layer.
     *
     * @param a hidden-neuron index
     * @param b input-layer index
     * @param t current time step
     */
    private double gradientW1(int a, int b, int t) {
        int N = this.inputRecursionSize;
        int P = this.outputRecursionSize;
        int K = this.hiddenSize;
        double hidSum = 0;
        double recSum;
        for (int i = 0; i < K; i++) {
            recSum = 0;
            // Recurrent term: weights of the delayed-output taps sit at offset
            // N + 1 in the input layer (past the plain inputs and the
            // input-recursion taps — "+1 because in the book N = inputs + input memory").
            for (int j = 1; j < P; j++) {
                recSum += w1[j + N + 1][i] * getGradientW2atTime(t - P - 1 + j)[a];
            }
            // Direct term: du_a/dw1[b][a] = x_b, present only for i == a.
            // BUG FIX: was accumulated inside the j-loop, i.e. P-1 times
            // instead of once (coincidentally harmless for P == 2).
            if (i == a) {
                recSum += x.get(b);
            }
            hidSum += w2[i][0] * getFunc().df(u[i]) * recSum;
        }
        return getFunc().df(g) * hidSum;
    }

    /**
     * Partial derivative of the network output with respect to output-layer
     * weight w2[a][0] at time t: the direct term v[a] plus the recurrent
     * contribution through the delayed outputs.
     *
     * @param a hidden-neuron index
     * @param t current time step
     */
    private double gradientW2(int a, int t) {
        int N = this.inputRecursionSize;
        int P = this.outputRecursionSize;
        int K = this.hiddenSize;

        double sum = 0;
        double recSum;
        for (int i = 0; i < K; i++) {
            recSum = 0;
            for (int j = 1; j < P; j++) {
                // +1 because in the book N = inputs + input memory.
                recSum += w1[j + N + 1][i] * getGradientW2atTime(t - P - 1 + j)[a];
            }
            sum += w2[i][0] * getFunc().df(u[i]) * recSum;
        }
        return getFunc().df(g) * (v[a] + sum);
    }

    /**
     * Returns the output-layer gradient recorded at time t of the current
     * training pass, or an all-zero vector for t before the pass started.
     */
    private double[] getGradientW2atTime(int t) {
        if (t < 0) {
            return new double[this.hiddenSize]; // all zeros
        } else {
            return this.w2gradients.get(t);
        }
    }

    /**
     * Forward pass only — does NOT advance the recurrent state.
     *
     * @param inputs current input sample
     * @return a one-element array holding the network output (also kept in y)
     */
    public double[] calculateValues(double[] inputs) {
        double[] resp = new double[this.outputSize];

        x.process(inputs);

        // Input layer -> hidden layer.
        for (int j = 0; j < this.hiddenSize; j++) {
            u[j] = 0;
            for (int i = 0; i < this.x.getSize(); i++) {
                u[j] += w1[i][j] * x.get(i);
            }
            v[j] = getFunc().f(u[j]);
        }

        // Hidden layer -> single output neuron (only column 0 of w2 is used).
        g = 0f;
        for (int i = 0; i < this.hiddenSize; i++) {
            g += w2[i][0] * v[i];
        }

        y = getFunc().f(g);

        resp[0] = y;
        return resp;
    }

    /** Pushes the current input and output into the recurrent delay lines. */
    private void recursiveStep(double[] inputs) {
        x.pushInput(inputs[0]); // only the first input participates in the recursion
        x.pushOutput(y);
    }

    /**
     * Allocates an n-by-k weight matrix. Despite the name, every weight is the
     * constant 0.2 (randomisation is commented out for reproducible tests).
     */
    private double[][] randomWeights(int n, int k) {
        double[][] w = new double[n][k];
        for (int i = 0; i < n; i++) {
            for (int j = 0; j < k; j++) {
                w[i][j] = /*r.nextDouble() */ 0.2;
            }
        }
        return w;
    }

    /** Dumps both weight matrices to stdout (debug aid). */
    public void printWeights() {
        System.out.println(Arrays.deepToString(w1));
        System.out.println(Arrays.deepToString(w2));
    }

    /** Demo: learns a distorted sine wave and plots target vs. prediction. */
    public static void main(String[] args) {
        RMLP net = new RMLP(1, 2, 1, 2, 2, true);
        ArrayList<Double> values = TestMethods.sinWithDistortion(500, 1, 3, 0.1, 0.5);
        for (int i = 0; i < 500; i++) {
            net.train(values, 0.000001f);
        }

        ArrayList<Double> fc = new ArrayList<Double>();
        for (double f : values) {
            fc.add(net.calculateValues(new double[]{f})[0]);
        }

        PlotPanel.showPlot(values, fc);
    }

    public ActivationFunction getFunc() {
        return func;
    }

    public void setFunc(ActivationFunction func) {
        this.func = func;
    }

    public double getLearningRate() {
        return learningRate;
    }

    public void setLearningRate(double learningRate) {
        this.learningRate = learningRate;
    }

    public double getMomentum() {
        return momentum;
    }

    public void setMomentum(double momentum) {
        this.momentum = momentum;
    }
}
