﻿using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Windows.Forms;
using System.IO;
using System.Threading;
using System.Diagnostics;
using NPlot;

namespace NN
{
    /// <summary>
    /// Feed-forward neural network trained with backpropagation, with optional
    /// validation-set checking and input-lesioning analysis after convergence.
    /// NOTE(review): the type name should be PascalCase ("BackProp") per .NET
    /// conventions, but renaming would break external callers.
    /// </summary>
    public class backprop
    {
        // Step size applied to the accumulated weight/bias updates each epoch.
        public double learningRate { get; set; }
        
        // Hidden-layer activation function:
        // 't' = hyperbolic tangent
        // 's' = sigmoid
        public char nnFunction { get; set; }

        // UI window notified of progress (BPErrorChanged) and completion (EndBackProp).
        public MainWindow ParentWindow { get; set; }

        // Network responses for the data set most recently run through calcOutputs.
        public double[,] lastOutputs;

        // Per-epoch summed squared training error and its moving average.
        public List<double> error;
        public List<double> errorMovAvg;

        // Per-lesioned-input metrics recorded by lesion().
        public List<double> lesionError;
        public List<double> lesionAam;
        public List<double> lesionCorrelation;

        // Per-epoch validation error and its moving average.
        public List<double> vError;
        public List<double> vErrorMovAvg;

        // Per-epoch average angle measure and correlation against training outputs.
        public List<double> aam;
        public List<double> correlation;

        /// <summary>
        /// Builds a trainer for the given topology and data sets.
        /// </summary>
        /// <param name="Layers">Node count per layer, input layer first.</param>
        /// <param name="Inputs">Training inputs, one sample per row.</param>
        /// <param name="Outputs">Desired training outputs, one sample per row.</param>
        /// <param name="ValInputs">Optional validation inputs; null disables validation.</param>
        /// <param name="ValOutputs">Optional validation outputs; null disables validation.</param>
        /// <param name="ConvergenceOption">'1'..'4' — see checkConvergence for the meanings.</param>
        /// <param name="ConvergenceParameter">Threshold used by the chosen convergence test.</param>
        /// <param name="Lesioning">Whether to run the lesioning analysis after convergence.</param>
        public backprop(int[] Layers, double[,] Inputs, double[,] Outputs, double[,] ValInputs,
            double[,] ValOutputs, char ConvergenceOption, double ConvergenceParameter, bool Lesioning)
        {
            layers = Layers;
            ParentWindow = new MainWindow();
            scale = 1;

            inputs = Inputs;
            lesionInputs = new double[Inputs.GetLength(0), Inputs.GetLength(1)];

            outputs = Outputs;
            nnOutputs = new double[Outputs.GetLength(0), Outputs.GetLength(1)];

            // Validation is only enabled when both arrays are supplied.
            if (ValInputs != null && ValOutputs != null)
            {
                valInputs = ValInputs;
                valOutputs = ValOutputs;
                vnnOutputs = new double[valOutputs.GetLength(0), valOutputs.GetLength(1)];
                useValidationSet = true;
                Scale(valInputs, scale);
            }
            else
            {
                useValidationSet = false;
            }

            // NOTE(review): Scale() only records scaleMin/scaleMax; this second call
            // overwrites the bounds just computed from the validation set above, so
            // Fprop's input scaling is ultimately based on the training inputs only
            // — confirm that is intended.
            Scale(inputs, scale);
            learningRate = 0.00025;
            latestValidationError = -1;

            nnFunction = 't';

            rand = new Random();

            convergenceParameter = ConvergenceParameter;
            convergenceOption = ConvergenceOption;

            lesioning = Lesioning;
            converged = false;
            abort = false;

            // initialize lesioning result lists (the per-epoch training lists are
            // created in runBP via initLists)
            lesionError = new List<double>();
            lesionAam = new List<double>();
            lesionCorrelation = new List<double>();

        }
        /// <summary>
        /// Minimal constructor: builds and randomly initializes a network with the
        /// given topology but no training data attached. Intended for driving the
        /// network directly via Fprop / SetWeights / SetBiases.
        /// </summary>
        public backprop(int[] Layers)
        {
            this.layers = Layers;
            this.scale = 1;
            // rand must exist before initWeightsAndBiases() draws the random weights
            this.rand = new Random();
            initWeightsAndBiases();
        }

        /// <summary>
        /// Returns a snapshot of the network weights. BUG FIX: the original used
        /// Array.Clone(), which only copies the outer array of a jagged array, so
        /// callers could still mutate the live weights through the shared inner
        /// arrays; this returns a genuine deep copy.
        /// </summary>
        public double[][][] GetWeights()
        {
            double[][][] copy = new double[weights.Length][][];
            for (int i = 0; i < weights.Length; i++)
            {
                copy[i] = new double[weights[i].Length][];
                for (int j = 0; j < weights[i].Length; j++)
                    copy[i][j] = (double[])weights[i][j].Clone();
            }
            return copy;
        }
        /// <summary>
        /// Returns a snapshot of the network biases. BUG FIX: Array.Clone() on a
        /// jagged array is shallow (inner arrays are shared with the live network);
        /// this returns a deep copy so callers cannot mutate internal state.
        /// </summary>
        public double[][] GetBiases()
        {
            double[][] copy = new double[biases.Length][];
            for (int i = 0; i < biases.Length; i++)
                copy[i] = (double[])biases[i].Clone();
            return copy;
        }
        /// <summary>Replaces the network weights; the array is stored by reference, not copied.</summary>
        public void SetWeights(double[][][] w)
        {
            this.weights = w;
        }

        /// <summary>Replaces the network biases; the array is stored by reference, not copied.</summary>
        public void SetBiases(double[][] b)
        {
            this.biases = b;
        }

        /// <summary>Replaces the layer topology (node count per layer, input layer first).</summary>
        public void SetLayers(int[] l)
        {
            this.layers = l;
        }


        // Records the minimum and maximum of 'data' into scaleMin/scaleMax; the
        // actual rescaling to [-scale, +scale] happens later, inside Fprop.
        // (The 'scale' parameter is unused here but kept for interface compatibility.)
        public void Scale(double[,] data, int scale)
        {
            scaleMax = double.MinValue;
            scaleMin = double.MaxValue;

            // BUG FIX: the original used 'else if', so an element that updated the
            // max was never considered for the min. For a single-element array —
            // or any data whose running max is always hit first (e.g. strictly
            // increasing values) — scaleMin was left at double.MaxValue, which
            // corrupts the input scaling done in Fprop.
            for (int i = 0; i < data.GetLength(0); i++)
                for (int j = 0; j < data.GetLength(1); j++)
                {
                    if (data[i, j] > scaleMax)
                        scaleMax = data[i, j];
                    if (data[i, j] < scaleMin)
                        scaleMin = data[i, j];
                }
        }

        // Allocate every weight / bias / activation array from the 'layers'
        // topology and draw the initial random values:
        //   weights: uniform in [-0.005, 0.005)
        //   biases:  uniform in [-1, 1)
        // The initial values are also snapshotted into initialWeights /
        // initialBiases so retrain() can roll the network back to its start state.
        private void initWeightsAndBiases()
        {
            // one weight matrix (and bias vector) per layer-to-layer transition
            weights = new double[layers.Length - 1][][];
            initialWeights = new double[layers.Length - 1][][];
            weightUpdates = new double[layers.Length - 1][][];
            savedWeights = new double[layers.Length - 1][][];

            biases = new double[layers.Length - 1][];
            initialBiases = new double[layers.Length - 1][];
            biasUpdates = new double[layers.Length - 1][];
            savedBiases = new double[layers.Length - 1][];

            // one activation / output / delta vector per layer (output layer included)
            activations = new double[layers.Length][];
            nodeOutputs = new double[layers.Length][];
            deltas = new double[layers.Length][];

            for (int i = 0; i < weights.GetLength(0); i++)
            {
                // weights[i][j][k] connects node j of layer i to node k of layer i+1
                weights[i] = new double[layers[i]][];
                initialWeights[i] = new double[layers[i]][];
                weightUpdates[i] = new double[layers[i]][];
                savedWeights[i] = new double[layers[i]][];

                // biases[i][k] feeds node k of layer i+1
                biases[i] = new double[layers[i + 1]];
                initialBiases[i] = new double[layers[i + 1]];
                biasUpdates[i] = new double[layers[i + 1]];
                savedBiases[i] = new double[layers[i + 1]];

                activations[i] = new double[layers[i]];
                nodeOutputs[i] = new double[layers[i]];
                deltas[i] = new double[layers[i]];

                // Initialize weight arrays and set initial random weights
                for (int j = 0; j < layers[i]; j++)
                {
                    weights[i][j] = new double[layers[i + 1]];
                    initialWeights[i][j] = new double[layers[i + 1]];
                    weightUpdates[i][j] = new double[layers[i + 1]];
                    savedWeights[i][j] = new double[layers[i + 1]];

                    for (int k = 0; k < layers[i + 1]; k++)
                    {
                        // small symmetric start keeps tanh/sigmoid in their linear region
                        weights[i][j][k] = rand.NextDouble() * 0.01 - .005;
                        initialWeights[i][j][k] = weights[i][j][k];
                        weightUpdates[i][j][k] = 0;
                        savedWeights[i][j][k] = 0;
                    }
                }

                // Initialize bias arrays and set initial random values in [-1, 1)
                for (int j = 0; j < layers[i + 1]; j++)
                {
                    biases[i][j] = rand.NextDouble() * 2 - 1;
                    initialBiases[i][j] = biases[i][j];
                    biasUpdates[i][j] = 0;
                    savedBiases[i][j] = 0;
                }
            }

            // the loop above only sized layers 0..n-2; add the output layer's vectors
            activations[layers.Length - 1] = new double[layers[layers.Length - 1]];
            nodeOutputs[layers.Length - 1] = new double[layers[layers.Length - 1]];
            deltas[layers.Length - 1] = new double[layers[layers.Length - 1]];

        }

        // Create fresh per-epoch metric histories for a new training run.
        private void initLists()
        {
            // training / validation error series and their moving averages
            error = new List<double>();
            errorMovAvg = new List<double>();
            vError = new List<double>();
            vErrorMovAvg = new List<double>();

            // per-epoch quality metrics against the training outputs
            aam = new List<double>();
            correlation = new List<double>();
        }

        // Main backpropagation training loop: repeats full passes over the
        // training set until a convergence criterion fires (checkConvergence)
        // or abortTraining() is called. Notifies ParentWindow once per epoch.
        public void runBP()
        {
            initWeightsAndBiases();
            initLists();
            abort = false;
            // BUG FIX: 'converged' was never reset here (only 'abort' was), so a
            // second call to runBP after a successful run exited immediately
            // without training.
            converged = false;

            epochs = 0;
            timer = new Stopwatch();
            timer.Start();

            while (!converged && !abort)
            {
                // One full pass over the training set; training == true also
                // accumulates the weight/bias updates for this epoch.
                error.Add(calcOutputs(inputs, outputs, ref nnOutputs, true));

                errorMovAvg.Add(calcValMovAvg(error[epochs], errorMovAvg));
                aam.Add(avgAngleMeasure(outputs, nnOutputs));
                correlation.Add(calcCorrelation(outputs, nnOutputs));

                updateWeights();
                checkConvergence();
                epochs++;

                ParentWindow.BPErrorChanged();
            }
        }

        // Check for convergence of the algorithm based on...
        //
        // convergence option = 1: converges when average error is below convergenceParameter
        //                    = 2: converges when average angle measure is above convergenceParameter
        //                    = 3: converges when correlation is above convergenceParameter
        //                    = 4: early stopping driven by the validation-error moving average
        public void checkConvergence()
        {
            // convergence based on average error per training sample
            if (convergenceOption == '1' && (error[epochs] / inputs.GetLength(0) < convergenceParameter))
            {
                // when a validation set exists it must also agree before we stop
                converged = useValidationSet ? this.validateConvergence() : true;

                if (converged)
                {
                    timer.Stop();
                    // BUG FIX: the original wrote '(int)timer.ElapsedMilliseconds / 1000'
                    // (integer division), truncating the double field to whole seconds;
                    // use floating-point division, consistent with lesionTime in lesion().
                    this.backPropTrainingTime = timer.ElapsedMilliseconds / 1000.0;
                    lesion();
                    ParentWindow.EndBackProp();
                }
            }
            // convergence based on average angle measure
            // NOTE(review): unlike option '1', options '2'-'4' neither stop the
            // timer nor call ParentWindow.EndBackProp(); presumably the caller
            // handles completion for these options — confirm.
            else if (convergenceOption == '2' && (Math.Abs(aam[epochs]) > convergenceParameter))
            {
                converged = true;
            }
            // convergence based on correlation value
            else if (convergenceOption == '3' && (Math.Abs(correlation[epochs]) > convergenceParameter))
            {
                converged = true;
            }
            // convergence based on validation set (early stopping)
            // BUG FIX: added the 'epochs >= convergenceParameter' and list-length
            // guards; the original indexed vErrorMovAvg[epochs - (j + 1)]
            // unconditionally, which is out of range during the first
            // convergenceParameter epochs (and whenever vErrorMovAvg is empty).
            else if (convergenceOption == '4' && epochs >= convergenceParameter && vErrorMovAvg.Count > epochs)
            {
                // NOTE(review): the break below fires on an *increase*, so the loop
                // only completes when the moving average was non-increasing for
                // convergenceParameter epochs — the opposite of the original
                // comment's "error has been increasing" rule. Behavior kept as-is;
                // confirm the intended direction.
                int j = 0;
                for (j = 0; j < convergenceParameter; j++)
                    if (vErrorMovAvg[epochs - j] > vErrorMovAvg[epochs - (j + 1)])
                        break;

                if (j == convergenceParameter)
                {
                    converged = true;
                    retrain();
                }
            }
        }

        // Early stopping fired: roll the network back to its initial random state
        // and retrain it only up to the epoch where validation error was still
        // improving (epochs - convergenceParameter passes).
        private void retrain()
        {
            // restore the initial random weights and biases
            for (int layer = 0; layer < weights.GetLength(0); layer++)
                for (int node = 0; node < weights[layer].GetLength(0); node++)
                    initialWeights[layer][node].CopyTo(weights[layer][node], 0);

            for (int layer = 0; layer < biases.GetLength(0); layer++)
                initialBiases[layer].CopyTo(biases[layer], 0);

            // retrain the network up until the determined epoch
            for (int pass = 0; pass < epochs - convergenceParameter; pass++)
            {
                calcOutputs(inputs, outputs, ref nnOutputs, true);
                updateWeights();
            }
        }


        // Feed every row of Inputs through the network, store the network's
        // responses in NNOutputs (and lastOutputs), and return the summed squared
        // error 0.5 * sum((desired - actual)^2) against Outputs. When 'training'
        // is true, each sample's error is also backpropagated and the weight/bias
        // update accumulators are advanced.
        private double calcOutputs(double[,] Inputs, double[,] Outputs, ref double[,] NNOutputs, bool training)
        {
            double outE;
            double e = 0;

            int outputSize = Outputs.GetLength(1);
            int inputSize = Inputs.GetLength(1);
            int dataLength = Inputs.GetLength(0);

            double[] tempOut = new double[outputSize];
            double[] tempIn = new double[inputSize];
            double[] desiredOut = new double[outputSize];

            lastOutputs = new double[dataLength, outputSize];

            for (int i = 0; i < dataLength; i++)
            {
                for (int j = 0; j < inputSize; j++)
                    tempIn[j] = Inputs[i, j];

                for (int j = 0; j < outputSize; j++)
                    desiredOut[j] = Outputs[i, j];

                tempOut = Fprop(tempIn);

                for (int j = 0; j < outputSize; j++)
                    NNOutputs[i, j] = tempOut[j];

                if (training)
                {
                    backProp(tempOut, desiredOut);
                    updateWeightUpdates();
                }

                outE = 0;

                for (int j = 0; j < outputSize; j++)
                {
                    lastOutputs[i, j] = tempOut[j];
                    // BUG FIX: compare against the Outputs parameter, not the
                    // training-set field 'outputs' — the original read the field,
                    // producing a wrong (or out-of-range) error whenever this was
                    // called with validation or lesioned data.
                    outE += Math.Pow(Outputs[i, j] - tempOut[j], 2);
                }

                e += outE;
            }

            e *= 0.5;
            return e;
        }

        // Forward-propagate one input vector through the network and return the
        // output-layer values; the output layer is linear (no squashing applied).
        //
        // NOTE(review): this scales 'inputs' IN PLACE using the scaleMin/scaleMax
        // recorded by the last Scale() call, so the caller's array is mutated.
        // Callers in this file pass a scratch buffer that is refilled each sample,
        // but external callers should be aware.
        // NOTE(review): if scaleMax == scaleMin (constant data) the scaling below
        // divides by zero and yields NaN/Infinity — confirm inputs are never constant.
        public double[] Fprop(double[] inputs)
        {
            // scale inputs to [-scale, +scale] based on the recorded bounds
            for (int i = 0; i < inputs.GetLength(0); i++)
                inputs[i] = (((inputs[i] - scaleMin) / (scaleMax - scaleMin)) * (2 * scale)) - scale;

            // initialize activations arrays to zero (they are accumulated into below)
            for (int i = 0; i < activations.GetLength(0); i++)
                for (int j = 0; j < activations[i].GetLength(0); j++)
                    activations[i][j] = 0;

            // the input layer's activations are the scaled inputs themselves
            for (int i = 0; i < inputs.GetLength(0); i++)
                activations[0][i] = inputs[i];

            for (int i = 0; i < weights.GetLength(0); i++)
            {
                // apply the activation function to get layer i's node outputs
                for (int j = 0; j < activations[i].GetLength(0); j++)
                    if( nnFunction == 't' )
                        nodeOutputs[i][j] = Math.Tanh(activations[i][j]);
                    else if( nnFunction == 's' )
                        nodeOutputs[i][j] = 1 / (1 + Math.Exp(-activations[i][j]));

                // accumulate the weighted sums feeding layer i+1
                for (int j = 0; j < weights[i].GetLength(0); j++)
                    for (int k = 0; k < weights[i][j].GetLength(0); k++)
                        activations[i + 1][k] += nodeOutputs[i][j] * weights[i][j][k];

                // add the bias terms feeding layer i+1
                for (int k = 0; k < biases[i].GetLength(0); k++)
                    activations[i + 1][k] += biases[i][k];
            }

            // output layer: copy raw activations straight through (linear outputs)
            double[] output = new double[layers[layers.Length - 1]];

            for (int i = 0; i < output.Length; i++)
            {
                nodeOutputs[nodeOutputs.GetLength(0) - 1][i] = activations[activations.GetLength(0) - 1][i];
                output[i] = activations[activations.GetLength(0) - 1][i];
            }

            return output;
        }

        // Backpropagate the error signal through the network, filling the
        // 'deltas' vector for every layer (output layer first, then hidden
        // layers walking back towards the input).
        private void backProp(double[] outputs, double[] desired)
        {
            int outputLayer = layers.Length - 1;

            // output-layer deltas: plain error terms (the output nodes are linear)
            for (int node = 0; node < outputs.GetLength(0); node++)
                deltas[outputLayer][node] = desired[node] - outputs[node];

            // hidden-layer deltas: weighted sum of downstream deltas times the
            // derivative of the activation function
            for (int layer = layers.Length - 2; layer > 0; layer--)
            {
                for (int node = 0; node < deltas[layer].GetLength(0); node++)
                {
                    double sum = 0;

                    for (int next = 0; next < layers[layer + 1]; next++)
                        sum += weights[layer][node][next] * deltas[layer + 1][next];

                    if (nnFunction == 't')
                        deltas[layer][node] = (1 + nodeOutputs[layer][node]) * (1 - nodeOutputs[layer][node]) * sum;
                    else if (nnFunction == 's')
                        deltas[layer][node] = nodeOutputs[layer][node] * (1 - nodeOutputs[layer][node]) * sum;
                }
            }
        }

        // Accumulate the current sample's gradient contributions into the update
        // arrays; updateWeights() applies (and clears) them once per epoch.
        private void updateWeightUpdates()
        {
            for (int layer = 0; layer < weights.GetLength(0); layer++)
                for (int from = 0; from < weights[layer].GetLength(0); from++)
                    for (int to = 0; to < weights[layer][from].GetLength(0); to++)
                        weightUpdates[layer][from][to] += deltas[layer + 1][to] * nodeOutputs[layer][from];

            for (int layer = 0; layer < biases.GetLength(0); layer++)
                for (int node = 0; node < layers[layer + 1]; node++)
                    biasUpdates[layer][node] += deltas[layer + 1][node];
        }

        // Apply the accumulated per-epoch updates to every weight and bias,
        // scaled by the learning rate, then zero the accumulators for the next
        // epoch (batch gradient ascent on the negative error).
        private void updateWeights()
        {
            for (int layer = 0; layer < weights.GetLength(0); layer++)
                for (int from = 0; from < weights[layer].GetLength(0); from++)
                    for (int to = 0; to < weights[layer][from].GetLength(0); to++)
                    {
                        weights[layer][from][to] += learningRate * weightUpdates[layer][from][to];
                        weightUpdates[layer][from][to] = 0;
                    }

            for (int layer = 0; layer < biases.GetLength(0); layer++)
                for (int node = 0; node < layers[layer + 1]; node++)
                {
                    biases[layer][node] += learningRate * biasUpdates[layer][node];
                    biasUpdates[layer][node] = 0;
                }
        }

        // Average Angle Measure (AAM) between two equally-shaped data sets,
        // computed per output column and averaged. Each row contributes the angle
        // between the point (data1, data2) and the identity line, weighted by its
        // distance from the origin. Result is clamped to [-1, 1]; 1 means the
        // two data sets agree perfectly.
        private double avgAngleMeasure(double[,] data1, double[,] data2)
        {
            double aam = 0;

            for (int col = 0; col < data1.GetLength(1); col++)
            {
                double weightedAngleSum = 0;
                double distanceSum = 0;

                for (int row = 0; row < data1.GetLength(0); row++)
                {
                    // squared distance of the point (data1, data2) from the origin
                    double distSq = Math.Pow(data1[row, col], 2) + Math.Pow(data2[row, col], 2);
                    if (distSq == 0.0)
                        distSq = 0.1;   // avoid dividing by zero below
                    double dist = Math.Sqrt(distSq);

                    // cosine of the angle between the point and the identity line
                    double cosine = Math.Abs(data1[row, col] + data2[row, col]);
                    cosine = cosine / Math.Sqrt(2 * distSq);
                    if (cosine > 1)
                        cosine = 1;     // guard Acos against rounding overshoot
                    double angle = Math.Acos(cosine);

                    weightedAngleSum += (dist * angle);
                    distanceSum += dist;
                }

                double r = weightedAngleSum / distanceSum;
                r = 1 - (4 * r) / Math.PI;

                // clamp the per-column measure to [-1, 1]
                if (r > 1)
                    r = 1;
                else if (r < -1)
                    r = -1;

                aam += r;
            }

            aam = aam / data1.GetLength(1);

            return aam;
        }

        // Pearson correlation coefficient between corresponding columns of the
        // two data sets, clamped per column to [-1, 1] and averaged over all
        // columns. Zero-variance columns get their variance patched to a large
        // constant so the quotient stays finite.
        private double calcCorrelation(double[,] data1, double[,] data2)
        {
            double ax, ay, xt, yt, sxx, syy, sxy, r, corr;
            corr = 0;

            for (int i = 0; i < data1.GetLength(1); i++)
            {
                // find the means of the data sets
                ax = 0;
                ay = 0;
                for (int j = 0; j < data1.GetLength(0); j++)
                {
                    ax += data1[j, i];
                    ay += data2[j, i];
                }

                ax /= data1.GetLength(0);
                ay /= data2.GetLength(0);

                // find the correlation coefficient r
                sxx = 0;
                syy = 0;
                sxy = 0;

                for (int j = 0; j < data1.GetLength(0); j++)
                {
                    xt = data1[j, i] - ax;
                    yt = data2[j, i] - ay;

                    sxx += Math.Pow(xt, 2);
                    syy += Math.Pow(yt, 2);
                    sxy += xt * yt;
                }

                // BUG FIX: the original used 'else if' here, so when BOTH variances
                // were zero only sxx was patched and r became 0/0 = NaN, poisoning
                // the averaged result; guard each variance independently.
                if (sxx == 0.0)
                    sxx = 10000.0;
                if (syy == 0.0)
                    syy = 10000.0;

                r = sxy / Math.Sqrt(sxx * syy);

                // clamp against floating-point overshoot
                if (r > 1.0)
                    r = 1;
                else if (r < -1)
                    r = -1;

                corr += r;
            }

            corr /= data1.GetLength(1);

            return corr;
        }

        // Extend a moving-average series with a new error value. While fewer than
        // MOV_AVG_LENGTH entries exist this is an exact incremental mean over the
        // series; afterwards it is a rolling update of the previous average.
        // NOTE(review): in the steady-state branch, errorList[n - MOV_AVG_LENGTH]
        // is a previous *average*, not a raw error sample, so this is not a true
        // windowed mean — presumably an intentional smoothing hack; confirm.
        private double calcValMovAvg(double error, List<double> errorList)
        {
            int count = errorList.Count;

            if (count == 0)
                return error;

            if (count < MOV_AVG_LENGTH)
                return ((errorList[count - 1] * count + error) / (count + 1));

            return (errorList[count - 1] - (errorList[count - MOV_AVG_LENGTH] / MOV_AVG_LENGTH) + (error / MOV_AVG_LENGTH));
        }

        // Lesioning analysis: for each input column in turn, zero that column,
        // retrain the network for the same number of epochs, and record the
        // resulting error / angle measure / correlation so the least important
        // input can be identified. The converged weights are saved up front and
        // restored after every trial. Sets bestLesion to the input whose removal
        // produced the smallest error, and lesionTime to the elapsed seconds.
        private void lesion()
        {
            // need lesioning enabled and at least two inputs to compare
            if (!lesioning || inputs.GetLength(1) < 2)
                return;


            Stopwatch theTime = new Stopwatch();
            theTime.Start();

            // save the converged weights and biases
            for (int i = 0; i < weights.GetLength(0); i++)
                for (int j = 0; j < weights[i].GetLength(0); j++)
                    for (int k = 0; k < weights[i][j].GetLength(0); k++)
                        savedWeights[i][j][k] = weights[i][j][k];

            for (int i = 0; i < biases.GetLength(0); i++)
                for (int j = 0; j < biases[i].GetLength(0); j++)
                    savedBiases[i][j] = biases[i][j];

            double am = 0;
            double cc = 0;
            double er = 0;

            // try each input by setting its column to zero
            for (int i = 0; i < inputs.GetLength(1); i++)
            {
                // copy the inputs, zeroing the column under test
                for (int j = 0; j < inputs.GetLength(0); j++)
                    for (int k = 0; k < inputs.GetLength(1); k++)
                    {
                        if (k == i)
                            lesionInputs[j, k] = 0;
                        else
                            lesionInputs[j, k] = inputs[j, k];
                    }

                int e;
                // retrain the network on the lesioned data
                // BUG FIX: the original passed 'inputs' here, so the lesioned array
                // built above was never actually used and every trial retrained on
                // the full, unlesioned input set.
                for (e = 0; e < epochs; e++)
                {
                    er = calcOutputs(lesionInputs, outputs, ref nnOutputs, true);

                    updateWeights();
                }

                am = avgAngleMeasure(outputs, nnOutputs);
                cc = calcCorrelation(outputs, nnOutputs);
                lesionError.Add(er);
                lesionAam.Add(am);
                lesionCorrelation.Add(cc);

                // restore the saved (converged) weights before the next trial
                for (int l = 0; l < weights.GetLength(0); l++)
                    for (int j = 0; j < weights[l].GetLength(0); j++)
                        for (int k = 0; k < weights[l][j].GetLength(0); k++)
                            weights[l][j][k] = savedWeights[l][j][k];

                for (int l = 0; l < biases.GetLength(0); l++)
                    for (int j = 0; j < biases[l].GetLength(0); j++)
                        biases[l][j] = savedBiases[l][j];
            }

            // Find the best lesioned solution based on error.
            // BUG FIX: the original tested 'lesionError[i] < double.MaxValue' (always
            // true), so bestLesion was simply the last input index; compare against
            // the running minimum instead.
            double min = double.MaxValue;
            for (int i = 0; i < lesionError.Count; i++)
                if (lesionError[i] < min)
                {
                    bestLesion = i;
                    min = lesionError[i];
                }

            theTime.Stop();
            lesionTime = (double)theTime.ElapsedMilliseconds / 1000;
        }

        // Run the current network over the validation set and decide whether the
        // average validation error is below the convergence threshold. Also
        // records the total (unaveraged) validation error in latestValidationError.
        private bool validateConvergence()
        {
            double[] tempOut = new double[outputs.GetLength(1)];
            double[] tempIn = new double[inputs.GetLength(1)];
            double totalError = 0;

            // run network against validation set
            for (int i = 0; i < valInputs.GetLength(0); i++)
            {
                for (int j = 0; j < valInputs.GetLength(1); j++)
                {
                    tempIn[j] = valInputs[i, j];
                }

                tempOut = Fprop(tempIn);

                // BUG FIX: the per-sample error is reset every iteration. The
                // original never reset 'partialError', then added the running sum
                // to totalError each pass — double-counting all earlier samples.
                double partialError = 0;
                for (int j = 0; j < valOutputs.GetLength(1); j++)
                {
                    partialError += Math.Pow(valOutputs[i, j] - tempOut[j], 2);
                }

                totalError += partialError;
            }

            totalError *= 0.5;

            latestValidationError = totalError;
            // BUG FIX: average over the validation-set size; the original divided
            // by the training-set size (inputs), biasing the threshold comparison
            // whenever the two sets differ in length.
            totalError /= valInputs.GetLength(0);

            return totalError < convergenceParameter;
        }

        /// <summary>Number of epochs completed in the most recent training run.</summary>
        public int GetElapsedEpochs()
        {
            return epochs;
        }

        /// <summary>
        /// Moving-average training error recorded after 'epoch' epochs, or -1 when
        /// epoch is not positive. NOTE(review): despite the name, this is the moving
        /// average of the summed squared error, not a true RMS value.
        /// </summary>
        public double GetRMSError(int epoch)
        {
            return epoch > 0 ? errorMovAvg[epoch - 1] : -1;
        }

        /// <summary>Asks the training loop to stop after the current epoch.</summary>
        public void abortTraining()
        {
            abort = true;
        }

        // number of training epochs completed in the current run
        int epochs;

        // weights[layer][fromNode][toNode]; 'initial' = values at random init,
        // 'saved' = snapshot taken around lesioning trials, 'Updates' = per-epoch
        // gradient accumulators applied by updateWeights()
        public double[][][] weights;
        double[][][] initialWeights;
        double[][][] savedWeights;
        double[][][] weightUpdates;

        // biases[layer][node] feeding layer+1; companion arrays as for weights
        double[][] biases;
        double[][] initialBiases;
        double[][] savedBiases;
        double[][] biasUpdates;
        // node count per layer, input layer first
        int[] layers;

        // per-layer backpropagated error terms, pre-activation sums, node outputs
        double[][] deltas;
        double[][] activations;
        double[][] nodeOutputs;

        // training data, the lesioned copy of it, and the network's responses
        double[,] inputs;
        double[,] lesionInputs;
        double[,] outputs;
        public double[,] nnOutputs;
        double[,] vnnOutputs;

        // validation data (only used when useValidationSet is true)
        double[,] valInputs;
        double[,] valOutputs;
        public double latestValidationError;

        // training-loop state flags
        bool converged;
        bool lesioning;
        bool abort;
        bool useValidationSet;

        // input scaling: Fprop maps inputs to [-scale, +scale] via scaleMin/scaleMax
        int scale;
        public double scaleMax;
        public double scaleMin;

        // lesioning results and timing (seconds)
        public int bestLesion;
        public double lesionTime;
        public double backPropTrainingTime;
        public double lesionIters;

        // convergence test configuration; see checkConvergence for option meanings
        public double convergenceParameter;
        char convergenceOption;
        // window length for the moving-average error series
        const int MOV_AVG_LENGTH = 100;
        Random rand;
        Stopwatch timer;

    }
}
