﻿using System;
using System.IO;
using System.Drawing;
using System.Collections.Generic;

namespace NewVSS
{
    /// <summary>
    /// A three-layer MLP (input, one hidden layer, output) with direct
    /// input-to-output skip connections, stored in flat multidimensional
    /// arrays instead of linked node objects ("no pointers").
    /// Trained with a simplified Variable Step Search (VSS) algorithm.
    /// </summary>
    partial class NetworkNoPointers
    {
        // Topology / training configuration. Note: in each declaration list the
        // explicit initializer applies only to the last variable; the rest rely
        // on C#'s default-zero field initialization.
        public int numInputs, numHidden1, numOutputs, numLayers, numEpochs, errMeasure, numVectors, numTestVect, lastColumnContainsOutliers = 0;
        public double dw0 = 0.3, error = 0, accuracy = 0, meanOutlier = 0, offset = 0.997, delta = 0.5, errExp = 2, oldWeightTest, oldError = -1;
        public int[] Layer;                                           // neurons per layer: {numInputs, numHidden1, numOutputs}
        double[, ,] SignalTableY, SignalTableSumWX, weights, deltaa;  // per-vector activations, per-vector net sums, weights, per-weight VSS steps
        public double[,] TrainingDataSet;                             // rows = vectors; last column = class label (1-based) or regression target
        public bool classification, RandomWeights = true;
        double[] errorTable;                                          // per-output-neuron error accumulator


        /// <summary>
        /// Configures the network from the data set: the number of inputs is the
        /// column count minus one (last column is the target), the number of
        /// outputs is the count of distinct class labels (classification) or 1
        /// (regression), then allocates and initializes the network.
        /// </summary>
        /// <param name="DataSet">Training data; last column holds the class label (1-based) or the regression target.</param>
        /// <param name="numEpochs">Number of VSS training epochs.</param>
        /// <param name="numHidden1">Neurons in the single hidden layer.</param>
        /// <param name="classification">true = classification; false = regression (single linear output).</param>
        /// <param name="RandomWeights">true = random initial weights; false = read them from datasets/weights.txt.</param>
        /// <param name="errMeasure">Error-measure selector (stored; not used by the code visible here).</param>
        /// <param name="delta">Threshold parameter (stored; not used by the code visible here).</param>
        /// <param name="errExp">Error exponent (stored; getError uses its own errorExponent parameter instead).</param>
        public NetworkNoPointers(double[,] DataSet, int numEpochs = 20, int numHidden1 = 5, bool classification = true,
                   bool RandomWeights = true, int errMeasure = 0, double delta = 0.5, double errExp = 2)
        {
            this.numEpochs = numEpochs;
            this.RandomWeights = RandomWeights;
            this.numHidden1 = numHidden1;

            numLayers = 3; // fixed architecture: input + one hidden + output

            this.classification = classification;
            this.errMeasure = errMeasure;
            this.delta = delta;
            this.errExp = errExp;
            numInputs = DataSet.GetLength(1) - 1; // last column = class/Y

            TrainingDataSet = DataSet;
            numVectors = DataSet.GetLength(0);

            if (classification) // number of outputs = number of distinct class labels
            {
                SortedSet<int> Nout = new SortedSet<int>();
                for (int v = 0; v < TrainingDataSet.GetLength(0); v++)
                    Nout.Add((int)TrainingDataSet[v, numInputs]);
                numOutputs = Nout.Count;
            }
            else // regression
                numOutputs = 1;

            // (duplicate re-assignments of numEpochs/numHidden1 removed — both
            // were already stored at the top of this constructor)
            CreateNetwork();
        }

        /// <summary>
        /// Allocates the weight/step/signal tables and initializes the weights,
        /// either randomly in (-0.5, 0.5] or from datasets/weights.txt.
        /// Weight layout is [layer, neuron, weight]; an output-layer row holds
        /// the hidden-layer weights, then the input (skip) weights, then the bias.
        /// </summary>
        private void CreateNetwork()
        {
            Layer = new int[numLayers]; // Layer table gives us information about how many neurons are in each layer
            Layer[0] = numInputs;
            Layer[1] = numHidden1;
            Layer[2] = numOutputs;

            errorTable = new double[numOutputs]; // table that we use to store errors of each output neuron

            // Output neurons connect to the hidden layer AND directly to the
            // inputs, so they carry numHidden1 + numInputs weights plus a bias.
            int WeightCounter = numHidden1 + numInputs;
            int maxNeurons = numInputs;
            for (int i = 1; i < numLayers; i++)
                if (maxNeurons < Layer[i])
                    maxNeurons = Layer[i];

            // The third dimension must fit the widest weight row plus its bias.
            if (maxNeurons + 1 > WeightCounter)
                weights = new double[numLayers, maxNeurons, maxNeurons + 1];
            else
                weights = new double[numLayers, maxNeurons, WeightCounter + 1];

            deltaa = new double[weights.GetLength(0), weights.GetLength(1), weights.GetLength(2)];
            SignalTableY = new double[TrainingDataSet.GetLength(0), numLayers, maxNeurons];
            SignalTableSumWX = new double[TrainingDataSet.GetLength(0), numLayers, maxNeurons];

            string[] WeightsFromFile = { "" };
            if (!RandomWeights)
                WeightsFromFile = File.ReadAllLines(@"../../../datasets/weights.txt");
            int wff = 0;


            Random rnd = new Random();
            // Hidden layer (L == 1): Layer[0] input weights + 1 bias per neuron.
            for (int L = 1; L < 2; L++)
            {
                for (int n = 0; n < Layer[L]; n++)
                {

                    for (int w = 0; w < Layer[0] + 1; w++)
                    {
                        if (RandomWeights)
                        {
                            weights[L, n, w] = 0.5 - rnd.NextDouble(); // random weight, uniform in (-0.5, 0.5]
                            deltaa[L, n, w] = dw0; // initial VSS step size
                        }
                        else
                        {
                            // NOTE(review): file-loaded weights are also offset as (0.5 - value) —
                            // confirm the file really stores the complement rather than the weight itself.
                            weights[L, n, w] = 0.5 - Double.Parse(WeightsFromFile[wff++], System.Globalization.CultureInfo.InvariantCulture); // adding weights from file
                            deltaa[L, n, w] = dw0; // initial VSS step size
                        }
                    }
                }

            }
            // Output layer (L == 2): hidden weights + input (skip) weights + 1 bias per neuron.
            for (int L = 2; L < 3; L++)
            {
                for (int n = 0; n < Layer[L]; n++)
                {

                    for (int w = 0; w < WeightCounter + 1; w++)
                    {
                        if (RandomWeights)
                        {
                            weights[L, n, w] = 0.5 - rnd.NextDouble(); // random weight, uniform in (-0.5, 0.5]
                            deltaa[L, n, w] = dw0; // initial VSS step size
                        }
                        else
                        {
                            weights[L, n, w] = 0.5 - Double.Parse(WeightsFromFile[wff++], System.Globalization.CultureInfo.InvariantCulture); // adding weights from file
                            deltaa[L, n, w] = dw0; // initial VSS step size
                        }
                    }
                }

            }


        }

        /// <summary>
        /// MLP learning algorithm: a simplified Variable Step Search (VSS) that
        /// directly minimizes the network error. For each weight a small step dw
        /// is added; if the error decreases the step is kept (and one more step
        /// in the same direction is tried), otherwise dw is subtracted from the
        /// weight and the error is checked again; if neither direction helps the
        /// weight is restored and the step size is shrunk. The per-weight step
        /// is remembered in deltaa, and the whole sweep repeats for numEpochs.
        /// </summary>
        public void VSS()
        {
            oldError = getError(TrainingDataSet); // full forward pass also fills the signal tables used by rec()
            double Error = oldError;
            double dw = dw0;
            for (int e = 0; e < numEpochs; e++)
            {
                if (classification)
                    Console.WriteLine(oldError / numVectors + "    " + accuracy / numVectors);
                else
                    Console.WriteLine(oldError / numVectors);
                // (dead "dw *= 0.995" decay removed: dw is unconditionally
                // recomputed from deltaa below before it is ever read)
                for (int L = 1; L < numLayers; L++)
                {
                    for (int n = 0; n < Layer[L]; n++)
                    {
                        // NOTE(review): for the output layer this covers only Layer[L-1]+1
                        // weights, so the input-to-output skip weights are never trained, and
                        // index Layer[1] is treated as a bias by rec() while the full forward
                        // pass in getError treats it as a skip weight — confirm this is intended.
                        for (int w = 0; w < Layer[L - 1] + 1; w++)
                        {

                            bool errorDecreased = false;
                            dw = 0.67 * deltaa[L, n, w]; // start from a fraction of the last successful step
                            double oldW = weights[L, n, w];
                            // Try w + dw.
                            oldWeightTest = weights[L, n, w];
                            weights[L, n, w] += dw;
                            rec(L, n, w);
                            if ((Error = getError(TrainingDataSet, L, n, w)) < oldError)
                            {
                                oldError = Error;
                                errorDecreased = true;
                                deltaa[L, n, w] = dw;
                                // It helped — try one more step in the same direction.
                                oldWeightTest = weights[L, n, w];
                                weights[L, n, w] += dw;
                                rec(L, n, w);
                                if ((Error = getError(TrainingDataSet, L, n, w)) < oldError)
                                {
                                    oldError = Error;
                                    deltaa[L, n, w] = 2 * dw;
                                }
                                else
                                {
                                    // The second step hurt — undo it.
                                    oldWeightTest = weights[L, n, w];
                                    weights[L, n, w] -= dw;
                                    rec(L, n, w);
                                }
                            }
                            else
                            {
                                // Try the opposite direction (net effect: oldW - dw).
                                oldWeightTest = weights[L, n, w];
                                weights[L, n, w] -= 2 * dw;
                                rec(L, n, w);
                                if ((Error = getError(TrainingDataSet, L, n, w)) < oldError)
                                {
                                    oldError = Error;
                                    errorDecreased = true;
                                    oldWeightTest = weights[L, n, w];
                                    weights[L, n, w] -= dw;
                                    deltaa[L, n, w] = -dw;
                                    rec(L, n, w);
                                    if ((Error = getError(TrainingDataSet, L, n, w)) < oldError)
                                    {
                                        oldError = Error;
                                        deltaa[L, n, w] = -2 * dw;
                                    }
                                    else
                                    {
                                        // The second step hurt — undo it.
                                        oldWeightTest = weights[L, n, w];
                                        weights[L, n, w] += dw;
                                        rec(L, n, w);
                                    }
                                }
                                if (!errorDecreased)
                                {
                                    // Neither direction helped: restore the weight, shrink the step.
                                    oldWeightTest = weights[L, n, w];
                                    weights[L, n, w] = oldW;
                                    rec(L, n, w);
                                    deltaa[L, n, w] = 0.67 * dw;
                                }
                            }
                        }//for w
                    }//for n
                }//for L
            } //for e
        }

        /// <summary>
        /// Propagates every vector of <paramref name="DataSet1"/> through the whole
        /// network (refilling SignalTableY / SignalTableSumWX) and returns the total
        /// error; for classification it also updates the public <c>accuracy</c>
        /// counter. Parameters after <paramref name="errorExponent"/> are accepted
        /// for interface compatibility but are not used by this implementation.
        /// </summary>
        public double getError(double[,] DataSet1, double errorExponent = 2, int test = 0, bool outliers = false, int errMeasure = 0, double delta = 0.5)
        {
            double Error = 0;
            int numVect = DataSet1.GetLength(0);
            accuracy = 0;
            // Full forward pass for every vector.
            for (int vect = 0; vect < numVect; vect++)
            {
                // Input layer: copy the vector's features.
                for (int n = 0; n < Layer[0]; n++)
                    SignalTableY[vect, 0, n] = DataSet1[vect, n];

                // Hidden layer (L == 1): weighted sum of inputs + bias, tanh activation.
                for (int L = 1; L < 2; L++)
                {
                    for (int n = 0; n < Layer[L]; n++)
                    {
                        SignalTableSumWX[vect, L, n] = 0;
                        for (int w = 0; w < Layer[L - 1]; w++)
                        {
                            SignalTableSumWX[vect, L, n] += weights[L, n, w] * SignalTableY[vect, L - 1, w];
                        }
                        SignalTableSumWX[vect, L, n] += weights[L, n, Layer[L - 1]]; //bias 
                        SignalTableY[vect, L, n] = Math.Tanh(SignalTableSumWX[vect, L, n]);//y
                    }
                }
                // Output layer (L == 2): hidden weights, then input (skip) weights, then bias.
                for (int L = 2; L < 3; L++)
                {
                    for (int n = 0; n < Layer[L]; n++)
                    {
                        SignalTableSumWX[vect, L, n] = 0;
                        for (int w = 0; w < Layer[L - 1]; w++)
                            SignalTableSumWX[vect, L, n] += weights[L, n, w] * SignalTableY[vect, L - 1, w];

                        for (int w = Layer[L - 1]; w < Layer[L - 2] + Layer[L - 1]; w++)
                            SignalTableSumWX[vect, L, n] += weights[L, n, w] * SignalTableY[vect, L - 2, (w - Layer[L - 1])];

                        SignalTableSumWX[vect, L, n] += weights[L, n, Layer[L - 1] + Layer[L - 2]]; //bias 
                        SignalTableY[vect, L, n] = Math.Tanh(SignalTableSumWX[vect, L, n]);//y
                    }
                }
            }
            if (classification)
            {
                int L = numLayers - 1;
                double maxY;
                int maxN; // index of the strongest output = predicted class
                for (int vect = 0; vect < numVect; vect++)
                {
                    maxY = -1; maxN = -1;
                    int y = (int)DataSet1[vect, Layer[0]] - 1; // class numbering starts from 1 (invariant over n, hoisted)
                    for (int n = 0; n < Layer[L]; n++)
                    {
                        if (SignalTableY[vect, L, n] > maxY)
                        {
                            maxY = SignalTableY[vect, L, n];
                            maxN = n;
                        }
                        if (n == y)  // this output is expected to be 1; penalize only below the offset margin
                        {
                            if (SignalTableY[vect, L, n] < offset)
                                Error += Math.Pow(Math.Abs(SignalTableY[vect, L, n] - 1), errorExponent); //calculating network for classification
                        }
                        else  // this output is expected to be -1
                        {
                            if (SignalTableY[vect, L, n] > -offset)
                                Error += Math.Pow(Math.Abs(SignalTableY[vect, L, n] + 1), errorExponent); //calculating network for classification
                        }
                    }

                    if (maxN == DataSet1[vect, Layer[0]] - 1)
                        accuracy++;
                }
            }
            else
            {
                int L = numLayers - 1;
                for (int vect = 0; vect < numVect; vect++)
                {
                    for (int n = 0; n < Layer[L]; n++)
                    {
                        SignalTableY[vect, L, n] = SignalTableSumWX[vect, L, n]; // regression output is linear (tanh overwritten)
                        Error += Math.Pow(Math.Abs(SignalTableY[vect, L, n] - DataSet1[vect, Layer[0]]), errorExponent); //calculating network for regression
                    }
                }
            }

            return Error;
        }

        /// <summary>
        /// Incrementally recalculates Y and SumWX for every training vector after
        /// weight [L1, n1, w1] changed from <c>oldWeightTest</c> to its current
        /// value: only the changed neuron and the layers above it are updated,
        /// avoiding a full forward pass.
        /// </summary>
        public void rec(int L1, int n1, int w1)
        {
            for (int vect = 0; vect < TrainingDataSet.GetLength(0); vect++)
            {
                if (w1 == Layer[L1 - 1])
                {
                    // Bias weight: the net sum changes by the weight delta itself.
                    // NOTE(review): for the output layer (L1 == 2) the full forward pass
                    // treats index Layer[1] as a skip weight, not the bias — confirm.
                    SignalTableSumWX[vect, L1, n1] += weights[L1, n1, w1];
                    SignalTableSumWX[vect, L1, n1] -= oldWeightTest;
                }
                else
                {
                    // Regular weight: replace old contribution with the new one.
                    SignalTableSumWX[vect, L1, n1] += weights[L1, n1, w1] * SignalTableY[vect, L1 - 1, w1];
                    SignalTableSumWX[vect, L1, n1] -= oldWeightTest * SignalTableY[vect, L1 - 1, w1];
                }
                double oldY = SignalTableY[vect, L1, n1];
                SignalTableY[vect, L1, n1] = Math.Tanh(SignalTableSumWX[vect, L1, n1]);
                if (L1 != numLayers - 1)
                {
                    // Next layer up: only the changed neuron's output contribution differs.
                    for (int L = L1 + 1; L < L1 + 2; L++)
                    {
                        for (int n = 0; n < Layer[L]; n++)
                        {
                            SignalTableSumWX[vect, L, n] += weights[L, n, n1] * SignalTableY[vect, L - 1, n1];
                            SignalTableSumWX[vect, L, n] -= weights[L, n, n1] * oldY;
                            SignalTableY[vect, L, n] = Math.Tanh(SignalTableSumWX[vect, L, n]); //y
                        }
                    }
                    // Any further layers: recompute from scratch (hidden + skip + bias).
                    for (int L = L1 + 2; L < numLayers; L++)
                    {
                        for (int n = 0; n < Layer[L]; n++)
                        {
                            SignalTableSumWX[vect, L, n] = 0;
                            for (int w = 0; w < Layer[L - 1]; w++)
                                SignalTableSumWX[vect, L, n] += weights[L, n, w] * SignalTableY[vect, L - 1, w];
                            for (int w = Layer[L - 1]; w < Layer[L - 2] + Layer[L - 1]; w++)
                                SignalTableSumWX[vect, L, n] += weights[L, n, w] * SignalTableY[vect, L - 2, (w - Layer[L - 1])];
                            SignalTableSumWX[vect, L, n] += weights[L, n, Layer[L - 1] + Layer[L - 2]]; //bias 
                            SignalTableY[vect, L, n] = Math.Tanh(SignalTableSumWX[vect, L, n]);//y
                        }
                    }
                }
            }

        }

        /// <summary>
        /// Error evaluation used inside VSS after a single-weight change: it does
        /// NOT propagate signals itself but reads SignalTableY as already updated
        /// by <see cref="rec"/>. L1/n1/w1 identify the changed weight (kept for
        /// call-site symmetry; unused here), and the trailing parameters are
        /// accepted for interface compatibility only. Also resets errorTable and
        /// updates <c>accuracy</c> and <c>numTestVect</c>.
        /// </summary>
        public double getError(double[,] DataSet1, int L1, int n1, int w1, double errorExponent = 2, int test = 0, bool outliers = false, int errMeasure = 0, double delta = 0.5)
        {
            double Error = 0;
            int numVect = DataSet1.GetLength(0);
            numTestVect = numVect;
            accuracy = 0;
            for (int i = 0; i < errorTable.Length; i++)
                errorTable[i] = 0;
            if (classification)
            {
                int L = numLayers - 1;
                double maxY;
                int maxN; // index of the strongest output = predicted class
                for (int vect = 0; vect < numVect; vect++)
                {
                    maxY = -1; maxN = -1;
                    int y = (int)DataSet1[vect, Layer[0]] - 1; // class numbering starts from 1 (invariant over n, hoisted)

                    for (int n = 0; n < Layer[L]; n++)
                    {
                        if (SignalTableY[vect, L, n] > maxY)
                        {
                            maxY = SignalTableY[vect, L, n];
                            maxN = n;
                        }
                        if (n == y)  // this output is expected to be 1; penalize only below the offset margin
                        {
                            if (SignalTableY[vect, L, n] < offset)
                                Error += Math.Pow(Math.Abs(SignalTableY[vect, L, n] - 1), errorExponent); //calculating network for classification
                        }
                        else  // this output is expected to be -1
                        {
                            if (SignalTableY[vect, L, n] > -offset)
                                Error += Math.Pow(Math.Abs(SignalTableY[vect, L, n] + 1), errorExponent); //calculating network for classification
                        }
                    }


                    if (maxN == DataSet1[vect, Layer[0]] - 1)
                        accuracy++;
                }
            }
            else
            {
                int L = numLayers - 1;
                for (int vect = 0; vect < numVect; vect++)
                {
                    for (int n = 0; n < Layer[L]; n++)
                    {
                        SignalTableY[vect, L, n] = SignalTableSumWX[vect, L, n]; // regression output is linear (tanh overwritten)
                        Error += Math.Pow(Math.Abs(SignalTableY[vect, L, n] - DataSet1[vect, Layer[0]]), errorExponent); //calculating network for regression
                    }

                }
            }

            return Error;
        }
    }
}




