﻿using System;
using System.IO;
using System.Drawing;
using System.Collections.Generic;



namespace MLP
{
    partial class Network
    {

        // ---- Topology and training configuration -------------------------------------
        // numInputs/numOutputs/numLayers: network topology; numEpochs: training budget;
        // errMeasure: error-measure selector forwarded to the getError* routines;
        // numVectors/numTestVect: training- and test-set sizes; VSS_version: which
        // variant of the VSS training algorithm is used; lastColumnContainsOutliers:
        // 1 when the last data column holds per-vector outlier coefficients.
        public int numInputs, numOutputs, numLayers, numEpochs, errMeasure, numVectors, numTestVect, VSS_version,
                   lastColumnContainsOutliers = 0;
        // sumMedian: blend between weighted-sum and median aggregation (0 = plain sum);
        // dw0: initial per-weight step size; error/accuracy: last computed training
        // error and classification accuracy; meanOutlier: mean of the outlier
        // coefficients; offset: saturation margin used by the classification error
        // (see the commented getErrorST0 below); delta: trim/quantile rank for the
        // robust error measures; errExp: exponent of the error function.
        public double sumMedian, dw0, error = 0, accuracy = 0, meanOutlier = 0, offset = 0.997, delta = 0.5, errExp = 2;
        public int[] Layer;                         // neurons per layer (filled in the constructor)
        public Neuron[][] N;                        // neurons indexed as N[layer][neuron]
        public double[][] TrainingDataSet, TestDataSet; // row = attributes + target (+ optional outlier column)
        int bitmapHeight = 1002, bitmapWidth = 1800;    // canvas size for learning-curve plots
        public double outlierErrorCoefficiant = 1;      // scaling applied to outlier coefficients
        string outputFileName;                      // base name for result/log files
        public string trainingAlgorithm;            // NOTE(review): not set in this file — presumably assigned by the UI/other partial
        //public int robustAlgorithm;
        public bool classification, outputNeuronLinearForRegression, RandomWeights = true, productUnits;
        // outlierCoef/oc: per-vector and baseline outlier weighting; graphics1Scale:
        // x-scale of the learning-curve plot; oldError: previous error (-1 = unset).
        double outlierCoef = 1, oc,  graphics1Scale = 100,  oldError = -1;
        StreamWriter /* sw */ weightFile, signalFile, errorFile; // optional learning-process logs
        bool saveLearning;                          // whether to log the learning process
        int minClassNumber = 1;                     // presumably the lowest class label (1-based) — TODO confirm
        public bool parallelTraining = false;

        // Transfer (activation) functions selectable via the constructor's transferF
        // argument. The int value is cast directly to this enum, so the declaration
        // order is part of the contract with callers.
        enum TransferFunction
        {
            Tanh, Tanh0995, Linear3, Linear5, Sin, Stair2, Stair3, Stair5, Stair9, Radial1, Radial2
        }
        // Activation selected for this network; assigned once in the constructor.
        TransferFunction transferFunction;


        /// <summary>
        /// Per-neuron state used by the training algorithms.
        /// weight/oldWeight: current and previous weight vectors (in the disabled
        /// CreateNetwork below the last element is the bias — confirm for the active
        /// Create* variants); delta: per-weight step sizes (initialized to dw0);
        /// sumWX: weighted input sum; Y: neuron output.
        /// NOTE(review): mutable struct with public fields — element access via
        /// N[L][n].field mutates in place because arrays return references.
        /// </summary>
        public struct Neuron
        {
            public double[] weight, oldWeight;
            public double[] delta;
            public double sumWX, Y;
        }

        // Training progress (0..1?) passed by ref into the VSS trainers — TODO confirm range.
        double calculationProgress = 0;

        /// <summary>
        /// Configures an MLP network: stores the training options, optionally averages
        /// the per-vector outlier coefficients, splits <paramref name="DataSet"/> into
        /// training and test subsets, derives the layer sizes and creates the weight
        /// structures for the selected VSS variant.
        /// </summary>
        /// <param name="DataSet">Data rows: attribute values followed by the class/target
        /// column and, when <paramref name="lastColumnContainsOutliers"/> is 1, a trailing
        /// outlier-coefficient column.</param>
        /// <param name="numHidden">Neuron counts of the hidden layers.</param>
        /// <param name="numEpochs">Number of training epochs.</param>
        /// <param name="sumMedian">Blend factor between sum and median aggregation of the
        /// weighted inputs (0 = plain weighted sum).</param>
        /// <param name="weightRegularization">Currently unused; kept for interface compatibility.</param>
        /// <param name="percentTestSet">Fraction of vectors held out for testing; 0 trains on the whole set.</param>
        /// <param name="outputFileName">Base name for result/log files.</param>
        /// <param name="transferF">Index cast to <see cref="TransferFunction"/>.</param>
        /// <param name="dw0">Initial per-weight step size.</param>
        /// <param name="classification">true = classification task, false = regression.</param>
        /// <param name="outputNeuronLinearForRegression">Use a linear output neuron for regression.</param>
        /// <param name="outlierErrorCoefficiant">Scaling applied to the outlier coefficients.</param>
        /// <param name="lastColumnContainsOutliers">1 if the last column carries outlier coefficients.</param>
        /// <param name="RandomWeights">Initialize weights randomly (vs. from a weight file).</param>
        /// <param name="errMeasure">Error-measure selector forwarded to the getError* routines.</param>
        /// <param name="errExp">Exponent of the error function.</param>
        /// <param name="VSS_version">Training-algorithm variant; also selects which network structure is created.</param>
        /// <param name="saveLearning">Whether to log the learning process.</param>
        public Network(double[][] DataSet, int[] numHidden, int numEpochs = 20,
                   double sumMedian = 0, double weightRegularization = 0,
                   double percentTestSet = 0.33, string outputFileName = "results", int transferF = 0, double dw0 = 0.5,
                   bool classification = true, bool outputNeuronLinearForRegression = true, double outlierErrorCoefficiant = 1,
                   int lastColumnContainsOutliers = 0, bool RandomWeights = true, int errMeasure = 0,
                   double errExp = 2, int VSS_version = 0, bool saveLearning = false)
        {
            this.saveLearning = saveLearning;
            this.VSS_version = VSS_version;
            this.numEpochs = numEpochs;
            this.classification = classification;
            this.outputFileName = outputFileName;
            this.RandomWeights = RandomWeights;
            this.sumMedian = sumMedian;
            this.outlierErrorCoefficiant = outlierErrorCoefficiant;
            this.lastColumnContainsOutliers = lastColumnContainsOutliers;
            this.dw0 = dw0;
            this.outputNeuronLinearForRegression = outputNeuronLinearForRegression;
            this.errMeasure = errMeasure;
            this.errExp = errExp;

            // Attribute count: total columns minus the class/target column and,
            // optionally, the outlier-coefficient column.
            numInputs = DataSet[0].GetLength(0) - 1 - lastColumnContainsOutliers;

            if (lastColumnContainsOutliers == 1)
            {
                // Average the per-vector outlier coefficients stored in the last column.
                int nv = DataSet.GetLength(0);
                meanOutlier = 0;
                for (int i = 0; i < nv; i++)
                    meanOutlier += DataSet[i][numInputs + 1];
                meanOutlier /= nv;
                meanOutlier *= outlierErrorCoefficiant;
            }

            oc = meanOutlier * outlierErrorCoefficiant;

            transferFunction = (TransferFunction)transferF;

            if (percentTestSet == 0)
            {
                // No hold-out: the whole data set is used for training.
                TrainingDataSet = DataSet;
                numVectors = DataSet.GetLength(0);
            }
            else
            {
                int totalVectors = DataSet.GetLength(0);
                int numColumns = DataSet[0].GetLength(0);

                numVectors = (int)((1 - percentTestSet) * totalVectors);
                TrainingDataSet = new double[numVectors][];
                for (int vx = 0; vx < numVectors; vx++)
                    TrainingDataSet[vx] = new double[numColumns];

                int testSize = totalVectors - numVectors;
                TestDataSet = new double[testSize][];
                for (int vx = 0; vx < testSize; vx++)
                    TestDataSet[vx] = new double[numColumns];

                // Random split: each vector goes to the training set with probability
                // (1 - percentTestSet). As soon as one subset is full, every remaining
                // vector is copied into the other subset, which fills both exactly.
                int vTrn = 0, vTst = 0;
                Random R = new Random();
                for (int v = 0; v < totalVectors; v++)
                {
                    if (R.NextDouble() < (1 - percentTestSet))
                    {
                        for (int a = 0; a < numColumns; a++)
                            TrainingDataSet[vTrn][a] = DataSet[v][a];
                        vTrn++;
                    }
                    else
                    {
                        for (int a = 0; a < numColumns; a++)
                            TestDataSet[vTst][a] = DataSet[v][a];
                        vTst++;
                    }

                    if (vTrn == numVectors)
                    {
                        // Training set full: the rest goes to the test set.
                        for (int v1 = v + 1; v1 < totalVectors; v1++)
                        {
                            for (int a = 0; a < numColumns; a++)
                                TestDataSet[vTst][a] = DataSet[v1][a];
                            vTst++;
                        }
                        break;
                    }
                    else if (vTst == testSize)
                    {
                        // Test set full: the rest goes to the training set.
                        for (int v1 = v + 1; v1 < totalVectors; v1++)
                        {
                            for (int a = 0; a < numColumns; a++)
                                TrainingDataSet[vTrn][a] = DataSet[v1][a];
                            vTrn++;
                        }
                        break;
                    }
                }
            }

            if (classification)
            {
                // One output neuron per distinct class label found in the training set.
                SortedSet<int> Nout = new SortedSet<int>();
                for (int v = 0; v < TrainingDataSet.GetLength(0); v++)
                    Nout.Add((int)TrainingDataSet[v][numInputs]);
                numOutputs = Nout.Count;
            }
            else // regression: a single output neuron
                numOutputs = 1;

            numLayers = numHidden.Length + 2;
            if (VSS_version > 2 && VSS_version != 5)
                numLayers = 3; // the "cross" variants (3, 4) always use a single hidden layer

            // Layer[i] holds the number of neurons in layer i.
            Layer = new int[numLayers];
            Layer[0] = numInputs;
            for (int i = 1; i < numLayers - 1; i++)
                Layer[i] = numHidden[i - 1];
            Layer[numLayers - 1] = numOutputs;

            // BUG FIX: the original expression divided two ints, truncating the scale
            // before it was stored in the double field (e.g. 1800 / 220 gave 8 instead
            // of ~8.18). Cast to double so the division is floating-point.
            graphics1Scale = (double)bitmapWidth / ((Layer[1] + 1) * numEpochs);

            switch (VSS_version)
            {
                // case 0: CreateNetwork(); break;
                case 1: case 2: case 5: CreateNetworkST(); break;
                case 3: case 4: CreateNetworkCross(); break;
            }
        }




        /*
        private void CreateNetwork()
        {


            N = new Neuron[numLayers][];
            for (int L = 0; L < numLayers; L++)
                N[L] = new Neuron[Layer[L]];

            int numW = 10000;
            string[] WeightsFromFile;
            if (!File.Exists(@"..\..\..\Weights.txt"))
            {
                Random rnd1 = new Random();
                WeightsFromFile = new string[numW];
                for (int i = 0; i < numW; i++)
                    WeightsFromFile[i] = rnd1.NextDouble().ToString();

                File.WriteAllLines(@"..\..\..\Weights.txt", WeightsFromFile);
            }
            else
                WeightsFromFile = File.ReadAllLines("Weights.txt");


            int wff = 1;

            Random rnd = new Random();
            for (int L = 0; L < numLayers; L++)
                for (int n = 0; n < Layer[L]; n++)
                {
                    if (L == 0)
                    {
                        N[L][n].weight = new double[1];
                        N[L][n].oldWeight = new double[1];
                        N[L][n].weight[0] = 1;  // the Weights in the input layer are for the purpose of incorporating
                        // the apriori determined importance of particular attributes
                        N[L][n].oldWeight[0] = 1;
                    }
                    else
                    {
                        N[L][n].weight = new double[Layer[L - 1] + 1]; // +1 for bias
                        N[L][n].oldWeight = new double[Layer[L - 1] + 1]; // +1 for bias
                        N[L][n].delta = new double[Layer[L - 1] + 1]; // +1 for bias

                        if (RandomWeights)
                        {
                            for (int w = 0; w < N[L][n].weight.Length; w++)
                            {
                                N[L][n].weight[w] = 0.5 - rnd.NextDouble();  // initializing Weights with random numbers from -0.5 to +0.5
                                N[L][n].oldWeight[w] = N[L][n].weight[w];
                                N[L][n].delta[w] = dw0;
                            }
                        }
                        else
                        {
                            for (int w = 0; w < N[L][n].weight.Length; w++)
                            {
                                N[L][n].weight[w] = 0.5 - Double.Parse(WeightsFromFile[wff++], System.Globalization.CultureInfo.InvariantCulture);
                                N[L][n].oldWeight[w] = N[L][n].weight[w];
                                N[L][n].delta[w] = dw0;
                            }
                        }
                    }
                }


        }
        */

       
        /// <summary>
        /// Dispatches one full training run to the selected VSS variant.
        /// Versions 0, 1 and 4 are currently disabled (commented out below),
        /// so those values fall through the switch and do nothing.
        /// </summary>
        /// <param name="version">VSS variant to execute; also stored in <see cref="VSS_version"/>.</param>
        /// <param name="calculationProgress">Progress value updated by the trainer
        /// (shadows the field of the same name).</param>
        /// <param name="numLayers1">Layer count forwarded to the parallel trainer (version 5) only.</param>
        public void VSS(int version, ref double calculationProgress, int numLayers1)
        {
            VSS_version = version;
            switch (version)
            {
               // case 0: VSS_BMP(); break;
              //  case 1: VSS_ST_PTR(); break;
                // NOTE(review): passes the FIELD numLayers, not the numLayers1
                // parameter (unlike case 5) — confirm this is intentional.
                case 2: VSS_ST(ref calculationProgress, numLayers); break;
                case 3: VSS_ST_3(); break;
             //   case 4: VSS_ST_PTR_4(); break;
                case 5: VSS_ST_parallel(ref calculationProgress, numLayers1); break;
            }
        }
       

        /*
         * 
         *    GetError:
         *    0.  none + PTR
         *    1.  none
         *    2.  outliers + PTR
         *    3.  outliers
         * 
         *    VSS:
         *    0.  PTR + all
         *    1.  all
         * 
         * 
         */



        /*
        public void VSS_BMP() // MLP network learning algorithm: finds the optimal Weights that minimize MSE 
        // that can be any MLP learning algorithm, I use a simplified version of VSS here
        // just a small value dw is added to each single weight and the error is calculated if the error decreases, 
        // w = w + dw, else dw is subtracted t=from that the weight and the error is calculated again 
        // and the error is calculated if the error decreases, w = w - dw, otherwise w remains unchanged.
        // This operation is repeated with each weight and the whole iteration is repeated several times until 
        // decreases significiantly
        {

            StreamWriter sw = new StreamWriter(outputFileName + ".txt");
            bool useBitmap = true;
            if (!useBitmap)
                bitmapWidth = 1;

            Bitmap bitmap = new Bitmap(bitmapWidth, bitmapHeight);
            Pen redPen = new Pen(Color.Red, 3);
            Pen grayPen = new Pen(Color.Gray, 3);
            Pen bluePen = new Pen(Color.Blue, 3);
            Graphics graphics = Graphics.FromImage(bitmap);
            graphics.FillRectangle(new SolidBrush(Color.White), 0, bitmapWidth, 0, bitmapHeight);


            oldError = getError(TrainingDataSet, 0, 0, 0, errExp, 0, true, errMeasure, delta);
            double Error = oldError;
            int numWeights = 0;
            for (int L = 1; L < numLayers; L++)
                for (int n = 0; n < Layer[L]; n++)
                    for (int w = 0; w < N[L][n].weight.Length; w++)
                        numWeights++;

            // double y0 = (double)bitmapHeight / numVectors;
            double y0 = (double)bitmapHeight / oldError;
            double yb0 = y0;
            if (!classification)
                y0 /= 2;
            int x, y, old_x = 1, old_y = bitmapHeight, old_yb = 0, yb;
            // bool ex = false;


            int nw = 0;
            int numNeurons = 0;
            for (int i = 1; i < Layer.Length; i++)
                numNeurons += Layer[i];



            if (bitmapWidth > 1)
            {
                graphics.DrawLine(grayPen, 0, 0, bitmapWidth, 0);
                graphics.DrawLine(grayPen, 0, bitmapHeight - 1, bitmapWidth, bitmapHeight - 2);
                x = (int)Math.Round((double)((bitmapWidth * nw) / (numWeights * numEpochs)));
                graphics.DrawLine(grayPen, x, 0, x, bitmapHeight);
                for (int e = 1; e <= numEpochs; e++)
                {
                    nw = numWeights * e;
                    x = (int)Math.Round((double)((bitmapWidth * nw) / (numWeights * numEpochs)));
                    graphics.DrawLine(grayPen, x, 0, x, bitmapHeight);
                }
            }

            // nr = 0;
            sw.WriteLine("e L n w       weight           error");
            nw = 0;
            double dw = dw0;
            for (int e = 1; e <= numEpochs; e++)
            {
                dw *= 0.995;
                for (int L = 1; L < numLayers; L++)
                {
                    for (int n = 0; n < Layer[L]; n++)
                    {
                        //nr++;
                        for (int w = 0; w < N[L][n].weight.Length; w++)
                        {
                            nw++;
                            bool errorDecreased = false;
                            dw = 0.67 * N[L][n].delta[w];
                            double oldW = N[L][n].weight[w];
                            N[L][n].weight[w] += dw;
                            if ((Error = getError(TrainingDataSet, L, n, w, errExp, 0, true, errMeasure, delta)) < oldError)
                            {
                                oldError = Error;
                                errorDecreased = true;
                                N[L][n].delta[w] = dw;
                                N[L][n].weight[w] += dw;

                                if ((Error = getError(TrainingDataSet, L, n, w, errExp, 0, true, errMeasure, delta)) < oldError)
                                {
                                    oldError = Error;
                                    N[L][n].delta[w] = 2 * dw;
                                }
                                else
                                    N[L][n].weight[w] -= dw;
                            }
                            else
                            {
                                N[L][n].weight[w] -= 2 * dw;
                                if ((Error = getError(TrainingDataSet, L, n, w, errExp, 0, true, errMeasure, delta)) < oldError)
                                {
                                    oldError = Error;
                                    errorDecreased = true;
                                    N[L][n].weight[w] -= dw;
                                    N[L][n].delta[w] = -dw;

                                    if ((Error = getError(TrainingDataSet, L, n, w, errExp, 0, true, errMeasure, delta)) < oldError)
                                    {
                                        oldError = Error;
                                        N[L][n].delta[w] = -2 * dw;
                                    }
                                    else
                                        N[L][n].weight[w] += dw;
                                }


                                if (!errorDecreased)
                                {
                                    N[L][n].weight[w] = oldW;
                                    N[L][n].delta[w] = 0.67 * dw;
                                }
                                if (bitmapWidth > 1)
                                {
                                    y = (int)(oldError * y0);
                                    x = (int)Math.Round((double)((bitmapWidth * nw) / (numWeights * numEpochs)));

                                    graphics.DrawLine(redPen, old_x - 1, bitmapHeight - old_y, x, bitmapHeight - y);
                                    if (classification)
                                    {
                                        yb = (int)(accuracy * yb0);
                                        graphics.DrawLine(bluePen, old_x - 1, bitmapHeight - old_yb, x, bitmapHeight - yb);
                                        old_yb = yb;
                                    }

                                    old_y = y;
                                    old_x = x;

                                }


                                N[L][n].oldWeight[w] = N[L][n].weight[w];
                            } //for w
                        }
                    }

                } //for e

                sw.Close();
                if (bitmapWidth > 1)
                    bitmap.Save(outputFileName + ".png");

                //getError(TrainingDataSet, 0, 0, 0, 2, 1, false);

            }

            error = oldError;
        }
         * */

        /// <summary>
        /// Trains with the LTA (least trimmed absolute) error: a short plain-error run
        /// first, then an estimate of the trim fraction <see cref="delta"/>, then the
        /// full-length robust run with errMeasure = 2 and errExp = 1.
        /// </summary>
        /// <param name="VSS_version">VSS variant to run (shadows the field of the same
        /// name; forwarded to <see cref="VSS"/>).</param>
        public void LT_VSS(int VSS_version)    //to train with LTA error and estimation of delta
        {
            // Phase 1: a short (5-epoch) pre-training pass with the plain error
            // measure, so delta can be estimated from a roughly fitted model.
            int oldNumEpochs = numEpochs;
            numEpochs = 5;
            errMeasure = 0;
            VSS(VSS_version, ref calculationProgress, 1);

            // Estimate delta (errMeasure -1 of getError_ST; per the commented
            // getErrorST0 this is the fraction of residuals below 3*MAD).
            delta = getError_ST(TrainingDataSet, 0, 0, 0, 2, 0, true, -1);

            // Phase 2: full-length robust training with the trimmed absolute error.
            // FIX: removed the dead local that saved errExp but never restored it;
            // errMeasure/errExp are deliberately left at 2/1 after this method returns.
            numEpochs = oldNumEpochs;
            errMeasure = 2;
            errExp = 1;
            VSS(VSS_version, ref calculationProgress, 1);
        }

        /// <summary>
        /// Trains with iterative LMedS: repeatedly trains, estimates delta, and prunes
        /// suspected outliers from <see cref="TrainingDataSet"/> until the training set
        /// stops shrinking or the iteration budget is exhausted.
        /// </summary>
        /// <param name="VSS_version">VSS variant to run (shadows the field of the same
        /// name; forwarded to <see cref="VSS"/>).</param>
        public void IM_VSS(int VSS_version)    //to train with iterative LMedS
        {
            const int iterOfMedian = 5;

            // Start from the plain error measure with the default quantile rank.
            errMeasure = 0;
            delta = 0.5;
            int previousLength = TrainingDataSet.GetLength(0);

            // Initial training pass and first delta estimate (errMeasure -1).
            VSS(VSS_version, ref calculationProgress, 1);
            delta = getError_ST(TrainingDataSet, 0, 0, 0, 2, 0, true, -1);

            for (int iteration = 0; iteration < iterOfMedian; iteration++)
            {
                // Re-estimate delta, compute the quantile error at that rank, then
                // remove suspected outliers from the training set (errMeasure -2).
                delta = getError_ST(TrainingDataSet, 0, 0, 0, 2, 0, true, -1);
                double quantileError = getError_ST(TrainingDataSet, 0, 0, 0, 1, 0, true, 1, delta);
                quantileError = getError_ST(TrainingDataSet, 0, 0, 0, 2, 0, true, -2, quantileError);

                // Converged: the pruning step removed nothing this round.
                if (previousLength == TrainingDataSet.GetLength(0))
                    break;

                // The set shrank — remember its size and retrain on the reduced data.
                previousLength = TrainingDataSet.GetLength(0);
                VSS(VSS_version, ref calculationProgress, 1);
            }
        }





        /*
        public double getErrorST0(double[][] DataSet1, double errorExponent = 2, int test = 0, bool outliers = true, int errMeasure = 0, double delta = 0.5)
        // all the signals for every training vector get propagated through the network to calculate the error
        {
            //  if (productUnits)          
            //      return getErrorPU(DataSet1, L1, n1, w1, errorExponent, test, outliers, errMeasure, delta);

            double Error = 0, prevError = 0, nY;
            int numVect = DataSet1.GetLength(0);
            numTestVect = numVect;
            accuracy = 0;
            double[] errorTable = new double[numVect];

            for (int vect = 0; vect < numVect; vect++)
            {

                for (int n = 0; n < Layer[0]; n++)
                    N[0][n].Y = DataSet1[vect][n];

                double maxY = -1, maxN = -1;
                for (int L = 1; L < numLayers; L++)
                {


                    for (int n = 0; n < Layer[L]; n++)
                    {
                        N[L][n].sumWX = 0;


                        if (sumMedian > 0)
                        {
                            double[] WX = new double[Layer[L - 1] + 1];  //number of Weights in the current neuron
                            for (int w = 0; w < Layer[L - 1]; w++)
                            {
                                N[L][n].sumWX += Weights[L][n][w] * N[L - 1][w].Y;
                                WX[w] = Weights[L][n][w] * N[L - 1][w].Y;
                            }

                            N[L][n].sumWX += Weights[L][n][Layer[L - 1]]; //bias  
                            WX[Layer[L - 1]] = Weights[L][n][Layer[L - 1]]; //bias

                            Array.Sort(WX);
                            int mid = WX.Length / 2;
                            double medianWX = (WX.Length % 2 != 0) ? WX[mid] : (WX[mid] + WX[mid + 1]) / 2;
                            nY = (1 - sumMedian) * N[L][n].sumWX + sumMedian * medianWX;
                        }
                        else
                        {
                            for (int w = 0; w < Layer[L - 1]; w++)
                            {
                                N[L][n].sumWX += Weights[L][n][w] * N[L - 1][w].Y;
                            }
                            N[L][n].sumWX += Weights[L][n][Layer[L - 1]]; //bias
                            nY = N[L][n].sumWX;
                        }


                        N[L][n].Y = Math.Tanh(nY);

                        if (L == numLayers - 1)
                        {

                            if (outlierErrorCoefficiant > 0)
                            {
                                outlierCoef = DataSet1[vect][numInputs + 1];

                                if (outlierCoef < oc)
                                    outlierCoef = oc;

                                outlierCoef = oc + Math.Pow((outlierCoef - oc), 2);
                            }


                            if (classification)
                            {
                                if (N[L][n].Y > maxY)
                                {
                                    maxY = N[L][n].Y;
                                    maxN = n;
                                }

                                int y = (int)DataSet1[vect][Layer[0]] - 1;
                                //class numbering starts from 1
                                if (n == y)  //N[L][y].Y is expected to be 1;
                                {
                                    if (N[L][n].Y < offset)
                                        Error += Math.Pow(Math.Abs(N[L][n].Y - 1), errorExponent) / outlierCoef;
                                }
                                else  //N[L][y].Y is expected to be -1;
                                {
                                    if (N[L][n].Y > -offset)
                                        Error += Math.Pow(Math.Abs(N[L][n].Y + 1), errorExponent) / outlierCoef;
                                }

                                // if (test > 0)
                                //    sw.Write("  " + Math.Round(N[L][n].Y, 4));

                            }
                            else
                            {
                                if (outputNeuronLinearForRegression)
                                    N[L][n].Y = nY;

                                Error += Math.Pow(Math.Abs(N[L][n].Y - DataSet1[vect][Layer[0]]), errorExponent) / outlierCoef;
                            }
                        }

                    }

                }
                if (maxN == DataSet1[vect][Layer[0]] - 1) // - lastColumnContainsOutliers)
                    accuracy++;

                // to calculate quantile-based error 
                errorTable[vect] = Error - prevError;
                prevError = Error;

            }

            if (errMeasure > 0)
            {
                //calculation of the quantile-based or trimmed error
                Array.Sort(errorTable);
                int midErrors = (int)(numVect * delta);
                if (midErrors > numVect)
                    midErrors = numVect;
                if (midErrors < 1)
                    midErrors = 1;

                //trimmed mean: delta smallest errors out of numVect are summed
                if (errMeasure > 1)
                {
                    Error = 0;
                    for (int i = 0; i < midErrors; i++)
                    {
                        Error += errorTable[i];
                    }
                    Error = Error / midErrors;
                }
                //delta-rank quantile error: delta=0.5 is median error
                else
                {
                    //Error = (numVect % 2 != 0) ? errorTable[midErrors - 1] : (errorTable[midErrors - 1] + errorTable[midErrors]) / 2;
                    Error = errorTable[midErrors - 1];

                }
            }
            else if (errMeasure == -1)  // to estimate delta for robust LT learning
            {
                double meanError = 0;
                //Error = 0;
                for (int i = 0; i < numVect; i++)
                {
                    meanError += errorTable[i];
                }
                meanError = meanError / numVect;
                //meanError = Error / numVect;

                //calculating mad
                Error = 0;
                for (int i = 0; i < numVect; i++)
                {
                    Error += Math.Abs(errorTable[i] - meanError);
                }
                Error = Error / numVect;
                Error *= 3;

                int deltaCounter = 0;   //to count errors smaller than 3*mad

                for (int i = 0; i < numVect; i++)
                {
                    if (Math.Abs(errorTable[i]) < Error) deltaCounter++;
                }

                Error = (double)deltaCounter / (double)numVect;
                //if (delta > 1) delta = 1;


                //Array.Sort(errorTable);
            }
            else if (errMeasure == -2)   //to remove outliers for ILMedS learning
            {
                int N = TrainingDataSet.GetLength(0);
                int d = TrainingDataSet[0].GetLength(0);
                int reducedIndex = 0;
                double sigma = 1.4826 * (1 + 5 / (double)(N - d - 1)) * Math.Sqrt(delta);
                sigma = 2.5 * Math.Pow(sigma, 2); //threshold
                double[][] reducedDataSet;
                reducedDataSet = new double[N][];
                for (int vx = 0; vx < N; vx++)
			       reducedDataSet[vx] = new double[d];
            


                for (int i = 0; i < N; i++) //remove from the training set patterns with error<sigma
                {
                    if (errorTable[i] < sigma)
                    {
                        for (int j = 0; j < d; j++)
                        {
                            reducedDataSet[reducedIndex][j] = TrainingDataSet[i][j];
                        }
                        reducedIndex++;
                    }

                }
                if (reducedIndex > 1)
                {
                    double[][] finalDataSet = new double[reducedIndex][];
                    for (int vx = 0; vx < reducedIndex; vx++)
			            finalDataSet[vx] = new double[d];
                      
                    Array.ConstrainedCopy(reducedDataSet, 0, finalDataSet, 0, reducedIndex);
                    TrainingDataSet = finalDataSet;
                }


            }

            return Error;
        }
        */

    }
}
