﻿using System;
using System.IO;
using System.Drawing;
using System.Collections.Generic;

namespace MLP
{
    partial class Network
    {


        private void CreateNetworkCross()
        // Allocates the neuron/weight/delta/signal tables for a 3-layer "cross" MLP
        // (the output layer is additionally connected directly to the inputs) and
        // initializes the weights either randomly or from "weights.txt".
        // NOTE(review): the hard-coded Ly = 1 / Ly = 2 sections assume numLayers == 3.
        {
            N = new Neuron[numLayers][];
            for (int Lx = 0; Lx < numLayers; Lx++)
                N[Lx] = new Neuron[Layer[Lx]];

            // Fan-in of an output neuron: all layer-1 outputs plus all raw inputs.
            int WeightCounter = Layer[0] + Layer[1];
            int maxNeurons = numInputs;
            for (int i = 1; i < numLayers; i++)
                if (maxNeurons < Layer[i])
                    maxNeurons = Layer[i];

            // Per-neuron weight-vector length: the largest fan-in plus one bias slot.
            int weightsC;
            if (maxNeurons + 1 > WeightCounter)
                weightsC = maxNeurons + 1;
            else
                weightsC = WeightCounter + 1;

            Weights = new double[numLayers][][];
            Delta = new double[numLayers][][];
            for (int L0 = 1; L0 < numLayers; L0++)
            {
                Weights[L0] = new double[Layer[L0]][];
                Delta[L0] = new double[Layer[L0]][];
                for (int n = 0; n < Layer[L0]; n++)
                {
                    Weights[L0][n] = new double[weightsC];
                    Delta[L0][n] = new double[weightsC];
                }
            }

            // Cached per-vector activations (Y) and weighted sums (SumWX), one entry
            // per neuron, filled by FillSignalTableCross / patched by rec2.
            SignalTableY = new double[numVectors][][];
            SignalTableSumWX = new double[numVectors][][];
            for (int v = 0; v < numVectors; v++)
            {
                SignalTableY[v] = new double[numLayers][];
                SignalTableSumWX[v] = new double[numLayers][];
                for (int L = 0; L < numLayers; L++)
                {
                    SignalTableY[v][L] = new double[Layer[L]];
                    SignalTableSumWX[v][L] = new double[Layer[L]];
                }
            }

            int wff = 1; // read position in the weights file (index 0 is skipped)
            string[] WeightsFromFile = { "" };
            Random rnd = new Random();

            if (!RandomWeights)
            {
                if (!File.Exists("weights.txt"))
                {
                    // FIX: make sure the generated file holds enough numbers for every
                    // weight read below (the fixed 1000 could run out for large layers).
                    int numW = Layer[1] * (Layer[0] + 1) + Layer[2] * (WeightCounter + 1) + 1;
                    if (numW < 1000)
                        numW = 1000;

                    WeightsFromFile = new string[numW];
                    for (int i = 0; i < numW; i++)
                        // FIX: write with InvariantCulture so the file round-trips
                        // regardless of the machine's decimal separator.
                        WeightsFromFile[i] = rnd.NextDouble().ToString(System.Globalization.CultureInfo.InvariantCulture);

                    File.WriteAllLines("weights.txt", WeightsFromFile);
                }
                else
                    WeightsFromFile = File.ReadAllLines("weights.txt");
            }

            // Layer 1: Layer[0] input weights + 1 bias per neuron.
            int Ly = 1;
            for (int n = 0; n < Layer[Ly]; n++)
            {
                for (int w = 0; w < Layer[0] + 1; w++)
                {
                    if (RandomWeights)
                    {
                        Weights[Ly][n][w] = 0.5 - rnd.NextDouble(); //adding random weights
                        Delta[Ly][n][w] = dw0; //initial per-weight step
                    }
                    else
                    {
                        // FIX: parse with InvariantCulture (was Convert.ToDouble, which
                        // uses the current culture and disagreed with the layer-2 code).
                        Weights[Ly][n][w] = 0.5 - Double.Parse(WeightsFromFile[wff++], System.Globalization.CultureInfo.InvariantCulture);
                        Delta[Ly][n][w] = dw0; //initial per-weight step
                    }
                }
            }

            // Layer 2: Layer[1] hidden weights + Layer[0] cross weights + 1 bias.
            Ly = 2;
            for (int n = 0; n < Layer[Ly]; n++)
            {
                for (int w = 0; w < WeightCounter + 1; w++)
                {
                    if (RandomWeights)
                    {
                        Weights[Ly][n][w] = 0.5 - rnd.NextDouble(); //adding random weights
                        Delta[Ly][n][w] = dw0; //initial per-weight step
                    }
                    else
                    {
                        Weights[Ly][n][w] = 0.5 - Double.Parse(WeightsFromFile[wff++], System.Globalization.CultureInfo.InvariantCulture); //adding weights from file
                        Delta[Ly][n][w] = dw0; //initial per-weight step
                    }
                }
            }
        }




        public void VSS_ST_3() // MLP network learning algorithm: finds the Weights that minimize the MSE.
        // This can be any MLP learning algorithm; a simplified version of VSS is used here:
        // a small value dw is added to a single weight and the error is recalculated.
        // If the error decreased, keep w = w + dw; otherwise dw is subtracted from the
        // weight and the error is recalculated again. If the error decreased, keep
        // w = w - dw; otherwise w remains unchanged. This is repeated for every weight,
        // and the whole sweep is repeated for numEpochs epochs.
        {
            FillSignalTableCross(TrainingDataSet);
            oldError = getError_ST(TrainingDataSet, 0, 0, 0, errExp, 0, true, errMeasure, delta);
            double Error = oldError;
            double dw = dw0;

            bool useBitmap = true;
            if (!useBitmap)
                bitmapWidth = 1; // width of 1 disables all plotting below

            Bitmap bitmap = new Bitmap(bitmapWidth, bitmapHeight);
            Pen redPen = new Pen(Color.Red, 3);
            Pen grayPen = new Pen(Color.Gray, 3);
            Pen bluePen = new Pen(Color.Blue, 3);
            Graphics graphics = Graphics.FromImage(bitmap);
            // FIX: FillRectangle takes (x, y, width, height); the old call passed
            // (0, bitmapWidth, 0, bitmapHeight), i.e. a zero-width rectangle, so the
            // white background was never painted.
            using (SolidBrush white = new SolidBrush(Color.White))
                graphics.FillRectangle(white, 0, 0, bitmapWidth, bitmapHeight);

            // FIX: each neuron in layer L has Layer[L-1] + 1 weights (incl. bias), so
            // the total is Layer[L] * (Layer[L-1] + 1); the old "+ 1" per layer
            // undercounted and made the plotted x coordinate overshoot the bitmap.
            int numWeights = 0, nw = 0;
            for (int L = 1; L < numLayers; L++)
                numWeights += Layer[L] * (Layer[L - 1] + 1);

            double y0 = (double)bitmapHeight / oldError; // vertical scale: error -> pixels

            double yb0 = y0; // vertical scale for the accuracy curve
            if (!classification)
                y0 /= 2;
            int x, y, old_x = 1, old_y = bitmapHeight, old_yb = 0, yb;
            if (bitmapWidth > 1)
            {
                graphics.DrawLine(grayPen, 0, 0, bitmapWidth, 0);
                // FIX: the bottom border is horizontal (it was drawn 1px slanted).
                graphics.DrawLine(grayPen, 0, bitmapHeight - 1, bitmapWidth, bitmapHeight - 1);
                // FIX: divide in floating point; the old code divided two ints first,
                // so the Math.Round was a no-op and x was truncated.
                x = (int)Math.Round(bitmapWidth * (double)nw / (numWeights * numEpochs));
                graphics.DrawLine(grayPen, x, 0, x, bitmapHeight);
                for (int e = 1; e <= numEpochs; e++)
                {
                    nw = numWeights * e;
                    x = (int)Math.Round(bitmapWidth * (double)nw / (numWeights * numEpochs));
                    graphics.DrawLine(grayPen, x, 0, x, bitmapHeight);
                }
            }
            nw = 0;

            weightFile = new StreamWriter(outputFileName + "_Weights.txt");
            signalFile = new StreamWriter(outputFileName + "_signals.txt");
            errorFile = new StreamWriter(outputFileName + "_errors.txt");

            if (saveLearning)
            {
                // Header: network topology, then the initial (epoch 0) error/accuracy.
                errorFile.Write("Network: ");
                for (int i = 0; i < numLayers - 1; i++)
                    errorFile.Write(Layer[i] + "-");
                errorFile.WriteLine(Layer[numLayers - 1]);

                if (classification)
                {
                    errorFile.WriteLine("epoch L n w error accuracy");
                    errorFile.WriteLine("0 0 0 0 " + oldError / (Layer[numLayers - 1] * numVectors) + " " + accuracy / numVectors);
                }
                else
                {
                    errorFile.WriteLine("epoch L n w error");
                    errorFile.WriteLine("0 0 0 0 " + oldError / numVectors);
                }

                weightFile.Write("Network: ");
                for (int i = 0; i < numLayers - 1; i++)
                    weightFile.Write(Layer[i] + "-");
                weightFile.Write(Layer[numLayers - 1]);
                weightFile.WriteLine("   L/n/w");

                weightFile.Write("epoch");
                for (int L = 1; L < numLayers; L++)
                    for (int n = 0; n < Layer[L]; n++)
                        for (int w = 0; w < Layer[L - 1] + 1; w++)
                            weightFile.Write(" " + L + "/" + n + "/" + w);
                weightFile.WriteLine();

                weightFile.Write("0 ");
                for (int L = 1; L < numLayers; L++)
                    for (int n = 0; n < Layer[L]; n++)
                        for (int w = 0; w < Layer[L - 1] + 1; w++)
                            weightFile.Write(" " + Weights[L][n][w]);
                weightFile.WriteLine();

                signalFile.Write("Network: ");
                for (int i = 0; i < numLayers - 1; i++)
                    signalFile.Write(Layer[i] + "-");
                signalFile.Write(Layer[numLayers - 1]);
                signalFile.WriteLine("   L/n");

                signalFile.WriteLine("epoch=0");
                signalFile.Write("vect");
                for (int L = 1; L < numLayers; L++)
                    for (int n = 0; n < Layer[L]; n++)
                        signalFile.Write(" " + L + "/" + n);
                signalFile.WriteLine();

                for (int v = 0; v < numVectors; v++)
                {
                    signalFile.Write(v);
                    for (int L = 1; L < numLayers; L++)
                        for (int n = 0; n < Layer[L]; n++)
                            signalFile.Write(" " + SignalTableY[v][L][n]);
                    signalFile.WriteLine();
                }
            }

            // NOTE(review): this sweep runs w only up to Layer[L-1] + 1, so the extra
            // "cross" weights of layer 2 (indices >= Layer[1] + 1, allocated in
            // CreateNetworkCross) are never trained here — confirm that is intended.
            for (int e = 1; e <= numEpochs; e++)
            {
                // (the old "dw *= 0.995" decay was removed: dw is overwritten from
                // Delta[L][n][w] at the top of the w-loop, so the decay had no effect)
                for (int L = 1; L < numLayers; L++)
                {
                    for (int n = 0; n < Layer[L]; n++)
                    {
                        for (int w = 0; w < Layer[L - 1] + 1; w++)
                        {
                            nw++;
                            bool errorDecreased = false;
                            dw = 0.67 * Delta[L][n][w]; // per-weight adaptive step
                            double oldW = Weights[L][n][w];
                            oldWeightTest = Weights[L][n][w];
                            Weights[L][n][w] += dw;
                            rec2(L, n, w);
                            // Try w + dw; if the error drops, try w + 2*dw as well.
                            if ((Error = getError_ST(TrainingDataSet, L, n, w, errExp, 0, true, errMeasure, delta)) < oldError)
                            {
                                oldError = Error;
                                errorDecreased = true;
                                Delta[L][n][w] = dw;
                                oldWeightTest = Weights[L][n][w];
                                Weights[L][n][w] += dw;
                                rec2(L, n, w);
                                if ((Error = getError_ST(TrainingDataSet, L, n, w, errExp, 0, true, errMeasure, delta)) < oldError)
                                {
                                    oldError = Error;
                                    Delta[L][n][w] = 2 * dw;
                                }
                                else
                                {
                                    // Second step hurt: roll back to w + dw.
                                    oldWeightTest = Weights[L][n][w];
                                    Weights[L][n][w] -= dw;
                                    rec2(L, n, w);
                                }
                            }
                            else
                            {
                                // Try the opposite direction, w - dw (we are at w + dw).
                                oldWeightTest = Weights[L][n][w];
                                Weights[L][n][w] -= 2 * dw;
                                rec2(L, n, w);
                                if ((Error = getError_ST(TrainingDataSet, L, n, w, errExp, 0, true, errMeasure, delta)) < oldError)
                                {
                                    oldError = Error;
                                    errorDecreased = true;
                                    oldWeightTest = Weights[L][n][w];
                                    Weights[L][n][w] -= dw;
                                    Delta[L][n][w] = -dw;
                                    rec2(L, n, w);
                                    if ((Error = getError_ST(TrainingDataSet, L, n, w, errExp, 0, true, errMeasure, delta)) < oldError)
                                    {
                                        oldError = Error;
                                        Delta[L][n][w] = -2 * dw;
                                    }
                                    else
                                    {
                                        oldWeightTest = Weights[L][n][w];
                                        Weights[L][n][w] += dw;
                                        rec2(L, n, w);
                                    }
                                }
                                if (!errorDecreased)
                                {
                                    // Neither direction helped: restore w and shrink the step.
                                    oldWeightTest = Weights[L][n][w];
                                    Weights[L][n][w] = oldW;
                                    rec2(L, n, w);
                                    Delta[L][n][w] = 0.67 * dw;
                                }
                                else if (saveLearning)
                                {
                                    // NOTE(review): improvements found in the +dw branch
                                    // above are not logged here — confirm intended.
                                    if (classification)
                                        errorFile.WriteLine(e + " " + L + " " + n + " " + w + " " + oldError / (Layer[numLayers - 1] * numVectors) + " " + accuracy / numVectors);
                                    else
                                        errorFile.WriteLine(e + " " + L + " " + n + " " + w + " " + oldError / numVectors);
                                }
                            }

                            if (bitmapWidth > 1)
                            {
                                // Plot the error (red) and, for classifiers, accuracy (blue).
                                y = (int)(oldError * y0);
                                x = (int)Math.Round(bitmapWidth * (double)nw / (numWeights * numEpochs));

                                graphics.DrawLine(redPen, old_x - 1, bitmapHeight - old_y, x, bitmapHeight - y);
                                if (classification)
                                {
                                    yb = (int)(accuracy * yb0);
                                    graphics.DrawLine(bluePen, old_x - 1, bitmapHeight - old_yb, x, bitmapHeight - yb);
                                    old_yb = yb;
                                }

                                old_y = y;
                                old_x = x;
                            }

                        }//for w
                    }//for n
                }//for L

                if (saveLearning)
                {
                    // Dump all weights and all cached signals once per epoch.
                    weightFile.Write(e);
                    for (int L = 1; L < numLayers; L++)
                        for (int n = 0; n < Layer[L]; n++)
                            for (int w = 0; w < Layer[L - 1] + 1; w++)
                                weightFile.Write(" " + Weights[L][n][w]);
                    weightFile.WriteLine();

                    signalFile.WriteLine("epoch=" + e);
                    signalFile.Write("vect");
                    for (int L = 1; L < numLayers; L++)
                        for (int n = 0; n < Layer[L]; n++)
                            signalFile.Write(" " + L + "/" + n);
                    signalFile.WriteLine();

                    for (int v = 0; v < numVectors; v++)
                    {
                        signalFile.Write(v);
                        for (int L = 1; L < numLayers; L++)
                            for (int n = 0; n < Layer[L]; n++)
                                signalFile.Write(" " + SignalTableY[v][L][n]);
                        signalFile.WriteLine();
                    }
                }

            } //for e

            if (bitmapWidth > 1)
                bitmap.Save(outputFileName + ".png");

            // FIX: release GDI+ resources and flush the log writers so the files are
            // complete on disk. The writers are class fields and may be used elsewhere,
            // so they are flushed here rather than disposed.
            graphics.Dispose();
            redPen.Dispose();
            grayPen.Dispose();
            bluePen.Dispose();
            bitmap.Dispose();
            weightFile.Flush();
            signalFile.Flush();
            errorFile.Flush();

            error = oldError;
        }

        public void FillSignalTableCross(double[][] DataSet1)
        // Propagates every vector of DataSet1 through the whole network and caches each
        // neuron's weighted sum (SignalTableSumWX) and tanh output (SignalTableY),
        // which rec2 later patches incrementally after single-weight changes.
        // NOTE(review): the hard-coded L = 1 / L = 2 stages assume numLayers == 3.
        {
            int numVect = DataSet1.GetLength(0);
            for (int vect = 0; vect < numVect; vect++)
            {
                // Layer 0 simply holds the input vector.
                for (int n = 0; n < Layer[0]; n++)
                    SignalTableY[vect][0][n] = DataSet1[vect][n];

                // Hidden layer: sum over w of Weights[1][n][w] * input[w], plus bias.
                int L = 1;
                for (int n = 0; n < Layer[L]; n++)
                {
                    SignalTableSumWX[vect][L][n] = 0;
                    for (int w = 0; w < Layer[L - 1]; w++)
                    {
                        // FIX: index the previous layer's output by the weight index w.
                        // The old code used n, multiplying every weight by the same
                        // signal (and reading out of range when Layer[L] > Layer[L-1]);
                        // rec2 patches this sum per-w, so the two also disagreed.
                        SignalTableSumWX[vect][L][n] += Weights[L][n][w] * SignalTableY[vect][L - 1][w];
                    }
                    SignalTableSumWX[vect][L][n] += Weights[L][n][Layer[L - 1]]; //bias
                    SignalTableY[vect][L][n] = Math.Tanh(SignalTableSumWX[vect][L][n]);//y
                }

                // Output layer: hidden-layer weights first, then the "cross" weights
                // connected straight to the inputs, then the bias.
                L = 2;
                for (int n = 0; n < Layer[L]; n++)
                {
                    SignalTableSumWX[vect][L][n] = 0;
                    for (int w = 0; w < Layer[L - 1]; w++)
                        // FIX: same w-vs-n indexing fix as in the hidden layer above.
                        SignalTableSumWX[vect][L][n] += Weights[L][n][w] * SignalTableY[vect][L - 1][w];

                    for (int w = Layer[L - 1]; w < Layer[L - 2] + Layer[L - 1]; w++)
                        SignalTableSumWX[vect][L][n] += Weights[L][n][w] * SignalTableY[vect][L - 2][(w - Layer[L - 1])];

                    SignalTableSumWX[vect][L][n] += Weights[L][n][Layer[L - 1] + Layer[L - 2]]; //bias
                    SignalTableY[vect][L][n] = Math.Tanh(SignalTableSumWX[vect][L][n]);//y
                }
            }
        }

        public void rec2(int L1, int n1, int w1)
        // Incrementally recomputes the cached signals after a single weight change:
        // only neuron (L1, n1) and the neurons of the next layer are updated, using
        // oldWeightTest (the weight's previous value) to patch the cached weighted
        // sums instead of recomputing them from scratch.
        {
            if (sumMedian <= 0)
            {
                for (int vect = 0; vect < TrainingDataSet.GetLength(0); vect++)
                {
                    if (w1 == Layer[L1 - 1])
                    {
                        // Bias weight: its input is 1, so patch by the weight delta alone.
                        SignalTableSumWX[vect][L1][n1] += Weights[L1][n1][w1];
                        SignalTableSumWX[vect][L1][n1] -= oldWeightTest;
                    }
                    else
                    {
                        SignalTableSumWX[vect][L1][n1] += Weights[L1][n1][w1] * SignalTableY[vect][L1 - 1][w1];
                        SignalTableSumWX[vect][L1][n1] -= oldWeightTest * SignalTableY[vect][L1 - 1][w1];
                    }
                    double oldY = SignalTableY[vect][L1][n1];
                    SignalTableY[vect][L1][n1] = Math.Tanh(SignalTableSumWX[vect][L1][n1]);

                    if (L1 != numLayers - 1)
                    {
                        // Propagate the output change of (L1, n1) into the next layer.
                        int L = L1 + 1;
                        for (int n = 0; n < Layer[L]; n++)
                        {
                            SignalTableSumWX[vect][L][n] += Weights[L][n][n1] * SignalTableY[vect][L - 1][n1];
                            SignalTableSumWX[vect][L][n] -= Weights[L][n][n1] * oldY;
                            SignalTableY[vect][L][n] = Math.Tanh(SignalTableSumWX[vect][L][n]); //y
                        }
                    }
                }
            }
            else
            {
                // sum-median variant: the activation input is a blend of the weighted
                // sum and the median of the individual w*x terms of the neuron.
                double medianWX, sumMedWX;
                int mid;
                double[] WX;

                for (int vect = 0; vect < TrainingDataSet.GetLength(0); vect++)
                {
                    if (w1 == Layer[L1 - 1])
                    {
                        SignalTableSumWX[vect][L1][n1] += Weights[L1][n1][w1];
                        SignalTableSumWX[vect][L1][n1] -= oldWeightTest;
                    }
                    else
                    {
                        SignalTableSumWX[vect][L1][n1] += Weights[L1][n1][w1] * SignalTableY[vect][L1 - 1][w1];
                        SignalTableSumWX[vect][L1][n1] -= oldWeightTest * SignalTableY[vect][L1 - 1][w1];
                    }
                    double oldY = SignalTableY[vect][L1][n1];

                    WX = new double[Layer[L1 - 1] + 1];  //one w*x term per weight, incl. bias
                    for (int w = 0; w < Layer[L1 - 1]; w++)
                    {
                        // NOTE(review): reads N[L1-1][w].Y, not SignalTableY[vect][L1-1][w];
                        // confirm the Neuron objects hold the current vector's signals.
                        WX[w] = Weights[L1][n1][w] * N[L1 - 1][w].Y;
                    }
                    WX[Layer[L1 - 1]] = Weights[L1][n1][Layer[L1 - 1]]; //bias

                    Array.Sort(WX);
                    mid = WX.Length / 2;
                    // FIX: for an even count the two middle elements are WX[mid-1] and
                    // WX[mid]; the old code averaged WX[mid] and WX[mid+1], which is the
                    // wrong pair and reads past the end when WX.Length == mid + 1 * 2
                    // (e.g. a 2-element array).
                    medianWX = (WX.Length % 2 != 0) ? WX[mid] : (WX[mid - 1] + WX[mid]) / 2;
                    sumMedWX = (1 - sumMedian) * SignalTableSumWX[vect][L1][n1] + sumMedian * medianWX;
                    SignalTableY[vect][L1][n1] = Math.Tanh(sumMedWX);

                    if (L1 != numLayers - 1)
                    {
                        // Propagate into the next layer, re-blending each downstream sum.
                        int L = L1 + 1;
                        for (int n = 0; n < Layer[L]; n++)
                        {
                            SignalTableSumWX[vect][L][n] += Weights[L][n][n1] * SignalTableY[vect][L - 1][n1];
                            SignalTableSumWX[vect][L][n] -= Weights[L][n][n1] * oldY;

                            WX = new double[Layer[L - 1] + 1];  //w*x terms of the downstream neuron
                            for (int w = 0; w < Layer[L - 1]; w++)
                            {
                                WX[w] = Weights[L][n][w] * N[L - 1][w].Y;
                            }
                            WX[Layer[L - 1]] = Weights[L][n][Layer[L - 1]]; //bias

                            Array.Sort(WX);
                            mid = WX.Length / 2;
                            // FIX: same even-count median correction as above.
                            medianWX = (WX.Length % 2 != 0) ? WX[mid] : (WX[mid - 1] + WX[mid]) / 2;
                            sumMedWX = (1 - sumMedian) * SignalTableSumWX[vect][L][n] + sumMedian * medianWX;
                            SignalTableY[vect][L][n] = Math.Tanh(sumMedWX);
                        }
                    }
                }
            }
        }




       
       

    }
}


