﻿using System;
using System.Collections.Generic;
using System.Text;
using System.IO;
using System.Drawing;

namespace MLP
{
    partial class Network
    {
        //Backpropagation, Rprop and Quickprop training routines



        // Maximum number of training epochs; set from numEpochs in BP().
        public int max_epoch;
        // eta: BP learning rate; alpha: BP momentum; BPstep: step size (not used in this file);
        // etaRpropPlus / etaRpropMinus: Rprop step increase/decrease factors (assigned in BP());
        // minDeltaRprop / maxDeltaRprop: lower/upper bounds on the Rprop step size.
        public double eta = 0.03, alpha = 0.3, BPstep = 0.003, etaRpropPlus = 0, etaRpropMinus = 0,
                      minDeltaRprop = 0.00001, maxDeltaRprop=5.0;
        bool logWeights = false;  // when true, UpdateWeightsBP also logs the normalized gradient vector
        StreamWriter swb;         // training log writer for "<outputFileName>_trn_e.txt"

        // Per-weight training state, indexed [layer][neuron][weight]:
        // dW / olddW: current / previous accumulated gradient;
        // deltaW / olddeltaW: current / previous adaptive step (Rprop / Quickprop).
        double[][][] dW, olddW, deltaW, olddeltaW;
        bool[][][] dWsignChanged;  // NOTE(review): allocated and initialized but never read in this file

        /// <summary>
        /// Trains the network for <c>numEpochs</c> epochs using the algorithm selected by
        /// <c>trainingAlgorithm</c> ("BP", "Rprop" or "Quickprop"). Logs per-epoch error to
        /// "&lt;outputFileName&gt;_trn_e.txt" and, when <c>bitmapWidth &gt; 1</c>, plots the
        /// error curve (red) and — for classification — the accuracy curve (blue) to
        /// "&lt;outputFileName&gt;.png".
        /// </summary>
        /// <param name="etaB">Learning rate for plain backpropagation.</param>
        /// <param name="alphaB">Momentum coefficient for plain backpropagation.</param>
        /// <param name="etaRpropPlus">Rprop step-increase factor (default 1.2).</param>
        /// <param name="etaRpropMinus">Rprop step-decrease factor (default 0.5).</param>
        public void BP(double etaB, double alphaB, double etaRpropPlus = 1.2, double etaRpropMinus = 0.5)
        {
            eta = etaB;
            alpha = alphaB;
            this.etaRpropPlus = etaRpropPlus;
            this.etaRpropMinus = etaRpropMinus;

            max_epoch = numEpochs;
            swb = new StreamWriter(outputFileName + "_trn_e.txt");

            // Vertical scale factors for the error plot (y0) and accuracy plot (yb0).
            double y0 = (double)bitmapHeight / numVectors;
            if (!classification)
                y0 *= 0.5;
            double yb0 = y0;
            if (!classification)
                y0 /= 2;   // NOTE(review): regression halves y0 twice (0.25x in total) — confirm intended
            int x, y, old_x = 1, old_y = bitmapHeight, old_yb = 0, yb;
            int nw = 0;
            Bitmap bitmapB = new Bitmap(bitmapWidth, bitmapHeight);
            Graphics graphics = Graphics.FromImage(bitmapB);
            Pen redPen = new Pen(Color.Red, 3);
            Pen grayPen = new Pen(Color.Gray, 3);
            Pen bluePen = new Pen(Color.Blue, 3);
            if (bitmapWidth > 1)
            {
                // FillRectangle takes (x, y, width, height); the original passed
                // (0, bitmapWidth, 0, bitmapHeight), which filled a zero-size rectangle
                // at the wrong position instead of clearing the image to white.
                using (SolidBrush whiteBrush = new SolidBrush(Color.White))
                    graphics.FillRectangle(whiteBrush, 0, 0, bitmapWidth, bitmapHeight);
                graphics.DrawLine(grayPen, 0, 0, bitmapWidth, 0);
                // Bottom border: both endpoints at bitmapHeight - 1 (was -1 / -2, a slanted line).
                graphics.DrawLine(grayPen, 0, bitmapHeight - 1, bitmapWidth, bitmapHeight - 1);
                x = (int)Math.Round((double)((bitmapWidth * nw) / max_epoch));
                graphics.DrawLine(grayPen, x, 0, x, bitmapHeight);

                // Vertical grid lines every 10 / 100 / 1000 epochs depending on run length
                // (the original had three copies of the same loop).
                int gridStep = max_epoch < 303 ? 10 : (max_epoch < 3030 ? 100 : 1000);
                for (int e = 0; e < max_epoch; e += gridStep)
                {
                    x = (int)Math.Round((double)((bitmapWidth * e) / numEpochs));
                    graphics.DrawLine(grayPen, x, 0, x, bitmapHeight);
                }
            }

            swb.WriteLine("epoch     error");

            // Allocate the per-weight training state, indexed [layer][neuron][weight];
            // each neuron has Layer[L - 1] regular inputs plus one bias weight.
            dW = new double[3][][];
            olddW = new double[3][][];
            deltaW = new double[3][][];
            olddeltaW = new double[3][][];
            dWsignChanged = new bool[3][][];
            for (int L = 1; L < numLayers; L++)
            {
                dW[L] = new double[Layer[L]][];
                olddW[L] = new double[Layer[L]][];
                deltaW[L] = new double[Layer[L]][];
                olddeltaW[L] = new double[Layer[L]][];
                dWsignChanged[L] = new bool[Layer[L]][];
                for (int n = 0; n < Layer[L]; n++)
                {
                    dW[L][n] = new double[Layer[L - 1] + 1];
                    olddW[L][n] = new double[Layer[L - 1] + 1];
                    deltaW[L][n] = new double[Layer[L - 1] + 1];
                    olddeltaW[L][n] = new double[Layer[L - 1] + 1];
                    dWsignChanged[L][n] = new bool[Layer[L - 1] + 1];
                }
            }

            // Initialize: zero gradients, 0.1 starting step for Rprop/Quickprop.
            for (int L = 1; L < numLayers; L++)
                for (int n = 0; n < Layer[L]; n++)
                    for (int w = 0; w <= Layer[L - 1]; w++)
                    {
                        dW[L][n][w] = 0;
                        N[L][n].oldWeight[w] = 0;
                        olddW[L][n][w] = 0;
                        deltaW[L][n][w] = 0.1;
                        olddeltaW[L][n][w] = 0.1;
                        dWsignChanged[L][n][w] = false;
                    }

            for (int e = 0; e < max_epoch; e++)
            {
                // Snapshot weights and zero the gradient accumulators for ALL layers.
                // The original loop ran "for (int L = 1; L < 2; L++)", so layer-2
                // gradients were never reset and accumulated across epochs.
                for (int L = 1; L < numLayers; L++)
                    for (int n = 0; n < Layer[L]; n++)
                        for (int w = 0; w <= Layer[L - 1]; w++)
                        {
                            N[L][n].oldWeight[w] = N[L][n].weight[w];
                            dW[L][n][w] = 0;
                        }

                // Full pass over the training set: fills dW, error and accuracy.
                CalculateGradient();

                swb.WriteLine("epoch={0,0}  error={1:F6}", e, error / numVectors);

                if (trainingAlgorithm == "BP")
                    UpdateWeightsBP();
                else if (trainingAlgorithm == "Rprop")
                    UpdateWeightsRprop(e);
                else if (trainingAlgorithm == "Quickprop")
                    UpdateWeightsQuickprop(e);

                swb.WriteLine();

                if (bitmapWidth > 1)
                {
                    // Plot error (red) and, for classification, accuracy (blue);
                    // the y axis is flipped because bitmap row 0 is the top.
                    y = (int)(error * y0);
                    x = (int)Math.Round((double)((bitmapWidth * e) / numEpochs));
                    if (e > 0)
                    {
                        graphics.DrawLine(redPen, old_x - 1, bitmapHeight - old_y, x, bitmapHeight - y);
                        if (classification)
                        {
                            yb = (int)(accuracy * yb0);
                            graphics.DrawLine(bluePen, old_x - 1, bitmapHeight - old_yb, x, bitmapHeight - yb);
                            old_yb = yb;
                        }
                    }
                    old_y = y;
                    old_x = x;
                }
            }

            swb.Close();

            if (bitmapWidth > 1)
                bitmapB.Save(outputFileName + ".png");

            // Release GDI+ resources (the original leaked them).
            graphics.Dispose();
            redPen.Dispose();
            grayPen.Dispose();
            bluePen.Dispose();
            bitmapB.Dispose();
        }


        // Runs one full pass over the training set: forward-propagates every vector,
        // accumulates the sum-of-squares error into 'error' (and, for classification,
        // the number of correctly classified vectors into 'accuracy'), and accumulates
        // the error gradient into dW[L][n][w].
        // Assumes a 3-layer topology: layer 0 = inputs, layer 1 = hidden, layer 2 = output.
        private void CalculateGradient()
        {

            // eta1 is fixed at 1 here, so dW holds the raw gradient; the learning rate
            // is applied later by the UpdateWeights* methods.
            double SK1, eta1 = 1;// = eta;
            double[] Dlt;


            error = 0;
            accuracy = 0;

            for (int v = 0; v < numVectors; v++)
            {

                //Propagate signals and get error   

                /*N[1][0].weight[0] = -0.27;
                N[1][0].weight[1] = -0.48;
                N[1][1].weight[0] = -0.41;
                N[1][1].weight[1] = -0.13;
                N[2][0].weight[0] = 0.09;
                N[2][0].weight[1] = -0.17;
                N[2][0].weight[2] = 0.48;*/

                // Load the input vector into layer 0.
                for (int n = 0; n < Layer[0]; n++)
                    N[0][n].Y = TrainingDataSet[v][n];

                // Forward pass through hidden and output layers.
                for (int L = 1; L < numLayers; L++)
                {
                    for (int n = 0; n < Layer[L]; n++)
                    {
                        N[L][n].sumWX = 0;
                        for (int w = 0; w < N[L][n].weight.Length - 1; w++)
                            N[L][n].sumWX += N[L][n].weight[w] * N[L - 1][w].Y;
                        N[L][n].sumWX += N[L][n].weight[N[L][n].weight.Length - 1]; // bias weight (input fixed at 1)

                        //double nY = N[L][n].sumWX;
                        // Regression uses a linear output neuron; everything else is sigmoidal.
                        if (!classification && L == 2)
                            N[L][n].Y = N[L][n].sumWX;
                        else
                            N[L][n].Y = 1 / (1 + Math.Exp(-N[L][n].sumWX)); //logistic sigmoid

                        //  if (nY >= -3 && nY <= 3) N[L][n].Y = Math.Tanh(nY) / 0.995;
                        //  else if (nY <= -3) N[L][n].Y = -1;
                        //  else N[L][n].Y = 1;
                    }
                }



                // Dlt[n] = dE/d(net input) for output neuron n, averaged over the set.
                Dlt = new double[Layer[2]];

                if (classification)
                {
                    // Track the highest-activation output to score accuracy (winner-take-all).
                    int maxN = -1;
                    double maxY = -1;


                    // Target class index; class labels in the data start from 1.
                    int y = (int)TrainingDataSet[v][Layer[0]] - 1;
                    for (int n = 0; n < Layer[2]; n++)
                    {
                        //class numbering starts from 1
                        if (n == y)  //N[L][y].Y is expected to be 1;
                        {
                            // d/dnet of (t - y)^2 with sigmoid output: -2*(t - y)*y*(1 - y), t = 1.
                            Dlt[n] = -2*(1 - N[2][n].Y) * N[2][n].Y * (1 - N[2][n].Y) / numVectors;//- N[2][n].Y * (1 - N[2][n].Y) * 2 / numVectors; 
                            error += Math.Pow(1 - N[2][n].Y, 2.0);
                        }
                        else  //N[L][y].Y is expected to be 0;
                        {
                            // Same derivative with target t = 0.
                            Dlt[n] = -2*(0 - N[2][n].Y) * N[2][n].Y * (1 - N[2][n].Y) / numVectors;//- N[2][n].Y * (1 - N[2][n].Y) * 2 / numVectors; 
                            error += Math.Pow(N[2][n].Y, 2.0);
                        }



                        if (N[2][n].Y > maxY)
                        {
                            maxY = N[2][n].Y;
                            maxN = n;
                        }
                    }

                    // Count a hit when the winning neuron matches the target class.
                    if (maxN == TrainingDataSet[v][Layer[0]] - 1) 
                        accuracy++;


                }
                else
                {
                    //linear transfer function in the output neuron for regression
                    Dlt[0] = -2 * (TrainingDataSet[v][Layer[0]] - N[2][0].Y) / numVectors;// * (-2); //*N[2][0].Y * (1 - N[2][0].Y);
                    // Debug trace of target vs. prediction for each vector.
                    swb.WriteLine("t=" + TrainingDataSet[v][Layer[0]] + "   y=" + N[2][0].Y);
                    error += Math.Pow(N[2][0].Y - TrainingDataSet[v][Layer[0]], 2.0);
                }
                

                //output layer
                // Accumulate output-layer gradient: dW[2][n][w] += Dlt[n] * (input w of neuron n);
                // the last index (w == Layer[1]) is the bias, whose input is 1.
                //  for (int n = 0; n < Layer[2] - 1; n++)
                swb.WriteLine("n, w, dW[2][n][w], eta1, Dlt[n], N[1][w].Y");
                for (int n = 0; n < Layer[2]; n++)
                    for (int w = 0; w <= Layer[1]; w++)
                    {
                        if (w < Layer[1])
                        {
                            dW[2][n][w] += eta1 * Dlt[n] * N[1][w].Y;
                            swb.WriteLine("{0}   {1}   {2:F4}   {3:F4}   {4:F4}   {5:F4}", n, w, dW[2][n][w], eta1, Dlt[n], N[1][w].Y);
                        }
                        else
                        {
                            dW[2][n][w] += eta1 * Dlt[n]; //bias
                            swb.WriteLine("{0}   {1}   {2:F4}   {3:F4}   {4:F4}   bias", n, w, dW[2][n][w], eta1, Dlt[n]);
                        }
                    }


               // mtp = 1.0;
                //hidden layer
                // SK1 backpropagates the output deltas through the hidden-to-output weights;
                // NOTE(review): SK1 depends only on n, yet is recomputed for every w.
                // for (int n = 0; n < Layer[1] - 1; n++)
                for (int n = 0; n < Layer[1]; n++)
                    for (int w = 0; w <= Layer[0]; w++)
                    {
                        SK1 = 0;
                        for (int kk = 0; kk < Layer[2]; kk++)
                            SK1 += Dlt[kk] *N[2][kk].weight[n];

                        // SK1 *= mtp;

                        // Sigmoid derivative y*(1 - y) of the hidden neuron times its input.
                        if (w < Layer[0])
                            dW[1][n][w] += eta1 * SK1 * N[1][n].Y * (1 - N[1][n].Y) * N[0][w].Y;
                        else
                            dW[1][n][w] += eta1 * SK1 * N[1][n].Y * (1 - N[1][n].Y); //bias
                    }
                
            }
            
        }


        /// <summary>
        /// Plain gradient descent with momentum: w -= eta * (dW + alpha * olddW),
        /// with both gradients and weights clipped to [-1000, 1000]. The whole update
        /// is skipped when ANY single gradient component is smaller in magnitude than
        /// <c>minGrad</c>.
        /// </summary>
        private void UpdateWeightsBP()
        {

            swb.WriteLine("L, n, w, olddW[L][n][w], dW[L][n][w], w, N[L][n].weight[w]");
          //eta = -Math.Abs(eta);

            // NOTE(review): 10E-9 is 1e-8, not 1e-9 — confirm which threshold was intended.
            double minGrad = 10E-9;
            bool isMinGrad = false;

            // Scan for a vanishing gradient component; one is enough, so stop early
            // (the original always scanned every weight).
            for (int L = 1; L < numLayers && !isMinGrad; L++)
                for (int n = 0; n < Layer[L] && !isMinGrad; n++)
                    for (int w = 0; w <= Layer[L - 1]; w++)
                    {
                        if (Math.Abs(dW[L][n][w]) < minGrad)
                        {
                            isMinGrad = true;
                            break;
                        }
                    }

            if (!isMinGrad)
            {
                for (int L = 1; L < numLayers; L++)
                    for (int n = 0; n < Layer[L]; n++)
                        for (int w = 0; w <= Layer[L - 1]; w++)
                        {
                            // Clip the gradient to avoid runaway steps.
                            if (dW[L][n][w] > 1000)
                                dW[L][n][w] = 1000;
                            else if (dW[L][n][w] < -1000)
                                dW[L][n][w] = -1000;

                            // Momentum update. The original had identical if/else branches
                            // for regular weights and the bias; collapsed into one statement.
                            N[L][n].weight[w] -= eta * (dW[L][n][w] + alpha * olddW[L][n][w]);

                            // Clip the weight itself.
                            if (N[L][n].weight[w] > 1000)
                                N[L][n].weight[w] = 1000;
                            else if (N[L][n].weight[w] < -1000)
                                N[L][n].weight[w] = -1000;

                            swb.WriteLine("{0}  {1}  {2}  {3:F5}  {4:F5}  w  {5:F5}", L, n, w, olddW[L][n][w], dW[L][n][w], N[L][n].weight[w]);
                            olddW[L][n][w] = dW[L][n][w];
                        }
            }

            if (logWeights)
            {
                swb.WriteLine("error=" + error);

                // Log the gradient as a unit vector (direction only).
                double V_len = 0;
                for (int L = 1; L < numLayers; L++)
                    for (int n = 0; n < Layer[L]; n++)
                        for (int w = 0; w <= Layer[L - 1]; w++)
                            V_len += dW[L][n][w] * dW[L][n][w];

                V_len = Math.Sqrt(V_len);

                swb.WriteLine(V_len);
                for (int L = 1; L < numLayers; L++)
                    for (int n = 0; n < Layer[L]; n++)
                        for (int w = 0; w <= Layer[L - 1]; w++)
                            swb.WriteLine(L + " " + n + " " + w + "  " + dW[L][n][w] / V_len);

                swb.WriteLine();
            }

        }



        /// <summary>
        /// Quickprop weight update (Fahlman). Epoch 0 takes a plain gradient-descent
        /// step and seeds olddW/olddeltaW; subsequent epochs take the secant step
        /// deltaW = dW / (olddW - dW) * olddeltaW, clamped to ±maxDeltaRprop.
        /// </summary>
        /// <param name="e">Current epoch index; selects the seeding branch when 0.</param>
        private void UpdateWeightsQuickprop(int e)
        {

            if (e == 0)
            {
                // First epoch: take a consistent gradient-DESCENT step -eta*dW for every
                // weight and record that applied step in olddeltaW for the secant formula.
                // The original used +eta*dW for non-bias weights (gradient ascent) while
                // using -eta*dW for the bias — an inconsistency within the same update.
                for (int L = 1; L < numLayers; L++)
                    for (int n = 0; n < Layer[L]; n++)
                        for (int w = 0; w <= Layer[L - 1]; w++)
                        {
                            olddeltaW[L][n][w] = -eta * dW[L][n][w];
                            N[L][n].weight[w] -= eta * dW[L][n][w];

                            olddW[L][n][w] = dW[L][n][w];
                        }
                return;
            }

            swb.WriteLine("L, n, w, olddW[L][n][w], dW[L][n][w], olddeltaW[L][n][w], deltaW[L][n][w], w, N[L][n].weight[w]");
            for (int L = 1; L < numLayers; L++)
                for (int n = 0; n < Layer[L]; n++)
                    for (int w = 0; w <= Layer[L - 1]; w++)
                    {
                        // Guard the secant denominator (olddW - dW) against values near
                        // zero: fall back to the maximum step with the matching sign.
                        if (olddW[L][n][w] - dW[L][n][w] < 0.000000001 && olddW[L][n][w] - dW[L][n][w] > 0)
                            deltaW[L][n][w] = maxDeltaRprop;
                        else if (olddW[L][n][w] - dW[L][n][w] > -0.000000001 && olddW[L][n][w] - dW[L][n][w] < 0)
                            deltaW[L][n][w] = -maxDeltaRprop;
                        else
                            deltaW[L][n][w] = dW[L][n][w] / (olddW[L][n][w] - dW[L][n][w]) * olddeltaW[L][n][w];

                        // Clamp the step to [-maxDeltaRprop, maxDeltaRprop].
                        if (deltaW[L][n][w] > maxDeltaRprop)
                            deltaW[L][n][w] = maxDeltaRprop;

                        if (deltaW[L][n][w] < -maxDeltaRprop)
                            deltaW[L][n][w] = -maxDeltaRprop;

                        N[L][n].weight[w] += deltaW[L][n][w];
                        swb.WriteLine("{0}  {1}  {2}  {3:F4}  {4:F4}  {5:F4}  {6:F4}  w  {7:F4}", L, n, w, olddW[L][n][w], dW[L][n][w], olddeltaW[L][n][w], deltaW[L][n][w], N[L][n].weight[w]);

                        // Shift current gradient/step into the "previous" slots.
                        olddW[L][n][w] = dW[L][n][w];
                        olddeltaW[L][n][w] = deltaW[L][n][w];
                    }
        }




        /// <summary>
        /// Rprop (iRprop- variant) weight update: each weight keeps its own step size
        /// in deltaW, which grows by etaRpropPlus while the gradient keeps its sign and
        /// shrinks by etaRpropMinus after a sign flip; only the gradient's sign moves
        /// the weight. Logs every update with a p/m/z marker for the three cases.
        /// </summary>
        /// <param name="e">Current epoch index (not used by the update itself).</param>
        private void UpdateWeightsRprop(int e)
        {
            swb.WriteLine("L, n, w, olddW[L][n][w], dW[L][n][w], deltaW[L][n][w], pmz N[L][n].weight[w]");

            for (int layer = 1; layer < numLayers; layer++)
            {
                for (int neuron = 0; neuron < Layer[layer]; neuron++)
                {
                    // Layer[layer - 1] regular inputs plus the bias weight.
                    for (int wi = 0; wi <= Layer[layer - 1]; wi++)
                    {
                        double grad = dW[layer][neuron][wi];
                        double signProduct = olddW[layer][neuron][wi] * grad;

                        if (signProduct > 0)
                        {
                            // Same direction as last epoch: enlarge the step (capped) and apply it.
                            deltaW[layer][neuron][wi] = Math.Min(deltaW[layer][neuron][wi] * etaRpropPlus, maxDeltaRprop);
                            N[layer][neuron].weight[wi] += deltaW[layer][neuron][wi] * Math.Sign(grad);
                            swb.WriteLine("{0}  {1}  {2}  {3:F4}  {4:F4}  {5:F4}  p  {6:F4}", layer, neuron, wi, olddW[layer][neuron][wi], grad, deltaW[layer][neuron][wi], N[layer][neuron].weight[wi]);
                            olddW[layer][neuron][wi] = grad;
                        }
                        else if (signProduct < 0)
                        {
                            // Sign flip — a minimum was overshot: shrink the step (floored),
                            // skip the weight change, and zero the stored gradient (iRprop-).
                            deltaW[layer][neuron][wi] = Math.Max(deltaW[layer][neuron][wi] * etaRpropMinus, minDeltaRprop);
                            swb.WriteLine("{0}  {1}  {2}  {3:F4}  {4:F4}  {5:F4}  m  {6:F4}", layer, neuron, wi, olddW[layer][neuron][wi], grad, deltaW[layer][neuron][wi], N[layer][neuron].weight[wi]);
                            olddW[layer][neuron][wi] = 0;
                        }
                        else
                        {
                            // Zero product (first epoch or right after a flip): apply the
                            // current step without adapting its size.
                            N[layer][neuron].weight[wi] += deltaW[layer][neuron][wi] * Math.Sign(grad);
                            swb.WriteLine("{0}  {1}  {2}  {3:F4}  {4:F4}  {5:F4}  z  {6:F4}", layer, neuron, wi, olddW[layer][neuron][wi], grad, deltaW[layer][neuron][wi], N[layer][neuron].weight[wi]);
                            olddW[layer][neuron][wi] = grad;
                        }
                    }
                }
            }
        }




    }
}

 