﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Drawing;
using System.IO;

namespace MLP
{
    partial class Network
    {
               
        // Per-worker partition of the training set: worker p processes vectors
        // StartVector[p]..EndVector[p] inclusive (filled by SplitTrainingFile).
        int[] StartVector, EndVector;
        // Per-worker accumulators, combined sequentially after each Parallel.For
        // in getError_STParallel.
        double[] PartialError, PartialAccuracy;
        // Number of parallel workers; recomputed in SplitTrainingFile from
        // Environment.ProcessorCount capped by problem size.
        int numProcessors = 4;
        // Rough work estimate: vectors * attributes * (hidden + output neurons).
        int problemSize;
        // Tuning knob for the problemSize thresholds that cap numProcessors.
        double scale = 1.0;

        // Partitions the training vectors into numProcessors contiguous slices
        // (StartVector/EndVector, both inclusive) and allocates the per-worker
        // accumulator arrays. The worker count is Environment.ProcessorCount,
        // capped according to a rough problem-size estimate so that thread
        // overhead does not dominate small problems.
        public void SplitTrainingFile()
        {
            // Total number of non-input neurons — one factor of the work estimate.
            int hiddenAndOutputNeurons = 0;
            for (int layer = 1; layer < numLayers; layer++)
                hiddenAndOutputNeurons += Layer[layer];

            int vectorCount = TrainingDataSet.GetLength(0);
            problemSize = vectorCount * TrainingDataSet[0].GetLength(0) * hiddenAndOutputNeurons;
            numProcessors = Environment.ProcessorCount;

            // Cap the worker count for small problems; the first two checks are
            // independent, the remaining thresholds form an else-if ladder.
            if (problemSize < 3000 * scale && numProcessors > 4)
                numProcessors = 2;
            if (problemSize < 12000 * scale && numProcessors > 4)
                numProcessors = 4;
            if (problemSize < 60000 * scale && numProcessors > 8)
                numProcessors = 8;
            else if (problemSize < 400000 * scale && numProcessors > 12)
                numProcessors = 12;
            else if (problemSize < 4000000 * scale && numProcessors > 16)
                numProcessors = 16;
            else if (numProcessors > 24)
                numProcessors = 24;

            StartVector = new int[numProcessors];
            EndVector = new int[numProcessors];
            PartialError = new double[numProcessors];
            PartialAccuracy = new double[numProcessors];

            // Fractional chunk size spreads the remainder vectors evenly.
            double chunk = (double)vectorCount / (double)numProcessors;

            int part = 0;
            for (int v = 0; v < vectorCount; v++)
            {
                // A new slice starts whenever v crosses the next chunk boundary.
                if (v >= part * chunk)
                {
                    if (part < numProcessors)
                        StartVector[part] = v;
                    if (part > 0)
                        EndVector[part - 1] = v - 1;
                    part++;
                }
            }
            // Pin the outermost bounds explicitly.
            StartVector[0] = 0;
            EndVector[numProcessors - 1] = vectorCount - 1;
        }




        public void VSS_ST_parallel(ref double calculationProgress, int numLayers1)// MLP network learning algorithm: finds the optimal weights that minimize MSE
        // Simplified VSS: a small value dw is added to each single weight and the error
        // is recalculated; if the error decreases the change is kept (and one further
        // step is tried), otherwise dw is subtracted from the weight and the error is
        // checked again; if neither direction helps the weight is restored. The sweep
        // over every weight is repeated for numEpochs epochs.
        // calculationProgress: fraction of work done, reported back to the caller.
        // numLayers1: number of phases the caller runs; divides the reported progress.
        {
            SplitTrainingFile();

            FillSignalTableParallel(TrainingDataSet);
            oldError = getError_STParallel(TrainingDataSet, 0, 0, 0, errExp, 0, true, errMeasure, delta);
            double Error = oldError;
            double dw = dw0;

            bool useBitmap = false;
            if (!useBitmap)
                bitmapWidth = 1;   // a 1-pixel-wide bitmap disables all plotting below

            Bitmap bitmap = new Bitmap(bitmapWidth, bitmapHeight);
            Pen redPen = new Pen(Color.Red, 3);
            Pen grayPen = new Pen(Color.Gray, 3);
            Pen bluePen = new Pen(Color.Blue, 3);
            Graphics graphics = Graphics.FromImage(bitmap);
            // FIX: the arguments were (x=0, y=bitmapWidth, width=0, height=bitmapHeight),
            // i.e. a zero-width rectangle — the white background was never painted.
            graphics.FillRectangle(new SolidBrush(Color.White), 0, 0, bitmapWidth, bitmapHeight);

            int numWeights = 0, nw = 0;
            for (int L = 1; L < numLayers; L++)
                // FIX: every neuron in layer L has Layer[L - 1] weights plus one bias,
                // so the per-layer count is Layer[L] * (Layer[L - 1] + 1). The original
                // added only a single +1 per layer, undercounting numWeights and letting
                // the plotted x coordinate run past the bitmap width.
                numWeights += Layer[L] * (Layer[L - 1] + 1);

            double y0 = (double)bitmapHeight / oldError;   // vertical scale of the error curve

            double yb0 = y0;                               // vertical scale of the accuracy curve
            if (!classification)
                y0 /= 2;
            int x, y, old_x = 1, old_y = bitmapHeight, old_yb = 0, yb;
            if (bitmapWidth > 1)
            {
                // Frame plus one vertical grid line per epoch boundary.
                graphics.DrawLine(grayPen, 0, 0, bitmapWidth, 0);
                graphics.DrawLine(grayPen, 0, bitmapHeight - 1, bitmapWidth, bitmapHeight - 2);
                // FIX (here and in the two x computations below): the (double) cast was
                // applied AFTER the integer division, so Math.Round never saw a fraction.
                x = (int)Math.Round(bitmapWidth * (double)nw / (numWeights * numEpochs));
                graphics.DrawLine(grayPen, x, 0, x, bitmapHeight);
                for (int e = 1; e <= numEpochs; e++)
                {
                    nw = numWeights * e;
                    x = (int)Math.Round(bitmapWidth * (double)nw / (numWeights * numEpochs));
                    graphics.DrawLine(grayPen, x, 0, x, bitmapHeight);
                }
            }
            nw = 0;

            if (saveLearning)
            {
                // NOTE(review): these writers are class fields and are not closed in this
                // method — presumably closed elsewhere; confirm, or output may be lost.
                weightFile = new StreamWriter(outputFileName + "_weights.txt");
                signalFile = new StreamWriter(outputFileName + "_signals.txt");
                errorFile = new StreamWriter(outputFileName + "_errors.txt");

                // Error file header: topology plus the initial (epoch 0) error line.
                errorFile.Write("Network: ");
                for (int i = 0; i < numLayers - 1; i++)
                    errorFile.Write(Layer[i] + "-");
                errorFile.WriteLine(Layer[numLayers - 1]);

                if (classification)
                {
                    errorFile.WriteLine("epoch L n w error accuracy");
                    errorFile.WriteLine("0 0 0 0 " + oldError / (Layer[numLayers - 1] * numVectors) + " " + accuracy / numVectors);
                }
                else
                {
                    errorFile.WriteLine("epoch L n w error");
                    errorFile.WriteLine("0 0 0 0 " + oldError / numVectors);
                }

                // Weight file header: topology, one L/n/w column per weight, then the
                // initial weights written as epoch 0.
                weightFile.Write("Network: ");
                for (int i = 0; i < numLayers - 1; i++)
                    weightFile.Write(Layer[i] + "-");
                weightFile.Write(Layer[numLayers - 1]);
                weightFile.WriteLine("   L/n/w");

                weightFile.Write("epoch");
                for (int L = 1; L < numLayers; L++)
                    for (int n = 0; n < Layer[L]; n++)
                        for (int w = 0; w < Layer[L - 1] + 1; w++)
                            weightFile.Write(" " + L + "/" + n + "/" + w);
                weightFile.WriteLine();

                weightFile.Write("0 ");
                for (int L = 1; L < numLayers; L++)
                    for (int n = 0; n < Layer[L]; n++)
                        for (int w = 0; w < Layer[L - 1] + 1; w++)
                            weightFile.Write(" " + Weights[L][n][w]);
                weightFile.WriteLine();

                // Signal file header: topology, then one row per vector with every
                // neuron output for epoch 0.
                signalFile.Write("Network: ");
                for (int i = 0; i < numLayers - 1; i++)
                    signalFile.Write(Layer[i] + "-");
                signalFile.Write(Layer[numLayers - 1]);
                signalFile.WriteLine("   L/n");

                signalFile.WriteLine("epoch=0");
                signalFile.Write("vect");
                for (int L = 1; L < numLayers; L++)
                    for (int n = 0; n < Layer[L]; n++)
                        signalFile.Write(" " + L + "/" + n);
                signalFile.WriteLine();

                for (int v = 0; v < numVectors; v++)
                {
                    signalFile.Write(v);
                    for (int L = 1; L < numLayers; L++)
                        for (int n = 0; n < Layer[L]; n++)
                            signalFile.Write(" " + SignalTableY[v][L][n]);
                    signalFile.WriteLine();
                }
            }

            for (int e = 1; e <= numEpochs; e++)
            {
                // FIX: e / numEpochs was integer division, so the reported progress
                // stayed 0 until the final epoch; compute the fraction in floating point.
                calculationProgress = (double)e / numEpochs / numLayers1;
                dw *= 0.995;   // NOTE(review): dw is overwritten from Delta below before it is read

                for (int L = 1; L < numLayers; L++)
                {
                    for (int n = 0; n < Layer[L]; n++)
                    {
                        for (int w = 0; w < Layer[L - 1] + 1; w++)
                        {
                            nw++;
                            bool errorDecreased = false;
                            dw = 0.67 * Delta[L][n][w];   // step size adapted per weight
                            double oldW = Weights[L][n][w];
                            // oldWeightTest must hold the pre-change value before every
                            // modification: recParallel uses it to update the stored
                            // signals incrementally.
                            oldWeightTest = Weights[L][n][w];
                            Weights[L][n][w] += dw;
                            recParallel(L, n, w);
                            if ((Error = getError_STParallel(TrainingDataSet, L, n, w, errExp, 0, true, errMeasure, delta)) < oldError)
                            {
                                // Positive step helped: keep it and try one more step.
                                oldError = Error;
                                errorDecreased = true;
                                Delta[L][n][w] = dw;
                                oldWeightTest = Weights[L][n][w];
                                Weights[L][n][w] += dw;
                                recParallel(L, n, w);
                                if ((Error = getError_STParallel(TrainingDataSet, L, n, w, errExp, 0, true, errMeasure, delta)) < oldError)
                                {
                                    oldError = Error;
                                    Delta[L][n][w] = 2 * dw;
                                }
                                else
                                {
                                    // Second step hurt: roll it back.
                                    oldWeightTest = Weights[L][n][w];
                                    Weights[L][n][w] -= dw;
                                    recParallel(L, n, w);
                                }
                            }
                            else
                            {
                                // Positive step hurt: try the negative direction
                                // (net -dw relative to the original weight).
                                oldWeightTest = Weights[L][n][w];
                                Weights[L][n][w] -= 2 * dw;
                                recParallel(L, n, w);
                                if ((Error = getError_STParallel(TrainingDataSet, L, n, w, errExp, 0, true, errMeasure, delta)) < oldError)
                                {
                                    oldError = Error;
                                    errorDecreased = true;
                                    oldWeightTest = Weights[L][n][w];
                                    Weights[L][n][w] -= dw;
                                    Delta[L][n][w] = -dw;
                                    recParallel(L, n, w);
                                    if ((Error = getError_STParallel(TrainingDataSet, L, n, w, errExp, 0, true, errMeasure, delta)) < oldError)
                                    {
                                        oldError = Error;
                                        Delta[L][n][w] = -2 * dw;
                                    }
                                    else
                                    {
                                        oldWeightTest = Weights[L][n][w];
                                        Weights[L][n][w] += dw;
                                        recParallel(L, n, w);
                                    }
                                }
                                if (!errorDecreased)
                                {
                                    // Neither direction helped: restore the weight and
                                    // shrink the step for the next sweep.
                                    oldWeightTest = Weights[L][n][w];
                                    Weights[L][n][w] = oldW;
                                    recParallel(L, n, w);
                                    Delta[L][n][w] = 0.67 * dw;
                                }
                                else if (saveLearning)
                                {
                                    if (classification)
                                        errorFile.WriteLine(e + " " + L + " " + n + " " + w + " " + oldError / (Layer[numLayers - 1] * numVectors) + " " + accuracy / numVectors);
                                    else
                                        errorFile.WriteLine(e + " " + L + " " + n + " " + w + " " + oldError / numVectors);
                                }
                            }

                            if (bitmapWidth > 1)
                            {
                                // Plot the error (and, for classification, the accuracy)
                                // after every weight update.
                                y = (int)(oldError * y0);
                                x = (int)Math.Round(bitmapWidth * (double)nw / (numWeights * numEpochs));

                                graphics.DrawLine(redPen, old_x - 1, bitmapHeight - old_y, x, bitmapHeight - y);
                                if (classification)
                                {
                                    yb = (int)(accuracy * yb0);
                                    graphics.DrawLine(bluePen, old_x - 1, bitmapHeight - old_yb, x, bitmapHeight - yb);
                                    old_yb = yb;
                                }

                                old_y = y;
                                old_x = x;
                            }
                        }//for w
                    }//for n
                }//for L

                if (saveLearning)
                {
                    // Dump all weights and all signals after each epoch.
                    weightFile.Write(e);
                    for (int L = 1; L < numLayers; L++)
                        for (int n = 0; n < Layer[L]; n++)
                            for (int w = 0; w < Layer[L - 1] + 1; w++)
                                weightFile.Write(" " + Weights[L][n][w]);
                    weightFile.WriteLine();

                    signalFile.WriteLine("epoch=" + e);
                    signalFile.Write("vect");
                    for (int L = 1; L < numLayers; L++)
                        for (int n = 0; n < Layer[L]; n++)
                            signalFile.Write(" " + L + "/" + n);
                    signalFile.WriteLine();

                    for (int v = 0; v < numVectors; v++)
                    {
                        signalFile.Write(v);
                        for (int L = 1; L < numLayers; L++)
                            for (int n = 0; n < Layer[L]; n++)
                                signalFile.Write(" " + SignalTableY[v][L][n]);
                        signalFile.WriteLine();
                    }
                }
            } //for e

            if (bitmapWidth > 1)
                bitmap.Save(outputFileName + ".png");

            error = oldError;
        }





        public void recParallel(int L1, int n1, int w1) //recalculating Y and SumWX for each vector after weight change [only the neuron which weight was changed, and all in the upper layers]
        // Incremental forward pass after Weights[L1][n1][w1] has been modified.
        // Relies on the field oldWeightTest holding the PRE-change value of that weight
        // (set by the caller immediately before the modification). For each vector in
        // its slice a worker:
        //  1. adjusts the changed neuron's weighted sum by (new - old) contribution,
        //  2. adjusts layer L1+1 sums by the resulting change in this neuron's output,
        //  3. fully recomputes layers L1+2.. (all of their inputs may have changed).
        {
            
            Parallel.For(0, numProcessors, p =>
            //for (int p = 0; p < numProcessors; p++)
			
            {
                // Each worker owns the contiguous vector range [StartVector[p], EndVector[p]].
                for (int vect = StartVector[p]; vect <= EndVector[p]; vect++)
                {
                        if (w1 == Layer[L1 - 1]) //bias
                        {
                            // Bias weight: its contribution does not depend on any input signal.
                            SignalTableSumWX[vect][L1][n1] += Weights[L1][n1][w1];
                            SignalTableSumWX[vect][L1][n1] -= oldWeightTest;
                        }
                        else
                        {
                            // Ordinary weight: swap old contribution for new one.
                            SignalTableSumWX[vect][L1][n1] += Weights[L1][n1][w1] * SignalTableY[vect][L1 - 1][w1];
                            SignalTableSumWX[vect][L1][n1] -= oldWeightTest * SignalTableY[vect][L1 - 1][w1];
                        }
                        // Remember the neuron's previous output so the next layer's sums
                        // can be corrected incrementally as well.
                        double oldY = SignalTableY[vect][L1][n1];
                        SignalTableY[vect][L1][n1] = Math.Tanh(SignalTableSumWX[vect][L1][n1]);



                        if (L1 != numLayers - 1)
                        {
                            int L = L1 + 1;

                            // Layer L1+1: only the input coming from neuron n1 changed,
                            // so patch each sum by the output delta.
                            for (int n = 0; n < Layer[L]; n++)
                            {
                                SignalTableSumWX[vect][L][n] += Weights[L][n][n1] * SignalTableY[vect][L - 1][n1];
                                SignalTableSumWX[vect][L][n] -= Weights[L][n][n1] * oldY;
                                SignalTableY[vect][L][n] = Math.Tanh(SignalTableSumWX[vect][L][n]); //y
                            }

                            // Layers L1+2 and above: every input may differ — recompute fully.
                            for (L = L1 + 2; L < numLayers; L++)
                            {
                                for (int n = 0; n < Layer[L]; n++)
                                {
                                    SignalTableSumWX[vect][L][n] = 0;
                                    for (int w = 0; w < Layer[L - 1]; w++)
                                        SignalTableSumWX[vect][L][n] += Weights[L][n][w] * SignalTableY[vect][L - 1][w];

                                    SignalTableSumWX[vect][L][n] += Weights[L][n][Layer[L - 1]]; //bias 
                                    SignalTableY[vect][L][n] = Math.Tanh(SignalTableSumWX[vect][L][n]);//y
                                }
                            }
                        }
                }
                });

           
            }


        // Full forward pass: propagates every vector of DataSet1 through the network,
        // storing each neuron's weighted sum in SignalTableSumWX and its tanh output
        // in SignalTableY. The vectors are partitioned among numProcessors workers
        // using the StartVector/EndVector slices prepared by SplitTrainingFile.
        public void FillSignalTableParallel(double[][] DataSet1)
        {
            Parallel.For(0, numProcessors, worker =>
            {
                for (int vect = StartVector[worker]; vect <= EndVector[worker]; vect++)
                {
                    // Layer 0 simply mirrors the input attributes.
                    for (int n = 0; n < Layer[0]; n++)
                        SignalTableY[vect][0][n] = DataSet1[vect][n];

                    for (int L = 1; L < numLayers; L++)
                    {
                        int inputs = Layer[L - 1];
                        for (int n = 0; n < Layer[L]; n++)
                        {
                            // Weighted inputs first, bias last (same summation order
                            // as elsewhere in this class).
                            double sum = 0;
                            for (int w = 0; w < inputs; w++)
                                sum += Weights[L][n][w] * SignalTableY[vect][L - 1][w];
                            sum += Weights[L][n][inputs]; // bias term

                            SignalTableSumWX[vect][L][n] = sum;
                            SignalTableY[vect][L][n] = Math.Tanh(sum);
                        }
                    }
                }
            });
        }



        public double getError_STParallel(double[][] DataSet1, int L1, int n1, int w1, double errorExponent = 2, int test = 0, bool outliers = false, int errMeasure = 0, double delta = 0.5)
        // Computes the network error over DataSet1 from the signals already stored in
        // SignalTableY/SignalTableSumWX (filled by FillSignalTableParallel/recParallel).
        // Each worker accumulates its vector slice into PartialError/PartialAccuracy;
        // the partials are combined sequentially at the end. Also updates the accuracy
        // field (classification only). L1/n1/w1/test/outliers/errMeasure/delta are not
        // used here — kept for signature compatibility with the other error routines.
        {
            accuracy = 0;

            if (classification)
            {
                int L = numLayers - 1;
                double[] maxY = new double[numProcessors]; // best output score per worker
                double[] maxN = new double[numProcessors]; // index of the winning output neuron

                Parallel.For(0, numProcessors, p =>
                {
                    PartialError[p] = 0;
                    PartialAccuracy[p] = 0;
                    maxY[p] = -1;
                    // FIX: the original initialized maxY[p] twice; maxN[p] was intended.
                    maxN[p] = -1;

                    for (int vect = StartVector[p]; vect <= EndVector[p]; vect++)
                    {
                        maxY[p] = -1; maxN[p] = -1;
                        // Expected class index; class numbering in the data starts from 1.
                        // (Hoisted out of the neuron loop — it does not depend on n.)
                        int y = (int)DataSet1[vect][Layer[0]] - 1;

                        for (int n = 0; n < Layer[L]; n++)
                        {
                            // Track the winning (largest) output for the accuracy count.
                            if (SignalTableY[vect][L][n] > maxY[p])
                            {
                                maxY[p] = SignalTableY[vect][L][n];
                                maxN[p] = n;
                            }
                            if (n == y)  //N[L][y].Y is expected to be 1;
                            {
                                // Penalize only outputs below the offset margin.
                                if (SignalTableY[vect][L][n] < offset)
                                    PartialError[p] += Math.Pow(Math.Abs(SignalTableY[vect][L][n] - 1), errorExponent); //calculating network for classification
                            }
                            else  //N[L][y].Y is expected to be -1;
                            {
                                if (SignalTableY[vect][L][n] > -offset)
                                    PartialError[p] += Math.Pow(Math.Abs(SignalTableY[vect][L][n] + 1), errorExponent); //calculating network for classification
                            }
                        }

                        if (maxN[p] == DataSet1[vect][Layer[0]] - 1)
                            PartialAccuracy[p]++;
                    }
                });
            }
            else //regression
            {
                int L = numLayers - 1;
                Parallel.For(0, numProcessors, p =>
                {
                    PartialError[p] = 0;
                    for (int vect = StartVector[p]; vect <= EndVector[p]; vect++)
                    {
                        for (int n = 0; n < Layer[L]; n++)
                        {
                            // Linear (identity) output for regression: bypass the tanh.
                            SignalTableY[vect][L][n] = SignalTableSumWX[vect][L][n];
                            PartialError[p] += Math.Pow(Math.Abs(SignalTableY[vect][L][n] - DataSet1[vect][Layer[0] + n]), errorExponent); //calculating network for regression
                        }
                    }
                });
            }

            // Combine the per-worker partial results sequentially (no races).
            double Error = 0;
            accuracy = 0;
            for (int p = 0; p < numProcessors; p++)
            {
                Error += PartialError[p];
                accuracy += PartialAccuracy[p];
            }

            return Error;
        }



        // Debug helper: writes SignalTableSumWX and SignalTableY to text files,
        // one training vector per line with all layers/neurons space-separated.
        void DumpSignalTable(string version)
        {
            // using guarantees the writers are flushed/closed even on exceptions.
            using (StreamWriter stwx = new StreamWriter(@"..\..\..\SignalTableSumWX" + version + ".txt"))
            {
                for (int i = 0; i < SignalTableSumWX.GetLength(0); i++)
                {
                    // FIX: the inner bound used SignalTableSumWX[j].GetLength(0) —
                    // vector j's layer count — instead of SignalTableSumWX[i][j]
                    // (neurons in layer j of vector i): wrong sizes and a potential
                    // IndexOutOfRangeException when i exceeds the layer count.
                    for (int j = 0; j < SignalTableSumWX[i].GetLength(0); j++)
                        for (int k = 0; k < SignalTableSumWX[i][j].GetLength(0); k++)
                            stwx.Write(SignalTableSumWX[i][j][k] + "   ");
                    stwx.WriteLine();
                }
            }

            using (StreamWriter sty = new StreamWriter(@"..\..\..\SignalTableY" + version + ".txt"))
            {
                for (int i = 0; i < SignalTableY.GetLength(0); i++)
                {
                    // Same index fix as above for the Y table.
                    for (int j = 0; j < SignalTableY[i].GetLength(0); j++)
                        for (int k = 0; k < SignalTableY[i][j].GetLength(0); k++)
                            sty.Write(SignalTableY[i][j][k] + "   ");
                    sty.WriteLine();
                }
            }
        }


    }    
}
