﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.IO;

namespace MLP
{
    partial class Network
    {
        public void VSS_ST_PTR_4() // MLP network learning algorithm: finds the optimal weights that minimize MSE
        // This can be any MLP learning algorithm; a simplified version of VSS is used here.
        // For each single weight a small step dw is added and the error is recalculated. If the
        // error decreases, w = w + dw is kept (and a larger step is stored for the next pass);
        // otherwise dw is subtracted from that weight and the error is recalculated again. If the
        // error now decreases, w = w - dw is kept; otherwise w remains unchanged and the stored
        // step is shrunk. The sweep over all weights is repeated for numEpochs epochs (until the
        // error no longer decreases significantly).
        // Side effects: updates weights/deltaa in place, sets oldError/error/oldWeightTest, and
        // writes a per-epoch log to <outputFileName>_errors.txt.
        {
            // Full forward pass so the signal tables are consistent with the current weights.
            FillSignalTable_PTR4(TrainingDataSet);
            oldError = getError_ST_PTR(TrainingDataSet, 0, 0, 0, errExp, 0, true, errMeasure, delta);
            double Error = oldError;
            double dw = dw0; // NOTE(review): dw is overwritten from deltaa before its first use in the
                             // weight loop, so dw0 and the 0.995 decay below look dead unless deltaa
                             // is seeded from dw0 elsewhere — confirm.
            StreamWriter swS2 = new StreamWriter(outputFileName + "_errors.txt");
            try
            {
                swS2.WriteLine("epoch    error    accuracy"); // fixed header typo ("accurcy")
                unsafe
                {
                    // p1 -> flat weights table, p2 -> deltaa (per-weight adaptive step) table.
                    fixed (double* p1 = &weights[0, 0, 0], p2 = &deltaa[0, 0, 0])
                    {
                        for (int e = 1; e <= numEpochs; e++)
                        {
                            // Log the state reached after the previous epoch.
                            if (classification)
                                swS2.WriteLine(e + "    " + oldError / numVectors + "    " + accuracy / numVectors);
                            else
                                swS2.WriteLine(e + "    " + oldError / numVectors);
                            dw *= 0.995; // NOTE(review): dead store — dw is reassigned below before use.
                            for (int L = 1; L < numLayers; L++)
                            {
                                int iL = L * weightsGetLength1 * weightsGetLength2;
                                for (int n = 0; n < Layer[L]; n++)
                                {
                                    int iN = n * weightsGetLength2;
                                    // NOTE(review): this bound covers Layer[L-1] inputs plus one bias;
                                    // for the output layer FillSignalTable_PTR4 also uses shortcut
                                    // weights from the input layer, which this loop never visits —
                                    // confirm that is intended.
                                    for (int w = 0; w < Layer[L - 1] + 1; w++)
                                    {
                                        bool errorDecreased = false;
                                        dw = 0.67 * *(p2 + iL + iN + w); // per-weight step from the last pass
                                        double oldW = *(p1 + iL + iN + w);
                                        oldWeightTest = *(p1 + iL + iN + w);
                                        *(p1 + iL + iN + w) += dw;
                                        rec_PTR4(L, n, w); // incremental signal-table update for this weight
                                        if ((Error = getError_ST_PTR(TrainingDataSet, L, n, w, errExp, 0, true, errMeasure, delta)) < oldError)
                                        {
                                            // +dw helped: keep it and try one more step in the same direction.
                                            oldError = Error;
                                            errorDecreased = true;
                                            *(p2 + iL + iN + w) = dw;
                                            oldWeightTest = *(p1 + iL + iN + w);
                                            *(p1 + iL + iN + w) += dw;
                                            rec_PTR4(L, n, w);
                                            if ((Error = getError_ST_PTR(TrainingDataSet, L, n, w, errExp, 0, true, errMeasure, delta)) < oldError)
                                            {
                                                oldError = Error;
                                                *(p2 + iL + iN + w) = 2 * dw; // enlarge the stored step
                                            }
                                            else
                                            {
                                                // Second step overshot: back off to w + dw.
                                                oldWeightTest = *(p1 + iL + iN + w);
                                                *(p1 + iL + iN + w) -= dw;
                                                rec_PTR4(L, n, w);
                                            }
                                        }
                                        else
                                        {
                                            // +dw hurt: jump to w - dw and try the opposite direction.
                                            oldWeightTest = *(p1 + iL + iN + w);
                                            *(p1 + iL + iN + w) -= 2 * dw;
                                            rec_PTR4(L, n, w);
                                            if ((Error = getError_ST_PTR(TrainingDataSet, L, n, w, errExp, 0, true, errMeasure, delta)) < oldError)
                                            {
                                                oldError = Error;
                                                errorDecreased = true;
                                                oldWeightTest = *(p1 + iL + iN + w);
                                                *(p1 + iL + iN + w) -= dw;
                                                *(p2 + iL + iN + w) = -dw;
                                                rec_PTR4(L, n, w);
                                                if ((Error = getError_ST_PTR(TrainingDataSet, L, n, w, errExp, 0, true, errMeasure, delta)) < oldError)
                                                {
                                                    oldError = Error;
                                                    *(p2 + iL + iN + w) = -2 * dw; // enlarge the stored step
                                                }
                                                else
                                                {
                                                    // Second step overshot: back off to w - dw.
                                                    oldWeightTest = *(p1 + iL + iN + w);
                                                    *(p1 + iL + iN + w) += dw;
                                                    rec_PTR4(L, n, w);
                                                }
                                            }
                                            if (!errorDecreased)
                                            {
                                                // Neither direction helped: restore the weight and shrink the step.
                                                oldWeightTest = *(p1 + iL + iN + w);
                                                *(p1 + iL + iN + w) = oldW;
                                                rec_PTR4(L, n, w);
                                                *(p2 + iL + iN + w) = 0.67 * dw;
                                            }
                                        }
                                    }//for w
                                }//for n
                            }//for L
                        } //for e
                    }
                }
            }
            finally
            {
                swS2.Close(); // release the log file even if training throws
            }
            error = oldError;
        }

        public void FillSignalTable_PTR4(double[,] DataSet1)
        // Forward-propagates every vector of DataSet1 through the 3-layer network
        // (input -> hidden -> output, where the output layer also has direct shortcut
        // connections from the input layer), storing each neuron's weighted sum in
        // SignalTableSumWX and its tanh activation in SignalTableY.
        {
            int vectorCount = DataSet1.GetLength(0);
            int signalStride = signaltableL1 * signaltableL2;   // one vector's slice of the signal tables
            int weightStride = weightsLength1 * weightsLength2; // one layer's slice of the weight table
            unsafe
            {
                fixed (double* sumWX = &SignalTableSumWX[0, 0, 0],
                               yTab = &SignalTableY[0, 0, 0],
                               wgt = &weights[0, 0, 0],
                               data = &DataSet1[0, 0])
                {
                    for (int v = 0; v < vectorCount; v++)
                    {
                        int dataBase = DataSetL1 * v;
                        int sigBase = v * signalStride;

                        // Input layer (layer 0): copy the vector's features into the Y table.
                        for (int n = 0; n < Layer[0]; n++)
                            *(yTab + sigBase + n) = *(data + dataBase + n);

                        // Hidden layer (layer 1): SumWX = W * Y(input) + bias, Y = tanh(SumWX).
                        int hidWgt = 1 * weightStride;
                        int hidSig = 1 * signaltableL2;
                        for (int n = 0; n < Layer[1]; n++)
                        {
                            int nw = n * weightsLength2;
                            double acc = 0;
                            for (int w = 0; w < Layer[0]; w++)
                                acc += *(wgt + hidWgt + nw + w) * *(yTab + sigBase + w);
                            acc += *(wgt + hidWgt + nw + Layer[0]); // bias weight
                            *(sumWX + sigBase + hidSig + n) = acc;
                            *(yTab + sigBase + hidSig + n) = Math.Tanh(acc);
                        }

                        // Output layer (layer 2): inputs come from the hidden layer AND directly
                        // from the input layer (shortcut weights stored after the hidden ones),
                        // plus a trailing bias weight.
                        int outWgt = 2 * weightStride;
                        int outSig = 2 * signaltableL2;
                        for (int n = 0; n < Layer[2]; n++)
                        {
                            int nw = n * weightsLength2;
                            double acc = 0;
                            for (int w = 0; w < Layer[1]; w++)
                                acc += *(wgt + outWgt + nw + w) * *(yTab + sigBase + hidSig + w); // hidden-layer Y
                            for (int w = 0; w < Layer[0]; w++)
                                acc += *(wgt + outWgt + nw + Layer[1] + w) * *(yTab + sigBase + w); // input-layer Y (shortcut)
                            acc += *(wgt + outWgt + nw + Layer[1] + Layer[0]); // bias weight
                            *(sumWX + sigBase + outSig + n) = acc;
                            *(yTab + sigBase + outSig + n) = Math.Tanh(acc);
                        }
                    }
                }
            }
        }

        public void rec_PTR4(int L1, int n1, int w1)
        // Incrementally updates the signal tables after the single weight weights[L1, n1, w1]
        // changed: for every training vector only the affected neuron (L1, n1) and the neurons
        // of the next layer are recomputed instead of running a full forward pass.
        // Precondition: oldWeightTest holds the weight's value from BEFORE the change.
        // Assumes exactly one hidden layer (numLayers == 3); the removed commented-out section
        // handled deeper networks (see version history if more hidden layers are reintroduced).
        {
            int sL12 = signaltableL1 * signaltableL2; // one vector's slice of the signal tables
            int wL12 = weightsLength1 * weightsLength2; // one layer's slice of the weight table
            int iNWeights, iLWeights, iL, iL11;

            unsafe
            {
                fixed (double* p1 = &SignalTableSumWX[0, 0, 0], p2 = &SignalTableY[0, 0, 0], p3 = &weights[0, 0, 0])
                {
                    int iN1weights = n1 * weightsLength2;
                    int iL1weights = L1 * wL12;
                    int iL1Signal = L1 * signaltableL2;
                    int iL11Signal = (L1 - 1) * signaltableL2;
                    for (int vect = 0; vect < TrainingDataSet.GetLength(0); vect++)
                    {
                        int iV = vect * sL12;
                        // Swap the old weight's contribution for the new one in the neuron's sum.
                        // NOTE(review): FillSignalTable_PTR4 places the output layer's bias at index
                        // Layer[L1-1] + Layer[L1-2] (after the shortcut weights), so treating index
                        // Layer[L1-1] as the bias here is only correct for hidden layers — confirm
                        // callers never pass shortcut-weight indices for the output layer.
                        if (w1 == Layer[L1 - 1])
                        {
                            // Bias weight: its input signal is the constant 1.
                            *(p1 + iV + iL1Signal + n1) += *(p3 + w1 + iN1weights + iL1weights);
                            *(p1 + iV + iL1Signal + n1) -= oldWeightTest;
                        }
                        else
                        {
                            *(p1 + iV + iL1Signal + n1) += *(p3 + w1 + iN1weights + iL1weights) * *(p2 + iV + iL11Signal + w1);
                            *(p1 + iV + iL1Signal + n1) -= oldWeightTest * *(p2 + iV + iL11Signal + w1);
                        }
                        double oldY = *(p2 + iV + iL1Signal + n1);
                        *(p2 + iV + iL1Signal + n1) = Math.Tanh(*(p1 + iV + iL1Signal + n1));
                        if (L1 != numLayers - 1)
                        {
                            // Propagate the changed activation of neuron (L1, n1) into layer L1 + 1
                            // by swapping its old Y contribution for the new one in each sum.
                            int L = L1 + 1;

                            iLWeights = L * wL12;
                            iL = L * signaltableL2;
                            iL11 = (L - 1) * signaltableL2;

                            for (int n = 0; n < Layer[L]; n++)
                            {
                                iNWeights = n * weightsLength2;
                                *(p1 + iV + iL + n) += *(p3 + iLWeights + iNWeights + n1) * *(p2 + n1 + iV + iL11);
                                *(p1 + iV + iL + n) -= *(p3 + iLWeights + iNWeights + n1) * oldY;
                                *(p2 + iV + iL + n) = Math.Tanh(*(p1 + iV + iL + n)); //y
                            }
                        }
                    }
                }
            }
        }
    }
}
