﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.IO;

namespace MLP
{
    partial class Network
    {
        /// <summary>
        /// Forward-propagates every vector of <paramref name="DataSet1"/> through the network
        /// and returns the network error according to <paramref name="errMeasure"/>.
        /// Side effects: updates <c>numTestVect</c> and <c>accuracy</c> (classification hit
        /// count), and for <c>errMeasure == -2</c> replaces <c>TrainingDataSet</c> with an
        /// outlier-reduced copy (ILMedS learning).
        /// </summary>
        /// <param name="DataSet1">Data set; the first Layer[0] columns are inputs, column Layer[0] is the target
        /// (1-based class label for classification, raw value for regression).</param>
        /// <param name="L1">Unused; kept for signature compatibility.</param>
        /// <param name="n1">Unused; kept for signature compatibility.</param>
        /// <param name="w1">Unused; kept for signature compatibility.</param>
        /// <param name="errorExponent">Exponent applied to each per-output absolute error (2 = squared error).</param>
        /// <param name="test">0 = training set, 2 = test set (historically selected an output file name).</param>
        /// <param name="outliers">Unused; kept for signature compatibility.</param>
        /// <param name="errMeasure">0: plain sum; 1: delta-rank quantile; &gt;1: trimmed mean;
        /// -1: estimate delta for robust LT learning; -2: remove outliers for ILMedS learning.</param>
        /// <param name="delta">Quantile rank / trim fraction (errMeasure &gt; 0) or the MedS scale (errMeasure == -2).</param>
        /// <returns>The selected error measure accumulated over the whole data set.</returns>
        public double getError(double[,] DataSet1, int L1, int n1, int w1, double errorExponent = 2, int test = 0, bool outliers = true, int errMeasure = 0, double delta = 0.5)
        {
            double Error = 0;
            double prevError = 0;
            int numVect = DataSet1.GetLength(0);
            numTestVect = numVect;
            accuracy = 0;

            // Per-vector error contributions; required by the quantile / trimmed / robust measures.
            double[] errorTable = new double[numVect];

            for (int vect = 0; vect < numVect; vect++)
            {
                // Load the input layer with the current vector.
                for (int n = 0; n < Layer[0]; n++)
                    N[0][n].Y = DataSet1[vect, n];

                double maxY = -1, maxN = -1; // winner-take-all output (classification accuracy)
                for (int L = 1; L < numLayers; L++)
                {
                    for (int n = 0; n < Layer[L]; n++)
                    {
                        N[L][n].sumWX = 0;
                        double[] WX = new double[N[L][n].weight.Length];
                        for (int w = 0; w < N[L][n].weight.Length - 1; w++)
                        {
                            double wx = N[L][n].weight[w] * N[L - 1][w].Y;
                            N[L][n].sumWX += wx;
                            WX[w] = wx;
                        }

                        N[L][n].sumWX += N[L][n].weight[N[L][n].weight.Length - 1]; // bias

                        double nY = N[L][n].sumWX;
                        if (sumMedian > 0)
                        {
                            // Robust aggregation: blend the plain weighted sum with the median
                            // of the individual weight*input products (bias included).
                            WX[WX.Length - 1] = N[L][n].weight[N[L][n].weight.Length - 1];

                            Array.Sort(WX);
                            int mid = WX.Length / 2;
                            // FIX: the even-length median is the mean of the two middle elements
                            // WX[mid - 1] and WX[mid]; the previous (WX[mid] + WX[mid + 1]) / 2
                            // averaged the wrong pair and indexed past the end for 2-element arrays.
                            double medianWX = (WX.Length % 2 != 0)
                                ? WX[mid]
                                : (WX[mid - 1] + WX[mid]) / 2;

                            nY = (1 - sumMedian) * N[L][n].sumWX + sumMedian * medianWX;
                        }

                        N[L][n].Y = applyTransferFunction(nY, L);

                        if (L == numLayers - 1) // output layer: accumulate the error
                        {
                            if (outlierErrorCoefficiant > 0)
                            {
                                // Per-vector outlier weight stored one column after the target.
                                outlierCoef = DataSet1[vect, numInputs + 1];

                                if (outlierCoef < oc)
                                    outlierCoef = oc;

                                outlierCoef = oc + Math.Pow(outlierCoef - oc, 2);
                            }

                            if (classification)
                            {
                                if (N[L][n].Y > maxY)
                                {
                                    maxY = N[L][n].Y;
                                    maxN = n;
                                }

                                // Class numbering in the data set starts from 1.
                                int y = (int)DataSet1[vect, Layer[0]] - 1;
                                if (n == y) // N[L][y].Y is expected to be 1
                                {
                                    // Penalize only outputs below the margin 'offset'.
                                    if (N[L][n].Y < offset)
                                        Error += Math.Pow(Math.Abs(N[L][n].Y - 1), errorExponent) / outlierCoef;
                                }
                                else // N[L][n].Y is expected to be -1
                                {
                                    if (N[L][n].Y > -offset)
                                        Error += Math.Pow(Math.Abs(N[L][n].Y + 1), errorExponent) / outlierCoef;
                                }
                            }
                            else
                            {
                                // Regression: optionally bypass the squashing function.
                                if (outputNeuronLinearForRegression)
                                    N[L][n].Y = nY;

                                Error += Math.Pow(Math.Abs(N[L][n].Y - DataSet1[vect, Layer[0]]), errorExponent) / outlierCoef;
                            }
                        }
                    }
                }

                if (maxN == DataSet1[vect, Layer[0]] - 1)
                    accuracy++;

                // Record this vector's individual contribution for the robust error measures.
                errorTable[vect] = Error - prevError;
                prevError = Error;
            }

            if (errMeasure > 0)
            {
                // Quantile-based or trimmed error.
                Array.Sort(errorTable);
                int midErrors = (int)(numVect * delta);
                if (midErrors > numVect)
                    midErrors = numVect;
                if (midErrors < 1)
                    midErrors = 1;

                if (errMeasure > 1)
                {
                    // Trimmed mean: average of the midErrors smallest per-vector errors.
                    Error = 0;
                    for (int i = 0; i < midErrors; i++)
                        Error += errorTable[i];
                    Error /= midErrors;
                }
                else
                {
                    // Delta-rank quantile error: delta = 0.5 is the median error.
                    Error = errorTable[midErrors - 1];
                }
            }
            else if (errMeasure == -1) // estimate delta for robust LT learning
            {
                double meanError = 0;
                for (int i = 0; i < numVect; i++)
                    meanError += errorTable[i];
                meanError /= numVect;

                // Mean absolute deviation around the mean, scaled by 3 as the threshold.
                Error = 0;
                for (int i = 0; i < numVect; i++)
                    Error += Math.Abs(errorTable[i] - meanError);
                Error = 3 * (Error / numVect);

                int deltaCounter = 0; // errors smaller than the 3*MAD threshold
                for (int i = 0; i < numVect; i++)
                {
                    if (Math.Abs(errorTable[i]) < Error)
                        deltaCounter++;
                }

                Error = (double)deltaCounter / numVect;
            }
            else if (errMeasure == -2) // remove outliers for ILMedS learning
            {
                // NOTE(review): this assumes DataSet1 is TrainingDataSet; otherwise errorTable
                // may be shorter than TrainingDataSet — confirm with callers.
                int numTrainVect = TrainingDataSet.GetLength(0); // renamed from 'N' to stop shadowing the neuron array field
                int d = TrainingDataSet.GetLength(1);
                int reducedIndex = 0;
                double sigma = 1.4826 * (1 + 5 / (double)(numTrainVect - d - 1)) * Math.Sqrt(delta);
                sigma = 2.5 * Math.Pow(sigma, 2); // rejection threshold
                double[,] reducedDataSet = new double[numTrainVect, d];

                // Keep only the training patterns whose error is below the threshold.
                for (int i = 0; i < numTrainVect; i++)
                {
                    if (errorTable[i] < sigma)
                    {
                        for (int j = 0; j < d; j++)
                        {
                            reducedDataSet[reducedIndex, j] = TrainingDataSet[i, j];
                        }
                        reducedIndex++;
                    }
                }

                if (reducedIndex > 1)
                {
                    double[,] finalDataSet = new double[reducedIndex, d];
                    // FIX: Array.ConstrainedCopy counts individual elements (doubles), not rows,
                    // on multidimensional arrays; the old call copied only 'reducedIndex' doubles
                    // and left most of finalDataSet zeroed. Copy reducedIndex * d elements.
                    Array.ConstrainedCopy(reducedDataSet, 0, finalDataSet, 0, reducedIndex * d);
                    TrainingDataSet = finalDataSet;
                }
            }

            return Error;
        }

        /// <summary>
        /// Applies the currently selected transfer function to the net input
        /// <paramref name="nY"/> of a neuron in layer <paramref name="L"/>
        /// (the Radial variants behave differently in the first hidden layer).
        /// </summary>
        private double applyTransferFunction(double nY, int L)
        {
            switch (transferFunction)
            {
                case TransferFunction.Tanh:
                    return Math.Tanh(nY);
                case TransferFunction.Tanh0995:
                    if (nY >= -3 && nY <= 3) return Math.Tanh(nY) / 0.995;
                    return nY <= -3 ? -1 : 1;
                case TransferFunction.Sin:
                    if (nY >= -1 && nY <= 1) return Math.Sin(nY);
                    return nY <= -1 ? -1 : 1;
                case TransferFunction.Linear3:
                    if (nY >= -1 && nY <= 1) return nY;
                    return nY <= -1 ? -1 : 1;
                case TransferFunction.Linear5:
                    // Continuous piecewise-linear clamp: slope 1 on [-0.75, 0.75],
                    // slope 0.125 on [+/-0.75, +/-2.75], saturated at +/-1 beyond.
                    if (nY >= -0.75 && nY <= 0.75) return nY;
                    if (nY <= -0.75 && nY >= -2.75) return 0.125 * nY - 0.65625;
                    // FIX: the upper ramp must stop at 2.75 (where it reaches exactly 1.0);
                    // the original condition (nY >= 0.75 && nY >= -2.75) also captured
                    // nY > 2.75 and produced outputs greater than 1.
                    if (nY >= 0.75 && nY <= 2.75) return 0.125 * nY + 0.65625;
                    return nY <= -2.75 ? -1 : 1;
                case TransferFunction.Stair2:
                    return nY < 0 ? -1 : 1;
                case TransferFunction.Stair3:
                    if (nY < -1) return -1;
                    return nY < 1 ? 0 : 1;
                case TransferFunction.Stair5:
                    if (nY < -2) return -1;
                    if (nY < -0.5) return -0.5;
                    if (nY < 0.5) return 0;
                    return nY < 2 ? 0.5 : 1;
                case TransferFunction.Stair9:
                    if (nY < -3) return -1;
                    if (nY < -1.3) return -0.9;
                    if (nY < -0.5) return -0.6;
                    if (nY < -0.1) return -0.3;
                    if (nY < 0.1) return 0;
                    if (nY < 0.5) return 0.3;
                    if (nY < 1.3) return 0.6;
                    return nY < 3 ? 0.9 : 1;
                case TransferFunction.Radial1:
                    if (L < 2)
                        return 0.656517642749665 * (Math.Tanh(nY + 1) + Math.Tanh(-nY + 1));
                    return Math.Tanh(nY);
                case TransferFunction.Radial2:
                    if (L < 2)
                        return 1.313035285499331 * (Math.Tanh(nY + 1) + Math.Tanh(-nY + 1)) - 1;
                    return Math.Tanh(nY);
                default:
                    return Math.Tanh(nY);
            }
        }




    }


}
