﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.IO;

namespace MLP
{
    partial class Network
    {
        /*
        public double [][] getOutput(double[][] DataSet1, int IdLayer)
        // all the signals for every training vector get propagated through the network to calculate its generalized output
        // of the last (idLayer == 1) or previous layers (numLayers - 1>idLayer >1 
        {
            

            int numVect = DataSet1.GetLength(0);
            numTestVect = numVect;
            
            
            double [][] networkOutput;
            networkOutput = new double[numVect][];
            for (int vx = 0; vx < numVect; vx++)
                networkOutput[vx] = new double[Layer[numLayers - IdLayer]];

            for (int vect = 0; vect < numVect; vect++)
            {


                for (int n = 0; n < Layer[0]; n++)
                    N[0][n].Y = DataSet1[vect][n];

                for (int L = 1; L < numLayers; L++)
                {


                    for (int n = 0; n < Layer[L]; n++)
                    {
                        N[L][n].sumWX = 0;
                        double[] WX = new double[N[L][n].weight.Length];
                        for (int w = 0; w < N[L][n].weight.Length - 1; w++)
                        {
                            N[L][n].sumWX += N[L][n].weight[w] * N[L - 1][w].Y;
                            WX[w] = N[L][n].weight[w] * N[L - 1][w].Y;



                        }

                        N[L][n].sumWX += N[L][n].weight[N[L][n].weight.Length - 1]; //bias


                        double nY = N[L][n].sumWX;
                        if (sumMedian > 0)
                        {
                            WX[N[L][n].weight.Length - 1] = N[L][n].weight[N[L][n].weight.Length - 1]; //bias

                            Array.Sort(WX);
                            int mid = WX.Length / 2;
                            double medianWX = (WX.Length % 2 != 0) ? WX[mid] : (WX[mid] + WX[mid + 1]) / 2;

                            nY = (1 - sumMedian) * N[L][n].sumWX + sumMedian * medianWX;
                        }



                        switch (transferFunction)
                        {
                            case TransferFunction.Tanh:
                                N[L][n].Y = Math.Tanh(nY);
                                break;
                            case TransferFunction.Tanh0995:
                                if (nY >= -3 && nY <= 3) N[L][n].Y = Math.Tanh(nY) / 0.995;
                                else if (nY <= -3) N[L][n].Y = -1;
                                else N[L][n].Y = 1;
                                break;
                            case TransferFunction.Sin:
                                if (nY >= -1 && nY <= 1) N[L][n].Y = Math.Sin(nY);
                                else if (nY <= -1) N[L][n].Y = -1;
                                else N[L][n].Y = 1;
                                break;
                            case TransferFunction.Linear3:
                                if (nY >= -1 && nY <= 1) N[L][n].Y = nY;
                                else if (nY <= -1) N[L][n].Y = -1;
                                else N[L][n].Y = 1;
                                break;
                            case TransferFunction.Linear5:
                                if (nY >= -0.75 && nY <= 0.75) N[L][n].Y = nY;
                                else if (nY <= -0.75 && nY >= -2.75) N[L][n].Y = 0.125 * nY - 0.65625;
                                else if (nY >= 0.75 && nY >= -2.75) N[L][n].Y = 0.125 * nY + 0.65625;
                                else if (nY <= -2.75) N[L][n].Y = -1;
                                else N[L][n].Y = 1;
                                break;
                            case TransferFunction.Stair2:
                                if (nY < 0) N[L][n].Y = -1;
                                else N[L][n].Y = 1;
                                break;
                            case TransferFunction.Stair3:
                                if (nY < -1) N[L][n].Y = -1;
                                else if (nY < 1) N[L][n].Y = 0;
                                else N[L][n].Y = 1;
                                break;
                            case TransferFunction.Stair5:
                                if (nY < -2) N[L][n].Y = -1;
                                else if (nY < -0.5) N[L][n].Y = -0.5;
                                else if (nY < 0.5) N[L][n].Y = 0;
                                else if (nY < 2) N[L][n].Y = 0.5;
                                else N[L][n].Y = 1;
                                break;
                            case TransferFunction.Stair9:
                                if (nY < -3) N[L][n].Y = -1;
                                else if (nY < -1.3) N[L][n].Y = -0.9;
                                else if (nY < -0.5) N[L][n].Y = -0.6;
                                else if (nY < -0.1) N[L][n].Y = -0.3;
                                else if (nY < 0.1) N[L][n].Y = 0;
                                else if (nY < 0.5) N[L][n].Y = 0.3;
                                else if (nY < 1.3) N[L][n].Y = 0.6;
                                else if (nY < 3) N[L][n].Y = 0.9;
                                else N[L][n].Y = 1;
                                break;
                            case TransferFunction.Radial1:
                                if (L < 2)
                                    N[L][n].Y = 0.656517642749665 * (Math.Tanh(nY + 1) + Math.Tanh(-nY + 1));
                                else
                                    N[L][n].Y = Math.Tanh(nY);
                                break;
                            case TransferFunction.Radial2:
                                if (L < 2)
                                    N[L][n].Y = 1.313035285499331 * (Math.Tanh(nY + 1) + Math.Tanh(-nY + 1)) - 1;
                                else
                                    N[L][n].Y = Math.Tanh(nY);
                                break;
                            default: N[L][n].Y = Math.Tanh(nY); break;
                        }

                        if (L == numLayers - IdLayer)
                        {



                            if (classification)
                            {
                                
                                networkOutput[vect][n] = N[L][n].Y;

                            }
                            else
                            {
                                if (outputNeuronLinearForRegression)
                                    N[L][n].Y = nY;

                                networkOutput[vect][n] = N[L][n].Y;
                            }
                        }

                    }

                }


            }

            return networkOutput;
        }
        */


        /// <summary>
        /// Returns the network output for every vector in <paramref name="DataSet1"/>, reading the
        /// already-computed signals from the signal tables (SignalTableY / SignalTableSumWX) instead
        /// of propagating the inputs through the network again.
        /// </summary>
        /// <param name="DataSet1">One row per vector. When the outlier mechanism is active
        /// (outlierErrorCoefficiant &gt; 0) the value at column numInputs + 1 is read as the stored
        /// per-vector outlier value — assumption from the code, confirm against the data layout.</param>
        /// <param name="IdLayer">Selects which layer's signals are returned: 1 = last layer,
        /// larger values step back toward the input (layer index numLayers - IdLayer).</param>
        /// <returns>Jagged array indexed [vector][neuron] with the selected layer's signals.</returns>
        public double[][] getOutput_ST(double[][] DataSet1, int IdLayer)
        {
            int numVect = DataSet1.GetLength(0);
            numTestVect = numVect; // side effect kept from the original: remember the evaluated set size

            int L = numLayers - IdLayer; // layer whose signals are copied to the result

            double[][] networkOutput = new double[numVect][];
            for (int vx = 0; vx < numVect; vx++)
                networkOutput[vx] = new double[Layer[L]];

            bool outliers = outlierErrorCoefficiant > 0;

            if (classification)
            {
                for (int vect = 0; vect < numVect; vect++)
                {
                    // Side effect kept from the original outlier branch: refresh the outlierCoef
                    // field from this vector's stored outlier value before reading its outputs.
                    if (outliers)
                        UpdateOutlierCoefFrom(DataSet1[vect][numInputs + 1]);

                    for (int n = 0; n < Layer[L]; n++)
                        networkOutput[vect][n] = SignalTableY[vect][L][n];
                }
            }
            else
            {
                for (int vect = 0; vect < numVect; vect++)
                {
                    if (outliers)
                        UpdateOutlierCoefFrom(DataSet1[vect][numInputs + 1]);

                    // Regression: the output layer is treated as linear, so Y is taken directly
                    // from the pre-activation sum (SignalTableSumWX) and the Y table is refreshed.
                    // NOTE(review): the original outlier branch had this loop commented out and
                    // only wrote neuron 0; all neurons are written here for consistency with the
                    // non-outlier branch — confirm for multi-output regression with outliers.
                    for (int n = 0; n < Layer[L]; n++)
                    {
                        SignalTableY[vect][L][n] = SignalTableSumWX[vect][L][n];
                        networkOutput[vect][n] = SignalTableY[vect][L][n];
                    }
                }
            }

            return networkOutput;
        }

        /// <summary>
        /// Updates the outlierCoef field from a vector's stored outlier value:
        /// the value is clamped to at least oc, then outlierCoef = oc + (value - oc)^2.
        /// </summary>
        private void UpdateOutlierCoefFrom(double storedValue)
        {
            double v = storedValue < oc ? oc : storedValue;
            outlierCoef = oc + Math.Pow(v - oc, 2);
        }



    }


}
