﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.IO;

namespace MLP
{
    /// <summary>
    /// Builds a deep MLP by greedy layer-wise pretraining: each hidden layer is trained
    /// as an autoencoder on the output of the previously trained layer, and the learned
    /// hidden-layer weights are copied into the corresponding layer of the final deep
    /// network. All work happens inside the constructor; the resulting deep-network
    /// weights are written to "&lt;outputFileName&gt;_finalWeights.txt".
    /// </summary>
    class DeepNetwork 
    {

      

        /// <summary>
        /// Constructs and pretrains a deep network layer by layer.
        /// </summary>
        /// <param name="DataSet">Training data as a jagged array; one row per vector.
        /// The last column is treated as the target and is excluded from the
        /// autoencoder inputs (see DataWidth below).</param>
        /// <param name="numEpoch">Number of training epochs, forwarded to Network.</param>
        /// <param name="weightRegularization">Weight-regularization coefficient, forwarded to Network.</param>
        /// <param name="transferFunction">Transfer-function selector, forwarded to Network.</param>
        /// <param name="trainingAlgorithm">Algorithm name; a "VSS" prefix selects VSS training,
        /// anything else falls through to backpropagation (BP).</param>
        /// <param name="robustAlgorithm">0 = standard training; 1 = IM_VSS; otherwise LT_VSS.</param>
        /// <param name="productUnits">Forwarded to Network.productUnits.</param>
        /// <param name="eta">BP learning rate (used only on the BP path).</param>
        /// <param name="alpha">BP momentum (used only on the BP path).</param>
        /// <param name="etaRpropPlus">Rprop increase factor (BP path).</param>
        /// <param name="etaRpropMinus">Rprop decrease factor (BP path).</param>
        /// <param name="VSS_ver">VSS algorithm version, forwarded to Network.</param>
        /// <param name="parallelTraining">Forwarded to Network.parallelTraining.</param>
        /// <param name="errorMeasure">Error-measure selector, forwarded to Network.</param>
        /// <param name="errExp">Error exponent, forwarded to Network.</param>
        /// <param name="dw">Initial weight-update step, forwarded to Network.</param>
        /// <param name="classRegr">Forwarded to Network (classification/regression flag).</param>
        /// <param name="linRegr">Forwarded to Network (linear-regression flag).</param>
        /// <param name="RandomWeights">Forwarded to Network (random weight initialization).</param>
        /// <param name="numHidden">Hidden-layer sizes; one autoencoder is trained per entry.</param>
        /// <param name="outputFileName">Base name for output files.</param>
        /// <param name="lastColumnContainsOutliers">Forwarded to Network.</param>
        /// <param name="calculationProgress">Progress value updated by the training routines.</param>
        /// <param name="batchSize">&gt; 0 enables mini-batch VSS training via batchTraining;
        /// 0 or less trains on the full dataset.</param>
        public DeepNetwork(double[][] DataSet, int numEpoch, double weightRegularization, 
            int transferFunction, string trainingAlgorithm, int robustAlgorithm, bool productUnits, double eta, 
            double alpha, double etaRpropPlus, double etaRpropMinus, int VSS_ver, bool parallelTraining, 
            int errorMeasure, double errExp, double dw, bool classRegr, bool linRegr, bool RandomWeights,
            int[] numHidden, string outputFileName, int lastColumnContainsOutliers, ref double calculationProgress, int batchSize)
        {

            
            
            
            
            Network DAENet;
            Network DeepNet;
            double[][] DAEDataSet;
            double[][] tempDAEDataSet;
            // NOTE(review): R is unused in live code — it is referenced only by the
            // commented-out denoising block below. Kept for when that block is restored.
            Random R = new Random();
            
            
            // creating a deep network (the final network that receives the pretrained
            // autoencoder weights layer by layer)
            DeepNet = new Network(DataSet, numHidden, numEpoch, 0, weightRegularization,
                               0.25, outputFileName, transferFunction, dw, classRegr, linRegr,
                                0, lastColumnContainsOutliers, RandomWeights, errorMeasure, errExp, VSS_ver);


            // creating a dataset for an autoencoder: outputs should be reconstruction of inputs

            tempDAEDataSet = DataSet;

            // Number of input features: column count minus one — the last column
            // (the target) is excluded from the autoencoder data.
            int DataWidth = DataSet[0].GetLength(0) - 1;
            // Each autoencoder row is the input features duplicated:
            // first half = inputs, second half = targets (reconstruction of inputs).
            DAEDataSet = new double[DataSet.GetLength(0)][];
            for (int vx = 0; vx < DataSet.GetLength(0); vx++)
                DAEDataSet[vx] = new double[DataWidth * 2];


            for (int i = 0; i < DataSet.GetLength(0); i++)
            {
                for (int j = 0; j < DataWidth; j++)
                {
                    DAEDataSet[i][j] = DataSet[i][j];
                    DAEDataSet[i][j + DataWidth] = DataSet[i][j];
                    
                    /*// zeroing some features to avoid identity function:
                    if (R.NextDouble() < 0.01)
                    {
                        DAEDataSet[i][j] = 0;
                        DAEDataSet[i][j + DataWidth] = 0;
                    }*/

                }

                

            }

            // TODO: zeroing some features to avoid identity function
            // to be placed here!
            // (when batchSize > 0, batchTraining already performs this denoising per batch)
            


            // splitting the network into autoencoders
            int numLayers = numHidden.GetLength(0);
            int[] singleLayer;
            singleLayer = new int[1];

            // for each hidden layer: train a one-hidden-layer autoencoder on the
            // current dataset, then copy its hidden-layer weights into DeepNet layer nL
            for (int nL = 1; nL <= numLayers; nL++)
            {
                singleLayer[0] = numHidden[nL - 1];

                // create regular one-layer network
                DAENet = new Network(tempDAEDataSet, singleLayer, numEpoch, 0, weightRegularization,
                               0, outputFileName, transferFunction, dw, classRegr, linRegr,
                                0, lastColumnContainsOutliers, RandomWeights, errorMeasure, errExp, VSS_ver);

                
                // to change the network into an autoencoder:
                // the number of inputs determines the number of output neurons
                DAENet.numOutputs = DAENet.numInputs;

                Random rnd = new Random();


                // L = index of the output layer of the autoencoder
                int L = DAENet.numLayers - 1;
                DAENet.Layer[L] = DAENet.numOutputs;

                // create the autoencoder:
                // rebuild the output layer's weight/delta tables to match the widened
                // output layer (one output neuron per input feature)
                /*
                if (VSS_ver > 0)
                {
                 */
                DAENet.Weights[L] = new double[DAENet.Layer[L]][];
                DAENet.Delta[L] = new double[DAENet.Layer[L]][];

                for (int n = 0; n < DAENet.Layer[L]; n++)
                {
                    // +1 for the bias weight
                    DAENet.Weights[L][n] = new double[DAENet.Layer[L - 1] + 1];
                    DAENet.Delta[L][n] = new double[DAENet.Layer[L - 1] + 1];

                    for (int w = 0; w < DAENet.Layer[L - 1] + 1; w++)
                    {
                        DAENet.Weights[L][n][w] = 0.5 - rnd.NextDouble(); //adding random weights in (-0.5, +0.5]
                        DAENet.Delta[L][n][w] = DAENet.dw0; //delta (initial update step)
                    }
                }


                // resize the per-vector signal tables for the widened output layer
                for (int v = 0; v < DAENet.numVectors; v++)
                {

                    DAENet.SignalTableY[v][L] = new double[DAENet.Layer[L]];
                    DAENet.SignalTableSumWX[v][L] = new double[DAENet.Layer[L]];
                }
                /*
               }
               else
               {
                   DAENet.N[L] = new MLP.Network.Neuron[DAENet.Layer[L]];


                   for (int n = 0; n < DAENet.Layer[L]; n++)
                   {


                        
                       DAENet.N[L][n].weight = new double[DAENet.Layer[L - 1] + 1]; 
                       DAENet.N[L][n].oldWeight = new double[DAENet.Layer[L - 1] + 1]; 
                       DAENet.N[L][n].delta = new double[DAENet.Layer[L - 1] + 1]; 


                       for (int w = 0; w < DAENet.N[L][n].weight.Length; w++)
                       {
                           DAENet.N[L][n].weight[w] = 0.5 - rnd.NextDouble();  // initializing weights with random numbers from -0.5 to +0.5
                           DAENet.N[L][n].oldWeight[w] = DAENet.N[L][n].weight[w];
                           DAENet.N[L][n].delta[w] = 0.5;
                       }



                   }
               }
               */

                DAENet.TrainingDataSet = DAEDataSet;
                DAENet.classification = false; // for the autoencoder we need regression with multiple outputs

                DAENet.productUnits = productUnits;
                DAENet.parallelTraining = parallelTraining;
                DAENet.trainingAlgorithm = trainingAlgorithm;


                // select the training routine: robust variants, VSS (full-batch or
                // mini-batch), or plain backpropagation
                if (robustAlgorithm > 0)
                {
                    if (robustAlgorithm == 1)
                        DAENet.IM_VSS(VSS_ver);
                    else
                        DAENet.LT_VSS(VSS_ver);
                }
                else
                {
                    if (trainingAlgorithm.Substring(0, 3) == "VSS")
                    {
                       // DateTime dt1 = DateTime.Now;
                        if (batchSize > 0)
                            batchTraining(DAENet, batchSize, ref calculationProgress, numLayers, VSS_ver);
                        else
                            DAENet.VSS(VSS_ver, ref calculationProgress, numLayers);
                      //  DateTime dt2 = DateTime.Now;
                      //  ts = dt2 - dt1;
                    }
                    else
                        DAENet.BP(eta, alpha, etaRpropPlus, etaRpropMinus);
                }

               // messageCVTextBox3.Text += DAENet.error.ToString();
               // messageCVTextBox3.Text += " ";



                // layer output is used to create the dataset for the next layer (autoencoder)
               // if (VSS_ver > 0)
                DAENet.FillSignalTable(DAEDataSet);
                DAENet.error = DAENet.getError_ST(DAEDataSet, 0, 0, 0, errExp, 0, true, errorMeasure);
                    tempDAEDataSet = DAENet.getOutput_ST(DAEDataSet, 2);
               // else
               //     tempDAEDataSet = DAENet.getOutput(DAEDataSet, 2);

                 

                // we need only to double the data to make a new dataset (inputs == outputs)
                DataWidth = tempDAEDataSet[0].GetLength(0);
                DAEDataSet = new double[tempDAEDataSet.GetLength(0)][];
                for (int vx = 0; vx < tempDAEDataSet.GetLength(0); vx++)
                    DAEDataSet[vx] = new double[2 * DataWidth];

                for (int i = 0; i < tempDAEDataSet.GetLength(0); i++)
                {

                    for (int j = 0; j < DataWidth; j++)
                    {
                        DAEDataSet[i][j] = tempDAEDataSet[i][j];
                        DAEDataSet[i][j + DataWidth] = tempDAEDataSet[i][j];
                    }

                }

                //now we add artificial output to initialize the next network with proper input size
                // (only the shape of tempDAEDataSet matters for the next Network constructor)

                tempDAEDataSet = new double[tempDAEDataSet.GetLength(0)][];
                for (int vx = 0; vx < tempDAEDataSet.GetLength(0); vx++)
                    tempDAEDataSet[vx] = new double[DataWidth + 1];

                // NOTE(review): the loop steps i by DataWidth, so only every DataWidth-th
                // row is copied and the rest stay all-zero. If the values (not just the
                // dimensions) of tempDAEDataSet are used by the next Network constructor,
                // this may be an accidental `i += DataWidth` instead of `i++` — confirm.
                for (int i = 0; i < tempDAEDataSet.GetLength(0); i += DataWidth)
                {

                    for (int j = 0; j < DataWidth + 1; j++)
                    {
                        tempDAEDataSet[i][j] = DAEDataSet[i][j];

                    }

                }


                // writing the weights of the autoencoder into the deep network
                // (L - 1 is the autoencoder's hidden layer; its weights become
                // layer nL of the deep network)
             //   if (VSS_ver > 0)
             //   {
                    for (int n = 0; n < DAENet.Layer[L - 1]; n++)
                    {
                        DeepNet.Weights[nL][n] = new double[DAENet.Layer[L - 2] + 1];
                        DeepNet.Delta[nL][n] = new double[DAENet.Layer[L - 2] + 1];



                        for (int w = 0; w < DAENet.Weights[L - 1][n].Length; w++)
                        {
                            DeepNet.Weights[nL][n][w] = DAENet.Weights[L - 1][n][w];
                            DeepNet.Delta[nL][n][w] = DAENet.Delta[L - 1][n][w];


                        }
                    }
                /*
                }
                else
                {
                    for (int n = 0; n < DAENet.Layer[L - 1]; n++)
                    {
                        DeepNet.N[nL][n].weight = new double[DAENet.Layer[L - 2] + 1];
                        DeepNet.N[nL][n].oldWeight = new double[DAENet.Layer[L - 2] + 1];
                        DeepNet.N[nL][n].delta = new double[DAENet.Layer[L - 2] + 1];


                        for (int w = 0; w < DAENet.N[L - 1][n].weight.Length; w++)
                        {
                            DeepNet.N[nL][n].weight[w] = DAENet.N[L - 1][n].weight[w];
                            DeepNet.N[nL][n].oldWeight[w] = DAENet.N[L - 1][n].weight[w];
                            DeepNet.N[nL][n].delta[w] = DAENet.N[L - 1][n].delta[w];
                        }
                    }
                }
                 */
            }


            // write the deep network weights: a header line with the layer sizes,
            // then one weight per line, layer by layer, neuron by neuron
            System.IO.StreamWriter sw0 = new System.IO.StreamWriter(outputFileName + "_finalWeights.txt"); //("weights.txt");
            sw0.Write("Network: ");
            for (int i = 0; i < DeepNet.numLayers - 1; i++)
                sw0.Write(DeepNet.Layer[i] + "-");
            sw0.WriteLine(DeepNet.Layer[DeepNet.numLayers - 1]);

            for (int L0 = 1; L0 < DeepNet.numLayers; L0++)
                for (int n0 = 0; n0 < DeepNet.Layer[L0]; n0++)
                    for (int w0 = 0; w0 < DeepNet.Layer[L0 - 1] + 1; w0++)
                        if (VSS_ver > 0)
                            sw0.WriteLine(DeepNet.Weights[L0][n0][w0]);
                        else
                            sw0.WriteLine(DeepNet.N[L0][n0].weight[w0]);

            sw0.Close();
            // NOTE(review): relative path only works when run from the expected build
            // directory; consider an absolute/configurable path.
            File.Copy(outputFileName + "_finalWeights.txt", @"..\..\..\Weights.txt", true);

        }


        /// <summary>
        /// Trains the autoencoder with mini-batch VSS. The full training set is sliced
        /// into batches of <paramref name="batchSize"/>; for each batch, roughly 10% of
        /// the input features (first half of each row) are randomly zeroed (denoising)
        /// and VSS is run for a single epoch. The network's numEpochs and
        /// TrainingDataSet are restored before returning.
        /// </summary>
        /// <param name="DAENet">The autoencoder network to train (its TrainingDataSet
        /// is temporarily replaced by each batch).</param>
        /// <param name="batchSize">Vectors per batch. NOTE(review): integer division
        /// means the last DataLongLength % batchSize vectors are never trained on.</param>
        /// <param name="calculationProgress">Progress value updated by VSS.</param>
        /// <param name="numLayers">Layer count forwarded to VSS.</param>
        /// <param name="VSS_ver">VSS algorithm version forwarded to VSS.</param>
        public void batchTraining(Network DAENet, int batchSize, ref double calculationProgress, int numLayers, int VSS_ver)
        {

            double[][] tempDataSet;
            double[][] tempLongDataSet = DAENet.TrainingDataSet;
            // probability of zeroing an input feature (denoising rate)
            double inputZero = 0.1;
            Random R = new Random();
            

            int DataWidth = DAENet.TrainingDataSet[0].GetLength(0);
            int DataLongLength = DAENet.TrainingDataSet.GetLength(0);
            // rows are [inputs | reconstruction targets]; only the input half is noised
            int HalfDataWidth = DataWidth / 2;
            int numEpochs = DAENet.numEpochs;

            // each VSS call below performs exactly one epoch on one batch
            DAENet.numEpochs = 1;
            //int DataLength = DataLongLength / numBatches;
            int numBatches = DataLongLength / batchSize;
            tempDataSet = new double[batchSize][];

            for (int vx = 0; vx < batchSize; vx++)
                tempDataSet[vx] = new double[DataWidth];

            // NOTE(review): "+ 12" runs 12 extra epochs beyond the configured count;
            // presumably an empirical tweak — confirm it is intentional.
            for (int ne = 0; ne < numEpochs + 12; ne++)

            for (int nm = 0; nm < numBatches; nm++)
            {
                for (int i = 0; i < batchSize; i++)
                {
                    // Wrap-around branch: with numBatches = DataLongLength / batchSize,
                    // nm * batchSize + i < numBatches * batchSize <= DataLongLength,
                    // so this condition is never true — the branch is dead code.
                    if (nm * batchSize + i >= DataLongLength)
                        for (int j = 0; j < DataWidth; j++)
                        {
                            tempDataSet[i][j] = tempLongDataSet[nm * batchSize + i - DataLongLength][j];
                            if ((j < HalfDataWidth) && (R.NextDouble() < inputZero))
                            {
                                tempDataSet[i][j] = 0;

                            }

                        }
                    else
                        for (int j = 0; j < DataWidth; j++)
                        {
                            tempDataSet[i][j] = tempLongDataSet[nm*batchSize + i][j];
                            // zeroing some features to avoid identity function:
                            if ((j < HalfDataWidth) && (R.NextDouble() < inputZero))
                            {
                                tempDataSet[i][j] = 0;
                                
                            }
                        }

                }
                DAENet.TrainingDataSet = tempDataSet; 
                DAENet.VSS(VSS_ver, ref calculationProgress, numLayers);
                
            }

            // restore the network's original epoch count and full dataset
            DAENet.numEpochs = numEpochs;
            DAENet.TrainingDataSet = tempLongDataSet;
            
        }

    }
}
