﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.IO;

namespace MLP
{
    /// <summary>
    /// k-fold cross-validation driver for MLP networks: splits a data set into fold
    /// train/test file pairs (optionally injecting noise into the training part),
    /// trains one Network per fold in parallel and aggregates test-error statistics.
    /// Typical usage: SpiltData(...) first, then RunCV(...).
    /// </summary>
    class Crossvalidation
    {
        // Dimensions of the data set most recently passed to AddNoise/SpiltData.
        int numVectors;
        int numAttributes;
        // Per-fold file names produced by SpiltData and consumed by RunCV.
        string[] trainingFileNames, testFileNames;
        // Header (attribute-name) line of the source data file.
        string headerLine;

        // Per-fold test results, filled by RunCV; length = fold * crossvalidationRuns.
        public double[] ErrorsMSE, ErrorsRMSE, Accuracy;

        // Aggregated statistics over all folds/runs, plus training hyper-parameters
        // (eta/alpha for backpropagation, etaRpropPlus/Minus for Rprop).
        public double meanErrorMSE, stdDevErrorMSE, meanAccuracy, stdDevAccuracy, eta = 0.03, alpha = 0.3,
                      meanErrorRMSE, stdDevErrorRMSE, etaRpropPlus = 1.2, etaRpropMinus = 0.5;
        public bool classification, noiseInput, noiseOutput, UseExistingCvFiles, productUnits, OutlierNoise, outliers;
        // Noise amplitude and the per-vector probabilities of perturbing inputs / the output.
        public double noiseValue = 0, noiseFrequencyIn = 0, noiseFrequencyOut = 0;

        // Formats a value with 5 decimal places using the invariant culture.
        // LoadFile parses with the invariant culture, so writing must not depend on the
        // OS locale (a comma decimal separator would make the files unreadable later).
        static string FormatValue(double value)
        {
            return value.ToString("0.00000", System.Globalization.CultureInfo.InvariantCulture);
        }

        // One sample of approximately Gaussian noise scaled by noiseValue: the sum of
        // four uniform variables is already a reasonable approximation of a normal
        // distribution (1.72 rescales its spread).
        double NoiseSample(Random R)
        {
            return noiseValue * 1.72 * (R.NextDouble() + R.NextDouble() + R.NextDouble() + R.NextDouble() - 2);
        }

        /// <summary>
        /// Runs crossvalidationRuns repetitions of fold-fold cross-validation.
        /// For each fold a Network is trained (folds run in parallel) on the training
        /// file produced by SpiltData and evaluated on the matching test file; per-fold
        /// MSE/RMSE/accuracy land in ErrorsMSE/ErrorsRMSE/Accuracy and their mean and
        /// spread in the corresponding mean*/stdDev* fields.
        /// SpiltData must have been called first so the fold file names are set.
        /// NOTE(review): the fileName and randomize parameters are currently unused;
        /// they are kept for caller compatibility.
        /// </summary>
        public void RunCV(string trainingAlgorithm, string fileName, int numEpochs, int[] numHidden,
                         double sumMedian = 0, double weightRegularization = 0,
                         string outputFileName = "results", int transferF = 0,
                         int fold = 10, double dw0 = 0.5, bool randomize = false, bool classification = true,
                         int crossvalidationRuns = 1, double outCeof = 1, int lastColumnContainsOutliers = 0,
                         bool ENN = false, double theta = 1.0, int errorMeasure = 0, double errExp = 2,
                         string robustAlgorithm = "MSE", int VSS_version = 0, bool outliers = false)
        {
            this.outliers = outliers;
            ErrorsMSE = new double[fold * crossvalidationRuns];
            ErrorsRMSE = new double[fold * crossvalidationRuns];
            Accuracy = new double[fold * crossvalidationRuns];
            Network[] mlp = new Network[fold];
            this.classification = classification;

            for (int r = 0; r < crossvalidationRuns; r++)
            {
                // Folds are independent of each other, so they are trained in parallel.
                Parallel.For(0, fold, i =>
                {
                    if (ENN || lastColumnContainsOutliers > 0)
                    {
                        // ENN-based kNN filter (k = 9): marks or removes suspected outliers
                        // and rewrites the training file; trainingFileNames[i] is updated by ref.
                        kNN knn = new kNN(trainingFileNames[i], 9, 2);
                        knn.GetDistances();
                        knn.ENN(theta);
                        if (ENN)
                            knn.SaveDataSetWithOutliers(1, ref trainingFileNames[i]);
                        else  // here lastColumnContainsOutliers > 0 necessarily holds
                            knn.SaveDataSetWithOutliers(0, ref trainingFileNames[i]);
                    }

                    mlp[i] = new Network(LoadFile(trainingFileNames[i]), numHidden, numEpochs,
                                              sumMedian, weightRegularization,
                                              0, outputFileName + "_" + (r * fold + i).ToString(), transferF, dw0,
                                              classification, true, outCeof, lastColumnContainsOutliers, true, errorMeasure, errExp, VSS_version, outliers);

                    mlp[i].productUnits = productUnits;
                    mlp[i].trainingAlgorithm = trainingAlgorithm;

                    // Choose the training procedure according to the robust error measure.
                    if (robustAlgorithm == "ILMedS")
                        mlp[i].IM_VSS(VSS_version);
                    else if (robustAlgorithm == "LTA")
                        mlp[i].LT_VSS(VSS_version);
                    else if (robustAlgorithm == "MSE")
                    {
                        // StartsWith instead of Substring(0, 3): does not throw when the
                        // algorithm name is shorter than three characters.
                        if (trainingAlgorithm.StartsWith("VSS", StringComparison.Ordinal))
                            mlp[i].VSS(VSS_version);
                        else
                            mlp[i].BP(eta, alpha, etaRpropPlus, etaRpropMinus);
                    }

                    mlp[i].TestDataSet = LoadFile(testFileNames[i]);

                    if (VSS_version == 0)
                        ErrorsMSE[r * fold + i] = mlp[i].getError(mlp[i].TestDataSet, 0, 0, 0, mlp[i].errExp, 2, false) / mlp[i].numTestVect;
                    else
                    {
                        // Signal-table based VSS variants: rebuild the signal tables for the
                        // test set and measure the error with the outlier penalty switched off.
                        mlp[i].numVectors = mlp[i].TestDataSet.GetLength(0);
                        mlp[i].numTestVect = mlp[i].numVectors;
                        mlp[i].SignalTableY = new double[mlp[i].numVectors, mlp[i].numLayers, mlp[i].maxNeurons];
                        mlp[i].SignalTableSumWX = new double[mlp[i].numVectors, mlp[i].numLayers, mlp[i].maxNeurons];

                        if (VSS_version == 1 || VSS_version == 2)
                            mlp[i].FillSignalTable(mlp[i].TestDataSet);
                        else
                            mlp[i].FillSignalTableCross(mlp[i].TestDataSet);

                        double tmpOutlierErrorCoefficiant = mlp[i].outlierErrorCoefficiant;
                        mlp[i].outlierErrorCoefficiant = 0;
                        ErrorsMSE[r * fold + i] = mlp[i].getError_ST(mlp[i].TestDataSet, 0, 0, 0, mlp[i].errExp) / mlp[i].numTestVect;
                        mlp[i].outlierErrorCoefficiant = tmpOutlierErrorCoefficiant;
                    }

                    // "RMSE" here is the errExp-th root of the mean errExp-power error.
                    ErrorsRMSE[r * fold + i] = Math.Pow(ErrorsMSE[r * fold + i], 1 / errExp);
                    Accuracy[r * fold + i] = mlp[i].accuracy / mlp[i].numTestVect;
                });
            }

            // Aggregate over all folds and runs. NOTE(review): the spread is the
            // 1/errExp-th root of the mean squared deviation — for errExp == 2 this is
            // the ordinary (population) standard deviation; for other exponents it is a
            // generalized spread measure, kept as in the original formulation.
            meanErrorMSE = ErrorsMSE.Average();
            stdDevErrorMSE = Math.Pow(ErrorsMSE.Sum(d => Math.Pow(d - meanErrorMSE, 2)) / (fold * crossvalidationRuns), 1 / errExp);

            meanErrorRMSE = ErrorsRMSE.Average();
            stdDevErrorRMSE = Math.Pow(ErrorsRMSE.Sum(d => Math.Pow(d - meanErrorRMSE, 2)) / (fold * crossvalidationRuns), 1 / errExp);

            meanAccuracy = Accuracy.Average();
            stdDevAccuracy = Math.Pow(Accuracy.Sum(d => Math.Pow(d - meanAccuracy, 2)) / (fold * crossvalidationRuns), 1 / errExp);
        }

        /// <summary>
        /// Writes DataSet to fileName (header line first), optionally perturbing the
        /// input attributes and/or the output (last) column with approximately Gaussian
        /// noise. A vector's inputs are perturbed with probability noiseFrequencyIn and
        /// its output with probability noiseFrequencyOut; the noise amplitude is
        /// controlled by noiseValue. Values are written with 5 decimals in the invariant
        /// culture so that LoadFile can parse them back regardless of the OS locale.
        /// </summary>
        public void AddNoise(double[,] DataSet, string fileName, string headerLine1)
        {
            numVectors = DataSet.GetLength(0);
            numAttributes = DataSet.GetLength(1);

            using (StreamWriter trainingStreamWriter = new StreamWriter(fileName))
            {
                trainingStreamWriter.WriteLine(headerLine1);
                Random R = new Random();
                for (int v = 0; v < numVectors; v++)
                {
                    // One random draw decides whether this vector's inputs get noise
                    // (short-circuit keeps the draw sequence identical to the original).
                    bool perturbInputs = noiseInput && R.NextDouble() < noiseFrequencyIn;
                    for (int a = 0; a < numAttributes - 1; a++)
                        trainingStreamWriter.Write(FormatValue(DataSet[v, a] + (perturbInputs ? NoiseSample(R) : 0)) + " ");

                    if (noiseOutput && R.NextDouble() < noiseFrequencyOut)
                        trainingStreamWriter.Write(FormatValue(DataSet[v, numAttributes - 1] + NoiseSample(R)) + " ");
                    else
                        trainingStreamWriter.Write(FormatValue(DataSet[v, numAttributes - 1]));

                    trainingStreamWriter.WriteLine();
                }
            }
        }

        /// <summary>
        /// Splits DataSet into fold train/test file pairs named
        /// "&lt;file&gt;_&lt;f&gt;_trn.txt" / "&lt;file&gt;_&lt;f&gt;_tst.txt". Fold f uses the contiguous
        /// vector range [steps[f], steps[f+1]) as its test set and all remaining vectors
        /// as its training set; the training part is optionally perturbed with noise
        /// (for classification, output noise replaces the label with that of a random
        /// vector; for regression it adds Gaussian-like noise to the target).
        /// When UseExistingCvFiles is set and a complete set of fold files already
        /// exists, they are reused. Stale fold files from a previous split with more
        /// folds are deleted at the end. All values are written with 5 decimals in the
        /// invariant culture so LoadFile can parse them back.
        /// NOTE(review): the method name keeps its original (misspelled) public
        /// spelling "SpiltData" for caller compatibility.
        /// </summary>
        public void SpiltData(double[,] DataSet, string fileName, string headerLine1, int fold)
        {
            Random R = new Random();

            headerLine = headerLine1;
            numVectors = DataSet.GetLength(0);
            numAttributes = DataSet.GetLength(1);

            trainingFileNames = new string[fold];
            testFileNames = new string[fold];
            string fns = System.IO.Path.GetDirectoryName(fileName) + "\\" + System.IO.Path.GetFileNameWithoutExtension(fileName);

            // Reuse previously generated fold files if requested and all are present.
            int numExistingFiles = 0;
            if (UseExistingCvFiles)
            {
                if (File.Exists(fns + "_" + fold.ToString() + "_trn" + ".txt") || File.Exists(fns + "_" + fold.ToString() + "_tst" + ".txt"))
                {
                    // A file for fold index == fold exists, i.e. the old split used more
                    // folds than requested -> force regeneration.
                    numExistingFiles = fold + 1;
                }
                else
                {
                    for (int f = 0; f < fold; f++)
                    {
                        trainingFileNames[f] = fns + "_" + f.ToString() + "_trn" + ".txt";
                        testFileNames[f] = fns + "_" + f.ToString() + "_tst" + ".txt";
                        if (File.Exists(trainingFileNames[f]) && File.Exists(testFileNames[f]))
                            numExistingFiles++;
                        else
                            break;
                    }
                }

                if (numExistingFiles == fold)
                    return;
            }

            // steps[f] .. steps[f + 1] - 1 is the test-vector range of fold f.
            int[] steps = new int[fold + 1];
            for (int s = 0; s < fold; s++)
                steps[s] = s * numVectors / fold;
            steps[fold] = numVectors;

            for (int f = 0; f < fold; f++)
            {
                trainingFileNames[f] = fns + "_" + f.ToString() + "_trn" + ".txt";
                testFileNames[f] = fns + "_" + f.ToString() + "_tst" + ".txt";

                using (StreamWriter trainingStreamWriter = new StreamWriter(trainingFileNames[f]))
                using (StreamWriter testStreamWriter = new StreamWriter(testFileNames[f]))
                {
                    trainingStreamWriter.WriteLine(headerLine);
                    testStreamWriter.WriteLine(headerLine);

                    for (int v = 0; v < numVectors; v++)
                    {
                        if (v >= steps[f] && v < steps[f + 1])
                        {
                            // This vector belongs to the test set of fold f.
                            for (int a = 0; a < numAttributes; a++)
                                testStreamWriter.Write(FormatValue(DataSet[v, a]) + " ");

                            testStreamWriter.WriteLine();
                        }
                        else
                        {
                            // Training vector; inputs and/or output may be perturbed.
                            bool perturbInputs = noiseInput && R.NextDouble() < noiseFrequencyIn;
                            for (int a = 0; a < numAttributes - 1; a++)
                                trainingStreamWriter.Write(FormatValue(DataSet[v, a] + (perturbInputs ? NoiseSample(R) : 0)) + " ");

                            if (noiseOutput && R.NextDouble() < noiseFrequencyOut)
                            {
                                if (classification)
                                    // Label noise: substitute the class of a random vector.
                                    trainingStreamWriter.Write(FormatValue(DataSet[R.Next(numVectors), numAttributes - 1]) + " ");
                                else
                                    // Regression noise: additive Gaussian-like perturbation.
                                    trainingStreamWriter.Write(FormatValue(DataSet[v, numAttributes - 1] + NoiseSample(R)) + " ");
                            }
                            else
                            {
                                trainingStreamWriter.Write(FormatValue(DataSet[v, numAttributes - 1]));
                            }

                            trainingStreamWriter.WriteLine();
                        }
                    }
                }
            }  // fold

            // Delete stale fold files left over from a previous split that used more
            // folds; stop at the first index where neither file exists any more.
            for (int f = fold; f < numVectors; f++)
            {
                bool anyDeleted = false;

                string trn = fns + "_" + f.ToString() + "_trn" + ".txt";
                if (File.Exists(trn))
                {
                    File.Delete(trn);
                    anyDeleted = true;
                }

                string tst = fns + "_" + f.ToString() + "_tst" + ".txt";
                if (File.Exists(tst))
                {
                    File.Delete(tst);
                    anyDeleted = true;
                }

                if (!anyDeleted)
                    break;
            }
        }

        /// <summary>
        /// Loads a space/semicolon separated numeric data file into a
        /// [vectors, attributes] matrix. The first line is the header (one token per
        /// attribute); data lines whose trimmed length is 2 or less are skipped.
        /// Parsing uses the invariant culture. Two passes: count lines, then parse.
        /// </summary>
        double[,] LoadFile(string FileName)
        {
            string[] separators = new string[] { " ", ";" };
            string theLine;
            int numAttributes;
            int numVectors = 0;

            // First pass: read the header (to get the attribute count) and count the
            // data lines. Intentionally uses a local, not the headerLine field, because
            // this method runs concurrently from Parallel.For in RunCV.
            using (StreamReader sr = new StreamReader(FileName))
            {
                string header = sr.ReadLine();
                numAttributes = header.Split(separators, StringSplitOptions.RemoveEmptyEntries).Length;
                while ((theLine = sr.ReadLine()) != null)
                {
                    if (theLine.Trim().Length > 2)
                        numVectors++;
                }
            }

            double[,] DataSet = new double[numVectors, numAttributes];

            // Second pass: parse the values.
            using (StreamReader sr = new StreamReader(FileName))
            {
                sr.ReadLine();  // skip the header line
                int v = 0;
                while ((theLine = sr.ReadLine()) != null)
                {
                    if (theLine.Trim().Length > 2)
                    {
                        string[] S = theLine.Split(separators, StringSplitOptions.RemoveEmptyEntries);
                        for (int a = 0; a < numAttributes; a++)
                            DataSet[v, a] = Double.Parse(S[a], System.Globalization.CultureInfo.InvariantCulture);
                        v++;
                    }
                }
            }
            return DataSet;
        }
    }
}
