﻿using System;
using innovations.ml.core;
using innovations.ml.core.models;
using innovations.ml.core.solvers;
using innovations.ml.data;
using MathNet.Numerics.LinearAlgebra.Double;
using MathNet.Numerics.LinearAlgebra.Generic;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System.Configuration;
using System.Collections.Generic;

namespace innovations.ml.test
{
    /// <summary>
    /// Tests for exercise 4 of the ML class: neural-network feedforward cost,
    /// sigmoid gradient, and backpropagation verified against numerical gradients.
    /// Data/weight file locations come from App.config (keys "NeuralNetworks4*").
    /// </summary>
    [TestClass]
    public class CoreTestsEx4
    {
        /// <summary>
        /// Loads the ex4 digit data and pre-trained weights from the paths in
        /// App.config and builds a 3-layer, 10-class network with the given
        /// regularization parameter.
        /// </summary>
        /// <param name="lambda">Regularization strength (0.0 disables regularization).</param>
        /// <returns>A network ready for <c>ComputeCost()</c>.</returns>
        private static NeuralNetworkModel BuildEx4Network(double lambda)
        {
            // AppSettings[...] already returns string; no ToString() needed.
            string dataPath = ConfigurationManager.AppSettings["NeuralNetworks4Data"];
            CSVLoader csvData = StartUp.LoadFile(dataPath);

            // The raw labels are 1-based (1..10); shift to 0-based for the model.
            for (int i = 0; i < csvData.Y.Count; i++)
                csvData.Y[i] -= 1;

            string[] weightFiles =
            {
                ConfigurationManager.AppSettings["NeuralNetworks4Weights1"],
                ConfigurationManager.AppSettings["NeuralNetworks4Weights2"]
            };
            CSVWeightsLoader csvWeights = StartUp.LoadWeightFile(weightFiles);

            List<Matrix<double>> thetaList = new List<Matrix<double>>(csvWeights.ThetaList);

            // 10 output classes, 3 layers (input / hidden / output).
            return new NeuralNetworkModel(csvData.X, csvData.Y, false, 10, lambda, 3, thetaList);
        }

        /// <summary>
        /// Builds a small deterministic 3-3-5 network from DebugInitializeWeights
        /// and returns the relative difference between the analytical
        /// (backpropagation) gradient and the numerical gradient.
        /// </summary>
        /// <param name="lambda">Regularization strength used during the check.</param>
        private static double RunGradientCheck(double lambda)
        {
            const int INPUT_LAYER_SIZE = 3;
            const int HIDDEN_LAYER_SIZE = 5;
            const int LABEL_COUNT = 3;
            const int M = 5;            // number of records in the training set
            const int LAYER_COUNT = 3;

            // DebugInitializeWeights produces repeatable 'random' test data.
            Matrix<double> theta1 = NeuralNetworkModel.DebugInitializeWeights(HIDDEN_LAYER_SIZE, INPUT_LAYER_SIZE);
            Matrix<double> theta2 = NeuralNetworkModel.DebugInitializeWeights(LABEL_COUNT, HIDDEN_LAYER_SIZE);
            // Reusing DebugInitializeWeights to generate X as well.
            Matrix<double> X = NeuralNetworkModel.DebugInitializeWeights(M, INPUT_LAYER_SIZE - 1);
            Vector<double> Y = CreateYVector(1, M, LABEL_COUNT);

            List<Matrix<double>> thetaList = new List<Matrix<double>> { theta1, theta2 };

            // Originally passed the literal 3 for the label count; use the
            // constant so the dimensions stay in sync.
            return GradientChecker.CheckGradient(X, Y, false, LABEL_COUNT, lambda, LAYER_COUNT, thetaList);
        }

        /// <summary>
        /// Unregularized feedforward cost must match the ex4 reference value.
        /// </summary>
        [TestMethod]
        public void ComputeCostFeedforward()
        {
            NeuralNetworkModel neuralNetwork = BuildEx4Network(0.0);

            neuralNetwork.ComputeCost();

            Assert.AreEqual(0.287629, Math.Round(neuralNetwork.J, 6));
        }

        /// <summary>
        /// Regularized (lambda = 1) feedforward cost must match the ex4 reference value.
        /// </summary>
        [TestMethod]
        public void ComputeCostFeedforwardWithRegularization()
        {
            NeuralNetworkModel neuralNetwork = BuildEx4Network(1.0);

            neuralNetwork.ComputeCost();

            Assert.AreEqual(0.383770, Math.Round(neuralNetwork.J, 6));
        }

        /// <summary>
        /// Sigmoid gradient g'(z) = g(z) * (1 - g(z)); the input is symmetric
        /// around 0 so the expected values mirror each other.
        /// </summary>
        [TestMethod]
        public void ComputeSigmoidGradient()
        {
            Vector<double> gradient = Sigmoid.ComputeGradient(new DenseVector(new double[] { 1.0, -0.5, 0.0, 0.5, 1.0 }));

            Assert.AreEqual(0.196612, Math.Round(gradient[0], 6));
            Assert.AreEqual(0.235004, Math.Round(gradient[1], 6));
            Assert.AreEqual(0.250000, Math.Round(gradient[2], 6));
            Assert.AreEqual(0.235004, Math.Round(gradient[3], 6));
            Assert.AreEqual(0.196612, Math.Round(gradient[4], 6));
        }

        /// <summary>
        /// Backprop gradient (lambda = 0) must agree with the numerical gradient
        /// to within 1e-9 relative difference.
        /// </summary>
        [TestMethod]
        public void ImplementBackpropagation()
        {
            const double NUMERICAL_GRADIENT_THRESHOLD = 1e-9;

            double result = RunGradientCheck(0.0);

            Assert.IsTrue(result < NUMERICAL_GRADIENT_THRESHOLD);
        }

        /// <summary>
        /// Backprop gradient with regularization (lambda = 3) must agree with the
        /// numerical gradient to within 1e-9 relative difference.
        /// </summary>
        [TestMethod]
        public void ImplementBackpropagationWithRegularization()
        {
            const double NUMERICAL_GRADIENT_THRESHOLD = 1e-9;

            double result = RunGradientCheck(3.0);

            Assert.IsTrue(result < NUMERICAL_GRADIENT_THRESHOLD);
        }

        /// <summary>
        /// TODO: incomplete — builds random initial weights for the 400-25-10
        /// digit network but performs no training and asserts nothing yet.
        /// Kept so the intended shape of the training test is on record.
        /// </summary>
        [TestMethod]
        public void TrainNN()
        {
            const int INPUT_LAYER_SIZE = 400;  // 20x20 input images of digits
            const int HIDDEN_LAYER_SIZE = 25;  // 25 hidden units
            const int LABEL_COUNT = 10;        // 10 labels, from 1 to 10
            const int LAYER_COUNT = 3;

            Matrix<double> initialTheta1 = innovations.util.RandomWeights.Initialize(INPUT_LAYER_SIZE, HIDDEN_LAYER_SIZE);
            Matrix<double> initialTheta2 = innovations.util.RandomWeights.Initialize(HIDDEN_LAYER_SIZE, LABEL_COUNT);
            //GradientChecker_temp.Check(LAYER_COUNT);
        }

        /// <summary>
        /// Builds a label vector [startValue..endValue] with each entry reduced
        /// modulo <paramref name="modValue"/>, e.g. (1, 5, 3) -> [1, 2, 0, 1, 2].
        /// </summary>
        /// <param name="startValue">First (inclusive) label value.</param>
        /// <param name="endValue">Last (inclusive) label value.</param>
        /// <param name="modValue">Modulus applied to each value.</param>
        private static Vector<double> CreateYVector(int startValue, int endValue, int modValue)
        {
            Vector<double> vector = new DenseVector((endValue + 1) - startValue);
            for (int i = 0; i < vector.Count; i++)
            {
                vector[i] = startValue % modValue;
                startValue++;
            }
            return vector;
        }

        // TODO: Add a test method that runs the AND, NOT, NOR, and XNOR examples in ML-Class lecture 8.
        // This test will be used to better understand how BackProp works.  I will build some sort of
        // visual demonstration of this simple sample of BackProp.
    }
}