﻿using System;
using System.Collections.Generic;
using System.Text;
using MathNet.Numerics.LinearAlgebra.Double;
using MathNet.Numerics.LinearAlgebra.Generic;
using innovations.ml.core.models;
using innovations.util.exts.mathdotnet;
using innovations.util.exts.msft;
using innovations.ml.core.collections;

namespace innovations.ml.core
{
    /// <summary>
    /// Utility for validating analytically computed (back-propagation) neural-network
    /// gradients against numerically estimated (finite-difference) gradients.
    /// </summary>
    public class GradientChecker
    {
        /// <summary>
        /// Computes the relative difference between the analytic gradient and the
        /// numerical gradient for the same network configuration. A small return
        /// value indicates the analytic gradient implementation agrees with the
        /// finite-difference estimate.
        /// </summary>
        /// <param name="x">Input feature matrix.</param>
        /// <param name="y">Target output vector.</param>
        /// <param name="includesOnesColumn">Whether <paramref name="x"/> already contains a bias (ones) column.</param>
        /// <param name="numberOfRecords">Number of training records.</param>
        /// <param name="lambda">Regularization parameter.</param>
        /// <param name="layers">Number of network layers.</param>
        /// <param name="multiLevelMultiClassTheta">Per-layer weight matrices.</param>
        /// <returns>
        /// ||numerical − analytic||₂ / ||numerical + analytic||₂ over the flattened gradients.
        /// </returns>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="multiLevelMultiClassTheta"/> is null.</exception>
        /// <exception cref="InvalidOperationException">Thrown when either gradient computation yields no matrices.</exception>
        public static double CheckGradient(Matrix<double> x, Vector<double> y, bool includesOnesColumn, int numberOfRecords, double lambda, int layers, List<Matrix<double>> multiLevelMultiClassTheta)
        {
            if (multiLevelMultiClassTheta == null)
                throw new ArgumentNullException("multiLevelMultiClassTheta");

            // nn1 produces the analytic gradient; nn2 the numerical one. nn2 receives a
            // cloned theta list so the finite-difference perturbations cannot contaminate
            // nn1's weights.
            NeuralNetworkModel nn1 = new NeuralNetworkModel(x, y, includesOnesColumn, numberOfRecords, lambda, layers, multiLevelMultiClassTheta);
            NeuralNetworkModel nn2 = new NeuralNetworkModel(x, y, includesOnesColumn, numberOfRecords, lambda, layers, new List<Matrix<double>>(multiLevelMultiClassTheta.Clone<Matrix<double>>()));

            // NOTE(review): ComputeCost(true) appears to populate nn1.GradientLinkedList
            // as a side effect, since it is read immediately afterwards — confirm in model.
            nn1.ComputeCost(true);
            List<Matrix<double>> numericalGradient = nn2.ComputeNumericalGradient();

            Vector<double> numericGradientMerge = FlattenToVector(numericalGradient);
            Vector<double> gradientMerge = FlattenToVector(nn1.GradientLinkedList);

            // Fail with a meaningful message instead of a NullReferenceException below.
            if (numericGradientMerge == null || gradientMerge == null)
                throw new InvalidOperationException("Gradient computation produced no matrices to compare.");

            System.Diagnostics.Debug.WriteLine(gradientMerge.ToTable());

            // Relative difference: normalizing by the sum makes the result independent
            // of the overall scale of the gradients.
            return (numericGradientMerge.Subtract(gradientMerge)).Norm(2) / (numericGradientMerge.Add(gradientMerge)).Norm(2);
        }

        // Unrolls each matrix into a vector (via ToVector(false)) and concatenates the
        // results into one long vector. Returns null for an empty list; the caller
        // treats that as an error condition.
        private static Vector<double> FlattenToVector(IList<Matrix<double>> matrices)
        {
            Vector<double> merged = null;
            for (int i = 0; i < matrices.Count; i++)
            {
                Vector<double> unrolled = matrices[i].ToVector(false);
                merged = (merged == null) ? unrolled : merged.Merge(unrolled);
            }
            return merged;
        }
    }
}
