﻿using System;
using System.Configuration;
using innovations.ml.core;
using innovations.ml.core.models;
using innovations.ml.core.solvers;
using innovations.ml.data;
using innovations.util.exts.mathdotnet;
using MathNet.Numerics.LinearAlgebra.Double;
using MathNet.Numerics.LinearAlgebra.Generic;
using Microsoft.VisualStudio.TestTools.UnitTesting;

namespace innovations.ml.test
{
    [TestClass]
    public class CoreTestsEx1
    {
        // Tolerance equivalent to the original Math.Round(x, 6) comparisons:
        // round-to-6-places equality means |actual - expected| < 5e-7.
        private const double SixDecimalDelta = 5e-7;

        // Tolerance for the one value that only matches to 5 decimal places.
        private const double FiveDecimalDelta = 5e-6;

        // Tight tolerance for values that should be reproduced essentially exactly.
        private const double TightDelta = 1e-12;

        /// <summary>
        /// Sanity check on MathNet basics: a 5x5 identity matrix is symmetric,
        /// its entries sum to the row count, and every row/column sums to 1.
        /// </summary>
        [TestMethod]
        public void WarmUpExercise()
        {
            Matrix<double> m = DenseMatrix.Identity(5);

            Assert.IsTrue(m.IsSymmetric);
            // Use a delta overload instead of exact floating-point equality.
            Assert.AreEqual(m.RowCount, m.Sum(), TightDelta);

            for (int i = 0; i < m.RowCount; i++)
            {
                Assert.AreEqual(1.0, m.Column(i).Sum(), TightDelta);
                Assert.AreEqual(1.0, m.Row(i).Sum(), TightDelta);
            }
        }

        /// <summary>
        /// Single-variable linear regression via gradient descent:
        /// verifies the initial cost, the fitted theta after 1500 iterations
        /// with alpha = 0.01, and two profit predictions from the fit.
        /// </summary>
        [TestMethod]
        public void RunGradientDescent()
        {
            // AppSettings indexer already returns string; no ToString() needed.
            CSVLoader csv = StartUp.LoadFile(ConfigurationManager.AppSettings["Ex1Data1File"]);

            Solver solver = new GradientDescent();
            solver.Model = new LinearRegressionModel(csv.X, csv.Y, false);

            // Cost at the initial theta.
            solver.Model.ComputeCost();
            Assert.AreEqual(0.69314718055994629, solver.Model.J, TightDelta);

            // Running gradient descent.
            solver.Iterations = 1500;
            solver.Alpha = 0.01;
            solver.Run();
            Assert.AreEqual(-3.630291, solver.Model.Theta[0], SixDecimalDelta);
            Assert.AreEqual(1.166362, solver.Model.Theta[1], SixDecimalDelta);

            // Predicting profit based on theta values obtained from gradient
            // descent (theta is scaled by 10000 — presumably the units of the
            // training data; confirm against the data set's description).
            double profit = Prediction.Predict(new DenseVector(new double[] { 1, 3.5 }), solver.Model.Theta * 10000);
            Assert.AreEqual(4519.767868, profit, SixDecimalDelta);

            profit = Prediction.Predict(new DenseVector(new double[] { 1, 7 }), solver.Model.Theta * 10000);
            Assert.AreEqual(45342.450129, profit, SixDecimalDelta);
        }

        /// <summary>
        /// Multivariate linear regression via gradient descent with feature
        /// normalization: verifies the three theta components after 400
        /// iterations with alpha = 0.01.
        /// </summary>
        [TestMethod]
        public void RunGradientDescentMulti()
        {
            CSVLoader csv = StartUp.LoadFile(ConfigurationManager.AppSettings["Ex1Data2File"]);

            Solver solver = new GradientDescent();
            solver.Model = new LinearRegressionModel(csv.X, csv.Y, false);
            solver.Iterations = 400;
            solver.Alpha = 0.01;

            // Features must be normalized before running multivariate descent.
            FeatureNormalizer.Normalize(solver.Model);
            solver.Run();

            Assert.AreEqual(334302.063993, solver.Model.Theta[0], SixDecimalDelta);
            Assert.AreEqual(100087.116006, solver.Model.Theta[1], SixDecimalDelta);
            Assert.AreEqual(3673.548451, solver.Model.Theta[2], SixDecimalDelta);
        }

        /// <summary>
        /// Closed-form solution via the normal equation on the same data set
        /// as <see cref="RunGradientDescentMulti"/> (no normalization needed).
        /// </summary>
        [TestMethod]
        public void ComputeNormalEquation()
        {
            CSVLoader csv = StartUp.LoadFile(ConfigurationManager.AppSettings["Ex1Data2File"]);

            IModel model = new LinearRegressionModel(csv.X, csv.Y, false);
            NormalEquation.Compute(model);

            // Theta[0] only matches the reference value to the 5th decimal place.
            Assert.AreEqual(89597.90954, model.Theta[0], FiveDecimalDelta);
            Assert.AreEqual(139.210674, model.Theta[1], SixDecimalDelta);
            Assert.AreEqual(-8738.019112, model.Theta[2], SixDecimalDelta);
        }
    }
}