﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Excel = Microsoft.Office.Interop.Excel;
using Meta.Numerics.Statistics.Distributions;

namespace ExcelAddIn1
{
    /// <summary>
    /// Implementation of the F-test for equal variances.
    /// The test requires that the datasets are normally distributed and independent.
    /// </summary>
    class Test_EqualVariance : Test
    {
        // Outcome of the most recent run. After RunTest() this is true only if
        // EVERY pairwise comparison found equal variances; after RunTest(i1, i2)
        // it reflects that single pair.
        private bool equal = false;

        public Test_EqualVariance(DataContainer data)
        {
            this.data = data;
            res = new List<String>();
        }

        public override string GetInfo()
        {
            String s = "Performs an F-test to check if two or more datasets have equal variances. ";
            s += "The test requires that the datasets are normally distributed and independent.";
            return s;
        }

        /// <summary>
        /// Returns the verdict of the last test run (see field comment for semantics).
        /// </summary>
        public bool IsEqual()
        {
            return equal;
        }

        /// <summary>
        /// Runs the F-test on every unordered pair of datasets, appending the
        /// results of each comparison to res.
        /// </summary>
        public override void RunTest()
        {
            res.Add("F-test for equal variance");

            // Error check: the test is only defined for two or more datasets.
            if (data.GetNoSets() < 2)
            {
                res.Add("At least two datasets are required!");
                return;
            }

            // BUGFIX: 'equal' used to be overwritten by each pairwise run, so with
            // more than two datasets it only reflected the last pair tested.
            // Aggregate instead: equal overall only if every pair is equal.
            bool allEqual = true;
            for (int i1 = 0; i1 < data.GetNoSets() - 1; i1++)
            {
                // Start at i1 + 1 so each unordered pair is tested exactly once
                // (replaces the redundant 'i1 != i2 && i1 < i2' filter).
                for (int i2 = i1 + 1; i2 < data.GetNoSets(); i2++)
                {
                    RunTest(i1, i2);
                    allEqual = allEqual && equal;
                }
            }
            equal = allEqual;
        }

        /// <summary>
        /// Runs a two-tailed F-test on the pair of datasets at indices i1 and i2,
        /// appends the result rows to res, and sets 'equal' for this pair.
        /// </summary>
        public void RunTest(int i1, int i2)
        {
            DataSet d1 = data.GetDataSet(i1);
            DataSet d2 = data.GetDataSet(i2);

            // Calculate sample size and standard deviation.
            // The F-statistic convention puts the larger variance in the numerator,
            // so dataset 1 must have the higher stdev of the two.
            double n1 = 0, n2 = 0, stdev1 = 0, stdev2 = 0;
            if (d1.GetStDev() > d2.GetStDev())
            {
                n1 = (double)d1.GetN();
                n2 = (double)d2.GetN();
                stdev1 = d1.GetStDev();
                stdev2 = d2.GetStDev();
            }
            else
            {
                n1 = (double)d2.GetN();
                n2 = (double)d1.GetN();
                stdev1 = d2.GetStDev();
                stdev2 = d1.GetStDev();
            }

            // F-score: ratio of the larger sample variance to the smaller (>= 1).
            double F = Math.Pow(stdev1, 2) / Math.Pow(stdev2, 2);

            // Degrees of freedom: numerator (DF1) and denominator (DF2).
            double DF1 = n1 - 1.0;
            double DF2 = n2 - 1.0;

            FisherDistribution fdist = new FisherDistribution(DF1, DF2);
            // Critical F for a two-tailed test at significance level alpha.
            double Fc = fdist.InverseRightProbability(alpha / 2.0);

            // Two-tailed p-value. BUGFIX: clamp at 1.0 — doubling the right-tail
            // probability can exceed 1 when F is close to 1.
            double p = Math.Min(fdist.RightProbability(F) * 2.0, 1.0);

            // Results (semicolon-separated rows for the output table).
            res.Add(";" + d1.GetName() + ";" + d2.GetName() + ";-");
            res.Add("N;" + d1.GetN() + ";" + d2.GetN() + ";-");
            res.Add("Mean;" + d1.GetMean().ToString("F2") + ";" + d2.GetMean().ToString("F2"));
            res.Add("StDev;" + d1.GetStDev().ToString("F2") + ";" + d2.GetStDev().ToString("F2"));
            res.Add("DoF1;" + (int)DF1);
            res.Add("DoF2;" + (int)DF2);
            res.Add("α;" + alpha + ";;-");
            res.Add("P-value;" + p.ToString("F3"));
            res.Add("F-score;" + F.ToString("F2"));
            res.Add("F-crit;" + Fc.ToString("F2"));
            if (F < Fc)
            {
                res.Add("Result;Equal variances (significance level " + p.ToString("F3") + ")");
                equal = true;
            }
            else
            {
                // BUGFIX: this branch previously reported "Equal variances" even
                // though F >= Fc means the null hypothesis of equal variances is rejected.
                res.Add("Result;Unequal variances (significance level " + p.ToString("F3") + ")");
                equal = false;
            }
            res.Add(";;;-");
        }
    }
}
