﻿using System;
using System.Collections.Generic;
using System.IO;
using System.Windows.Forms;
using MentalAlchemy.Atomics;
using MentalAlchemy.Compounds;
using MentalAlchemy.Molecules;
using MentalAlchemy.Molecules.MachineLearning;

namespace EvoPCA
{
	/// <summary>
	/// Main window of the EvoPCA experiment application. Drives three experiment
	/// flows (synthetic Gaussian data, Proben1 files, MNIST), runs the selected
	/// neuro-evolutionary PCA algorithm repeatedly over a sweep of the EvoPCA2
	/// threshold parameter, and writes aggregated logs per threshold value.
	/// </summary>
	public partial class MainForm : Form
	{
		/// <summary>
		/// Number of samples to be generated from each distribution.
		/// </summary>
		protected const int DISTR_SAMPLES = 100;
		/// <summary>File name the training inputs are dumped to on every log write.</summary>
		protected const string TRAINING_DATA_OUTPUT = "traindata.log";
		// Algorithm names shown in the algorithm combo box (see GetAlgorithm).
		protected const string EVO_PCA = "EvoPCA";
		protected const string EVO_PCA2 = "EvoPCA2";
		protected const string EVO_PCA3 = "EvoPCA3";

		/// <summary>Current training set; (re)filled by GenerateData / LoadProben1Data / LoadMnistData.</summary>
		protected List<TrainingSample> trData;
		/// <summary>Eigenvectors (stored as columns) of the training data autocorrelation matrix.</summary>
		protected float[,] eVectors;
		/// <summary>Variances of the training data projected onto <see cref="eVectors"/> (reversed order).</summary>
		protected List<float> vars;

		/// <summary>
		/// Initializes the form and populates the algorithm combo box.
		/// </summary>
		public MainForm()
		{
			InitializeComponent();

			Pca2Threshold.Value = EvoPCA2.DEFAULT_THRESHOLD;
			openFileDialog1.InitialDirectory = Application.StartupPath;

			AlgCombo.Items.Add(EVO_PCA);
			AlgCombo.Items.Add(EVO_PCA2);
			AlgCombo.Items.Add(EVO_PCA3);
			// select EvoPCA2 by default (second item from the end).
			AlgCombo.SelectedIndex = AlgCombo.Items.Count - 2;
		}

		/// <summary>
		/// Runs the experiment on synthetic Gaussian data: generates the training
		/// set, then for each threshold t performs [RunsNumeric] runs and writes
		/// an aggregated log into a per-threshold folder.
		/// </summary>
		private void RunBtn_Click(object sender, EventArgs e)
		{
			var dims = (int)DimNumeric.Value;
			var distrCount = (int) DistrNumeric.Value;

			// generate training data.
			GenerateData(dims, distrCount);

			// run algorithm.
			var runsCount = (int)RunsNumeric.Value;

			// make parameter t run values from 10 to 100 with step 10.
			for (int t = 10; t <= 100; t += 10)
			{
				// the threshold control is read by GetAlgorithm on every run.
				Pca2Threshold.Value = t;

				// perform many runs.
				var res = new List<RunResults>();
				for (int i = 0; i < runsCount; ++i)
				{
					var runRes = Run(dims, true);
					res.Add(runRes);
				}

				var folder = GetFolderName();
				if (!Directory.Exists(folder))
				{
					Directory.CreateDirectory(folder);
				}

				// prepare lines for log.
				var logLines = GetLogLines(res);
				WriteLog(Path.Combine(folder, "evopca.log"), logLines);
			}

			// todo: show eigenvectors for the training data and ANN weights.
		}

		/// <summary>
		/// Builds log lines aggregating the given runs: UI parameters header,
		/// averaged fitness statistics, elapsed times, variance-sum rates,
		/// per-run variances and resulting network weights.
		/// </summary>
		/// <param name="runs">Results of the individual runs to aggregate.</param>
		/// <returns>Log lines ready to be written to a file.</returns>
		public List<string> GetLogLines (List<RunResults> runs)
		{
			var res = new List<string>();

			// make header.
			res.Add("> EA parameters:");
			res.AddRange(eaPropertiesControl1.Parameters.ToStrings());
			res.Add("\n> Dimensions:\t" + DimNumeric.Value);
			res.Add("> Distributions:\t" + DistrNumeric.Value);
			res.Add("> Alpha:\t" + AlphaNumeric.Value);
			res.Add("> Multi-objective:\t" + MultiObjCheck.Checked);
			res.Add("> Algorithm:\t" + AlgCombo.SelectedItem.ToString());
			res.Add("> EvoPCA2 threshold:\t" + Pca2Threshold.Value);
			res.Add("> Runs count:\t" + RunsNumeric.Value);

			// nothing to aggregate without runs; return the header only.
			if (runs.Count == 0) { return res; }

			//
			// compute averaged fitness stats.
			#region - Fitness stats. -
			var allStats = new List<List<Stats>>();
			for (int i = 0; i < runs.Count; i++)
			{
				allStats.Add(runs[i].FitnessStats);
			}

			//
			// prepare stats for averaging.
			var maxDataCount = 0;
			foreach (var stat in allStats)
			{
				StructMath.FillGapsZero(stat);
				if (maxDataCount < stat[0].Data.Count)
				{
					maxDataCount = stat[0].Data.Count;
				}
			}

			//
			// append zero columns to make all data arrays to be of equal lengths.
			var zeroV = new List<float>(new float[allStats[0].Count]);
			for (int i = 0; i < allStats.Count; i++)
			{
				var stat = allStats[i];
				while (stat[0].Data.Count < maxDataCount)
				{
					StructMath.AppendStats(ref stat, zeroV, "");
				}
			}

			//
			// Make averaging.
			var avgStats = StructMath.Average(allStats);
			res.Add("\n> Averaged NE run statistics:");
			res.AddRange(StructMath.ConvertToStringsList(avgStats, true)); 
			#endregion

			#region - Time stats. -
			var timeStats = new List<int>();
			for (int i = 0; i < runs.Count; i++)
			{
				timeStats.Add(runs[i].ElapsedTime);
			}
			res.Add("\n> Elapsed time data:");
			res.Add(VectorMath.ConvertToString(timeStats.ToArray(), '\t'));

			res.Add("\n> Elapsed time stats:");
			var tStats = VectorMath.CalculateStats(timeStats.ToArray());
			res.Add(tStats.GetStatsHeader());
			res.Add(tStats.GetStatsString());
			#endregion

			#region - Sum of variances rates. -
			var varSumRate = new List<float>();
			for (int i = 0; i < runs.Count; i++)
			{
				varSumRate.Add(runs[i].SumVarRate);
			}
			res.Add("\n> Sum variances rates:");
			res.Add(VectorMath.ConvertToString(varSumRate.ToArray(), '\t'));

			res.Add("\n> Sum variances rates stats:");
			var varStats = VectorMath.CalculateStats(varSumRate.ToArray());
			res.Add(varStats.GetStatsHeader());
			res.Add(varStats.GetStatsString()); 
			#endregion

			#region - Variances. -
			res.Add("\n>Eigen projections variances:");
			res.Add(VectorMath.ConvertToString(vars.ToArray(), '\t'));

			res.Add("\n>Variances:");
			for (int i = 0; i < runs.Count; i++)
			{
				res.Add(VectorMath.ConvertToString(runs[i].Vars.ToArray(), '\t'));
			}
			#endregion

			#region - Elapsed times. -
			// NOTE(review): this section repeats the "Time stats" region above;
			// kept intact to preserve the established log format.
			var times = new List<int>();
			for (int i = 0; i < runs.Count; i++)
			{
				times.Add(runs[i].ElapsedTime);
			}
			res.Add("\n>Elapsed times:");
			res.Add(VectorMath.ConvertToString(times.ToArray(), '\t'));

			res.Add("\n> Elapsed times stats:");
			varStats = VectorMath.CalculateStats(times.ToArray());
			res.Add(varStats.GetStatsHeader());
			res.Add(varStats.GetStatsString());
			#endregion

			#region - Individual weights. -
			res.Add("\n> Resulting networks' weights");
			for (int i = 0; i < runs.Count; i++)
			{
				res.Add(">Run #" + (i+1));
				res.AddRange(MatrixMath.ConvertToRowsStringsList(runs[i].Weights, '\t'));
				res.Add("");
			}
			#endregion

			return res;
		}

		/// <summary>
		/// Generates Gaussian training data and computes reference eigenvectors
		/// (stored in <see cref="eVectors"/>) and projection variances
		/// (stored in <see cref="vars"/>).
		/// </summary>
		/// <param name="dims">Dimensionality of every sample.</param>
		/// <param name="distrCount">Number of Gaussian distributions to sample from.</param>
		private void GenerateData(int dims, int distrCount)
		{
			trData = MachineLearningElements.CreateGaussianSamples(DISTR_SAMPLES, dims, distrCount);

			// compute e-vectors.
			var ac = MachineLearningElements.ComputeAutocorrelationMatrix(trData);
			if (!MatrixMath.CheckValues(ac))
			{
				MessageBox.Show("Autocorrelation matrix is corrupted.");
				return;
			}

			float[] eval;
			// computes eigenvector and eigenvalues and writes eigenvectors as columns.
			Numerics.EigenMathNet.Eig(ac, out eVectors, out eval);

			// convert training data to a matrix once; it is reused below.
			var trMatrix = MachineLearningElements.ConvertToMatrix(trData);

			// compute e-vector for auto-covariance matrix (kept for the
			// commented-out alternative visualization below).
			var cov = MatrixMath.ComputeCovarianceMatrix(MatrixMath.ConvertToColumnsList(trMatrix));
			float[,] covEig;
			Numerics.EigenMathNet.Eig(cov, out covEig, out eval);

			// compute variance of the data along each eigenvector direction.
			var trInputs = MatrixMath.ConvertToRowsList(trMatrix);
			var dots = VectorMath.DotProduct(trInputs, MatrixMath.ConvertToColumnsList(eVectors));
			var dotCols = MatrixMath.ConvertToColumnsList(dots);
			vars = new List<float>(VectorMath.VarianceList(dotCols));
			vars.Reverse();

			// show e-vectors.
			ShowEigenVectors(TrainEigenVectorsDataTable, eVectors);
			//ShowEigenVectors(TrainEigenVectorsDataTable, covEig);
		}

		/// <summary>
		/// Loads a Proben1 data file into <see cref="trData"/> and computes the
		/// reference eigenvectors and projection variances, falling back to
		/// identity eigenvectors when the eigendecomposition fails.
		/// </summary>
		/// <param name="filename">Path to the Proben1 data file.</param>
		private void LoadProben1Data(string filename)
		{
			List<TrainingSample> valData, testData;
			MachineLearningElements.LoadProben1Data(filename, out trData, out valData, out testData);

			// compute e-vectors.
			var ac = MachineLearningElements.ComputeAutocorrelationMatrix(trData);
			if (!MatrixMath.CheckValues(ac))
			{
				MessageBox.Show("Autocorrelation matrix is corrupted.");
				return;
			}

			float[] eval;
			// computes eigenvector and eigenvalues and writes eigenvectors as columns.
			try
			{
				Numerics.EigenMathNet.Eig(ac, out eVectors, out eval);
			}
			catch
			{	// create default eigenvectors and eigenvalues.
				eVectors = MatrixMath.Identity(ac.GetLength(0));
				eval = VectorMath.Ones(ac.GetLength(0));
			}

			// convert training data to a matrix once; it is reused below.
			var trMatrix = MachineLearningElements.ConvertToMatrix(trData);

			// compute e-vector for auto-covariance matrix (kept for the
			// commented-out alternative visualization below).
			var cov = MatrixMath.ComputeCovarianceMatrix(MatrixMath.ConvertToColumnsList(trMatrix));
			float[,] covEig;
			try
			{
				Numerics.EigenMathNet.Eig(cov, out covEig, out eval);
			}
			catch
			{	// create default eigenvectors and eigenvalues.
				covEig = MatrixMath.Identity(cov.GetLength(0));
				eval = VectorMath.Ones(cov.GetLength(0));
			}

			// compute variance of the data along each eigenvector direction.
			var trInputs = MatrixMath.ConvertToRowsList(trMatrix);
			var dots = VectorMath.DotProduct(trInputs, MatrixMath.ConvertToColumnsList(eVectors));
			var dotCols = MatrixMath.ConvertToColumnsList(dots);
			vars = new List<float>(VectorMath.VarianceList(dotCols));
			vars.Reverse();

			// show e-vectors.
			ShowEigenVectors(TrainEigenVectorsDataTable, eVectors);
			//ShowEigenVectors(TrainEigenVectorsDataTable, covEig);
		}

		/// <summary>
		/// Loads the first 100 MNIST training images into <see cref="trData"/>
		/// and computes projection variances against identity "eigenvectors"
		/// (the real autocorrelation computation is disabled below).
		/// </summary>
		/// <param name="folder">Folder containing the MNIST data files.</param>
		private void LoadMnistData(string folder)
		{
			List<byte> trainLabels, testLabels;
			List<byte[,]> trainImg, testImg;
			MNISTExtractor.ExtractEverything(folder, out trainLabels, out trainImg, out testLabels, out testImg);

			// form training data from the first [maxTrainSize] samples.
			if (trData != null) {trData.Clear();}
			else {trData = new List<TrainingSample>();}

			var maxTrainSize = 100;
			for (int i = 0; i < maxTrainSize; i++)
			{
				var trSample = new TrainingSample();
				trSample.ClassID = trainLabels[i];

				// flatten the image into a single row vector of floats.
				var vec = MatrixMath.ConvertToVector(trainImg[i]);
				var vecF = VectorMath.ConvertToFloats(vec);
				trSample.Data = MatrixMath.CreateFromVector(vecF, vecF.Length);

				trData.Add(trSample);
			}

			// compute e-vectors.
			// NOTE(review): the autocorrelation computation is disabled; an
			// identity matrix is used instead (presumably for speed on the
			// high-dimensional MNIST data — TODO confirm).
			//var ac = MachineLearningElements.ComputeAutocorrelationMatrix(trData);
			var ac = MatrixMath.Identity(trData[0].Data.Length);
			if (!MatrixMath.CheckValues(ac))
			{
				MessageBox.Show("Autocorrelation matrix is corrupted.");
				return;
			}

			float[] eval;
			// computes eigenvector and eigenvalues and writes eigenvectors as columns.
			Numerics.EigenMathNet.Eig(ac, out eVectors, out eval);

			// compute variance of the data along each eigenvector direction.
			var trMatrix = MachineLearningElements.ConvertToMatrix(trData);
			var trInputs = MatrixMath.ConvertToRowsList(trMatrix);
			var dots = VectorMath.DotProduct(trInputs, MatrixMath.ConvertToColumnsList(eVectors));
			var dotCols = MatrixMath.ConvertToColumnsList(dots);
			vars = new List<float>(VectorMath.VarianceList(dotCols));
			vars.Reverse();

			//ShowEigenVectors(TrainEigenVectorsDataTable, eVectors);
		}

		/// <summary>
		/// Displays eigenvectors in the given table control, one vector per row.
		/// </summary>
		/// <param name="table">Target table control.</param>
		/// <param name="evec">Eigenvectors stored as matrix columns.</param>
		private static void ShowEigenVectors (DataTableControl table, float[,] evec)
		{
			// transpose data: columns of the matrix become table rows.
			var cols = MatrixMath.ConvertToColumnsList(evec);
			var colStr = MatrixMath.ConvertToStringsList(cols);
			table.DataRows = colStr;
		}

		/// <summary>
		/// Performs a single neuro-evolutionary run: builds a single-layer ANN,
		/// evolves its weights to maximize projection variance, and collects
		/// fitness/time/variance statistics.
		/// </summary>
		/// <param name="dims">Input dimensionality of the network.</param>
		/// <param name="showEVectors">When true, splits the best genome into weight
		/// vectors and shows them in the weights table.</param>
		/// <returns>Statistics of the run.</returns>
		private RunResults Run (int dims, bool showEVectors)
		{
			// create ANN w/o hidden layers; outputs count is a fraction of the inputs.
			var outsCount = (int) (dims*(float) AlphaNumeric.Value);
			var annProps = new NeuralNetProperties();
			annProps.UseBias = false;
			annProps.nodesNumber = new[] { dims,  outsCount};
			annProps.actFunctions = new ActivationFunction []{ActivationFunctions.Identity, ActivationFunctions.Linear};
			var ann = LayeredNeuralNetwork.CreateNetwork(annProps);

			// create and run NE algorithm.
			var ne = GetAlgorithm ();
			var eaparams = eaPropertiesControl1.Parameters;
			eaparams.IndividualSize = ann.GetTotalConnectionsNumber();
			// one expected mutation per individual on average.
			eaparams.MRate = 1f/eaparams.IndividualSize;
			ne.NeuralNetwork = ann;

			var objFunc = CombinedAnn.FitnessFunctions.GetFitnessFunction(CombinedAnn.FitnessFunctions.VARIANCE_MAX);
			objFunc.Network = ann;
			objFunc.TrainData = trData;
			FitnessComparator.MinimizeFitness = objFunc.MinimizeFitness;
			FitnessComparator.Multiobjective = MultiObjCheck.Checked;
			ne.FitnessFunction = objFunc;

			ne.Run(eaparams);

			var res = new RunResults();
			res.FitnessStats = ne.FitnessStats;
			res.ElapsedTime = (int)ne.ElapsedMilliseconds;

			//
			// get best individual info.
			var best = ne.BestIndividual;
			if (showEVectors)
			{
				res.Weights = VectorMath.Split(best.Genes.ToArray(), outsCount);
				var evec = MatrixMath.CreateFromColsList(res.Weights);
				ShowEigenVectors(AnnWeightsDataTable, evec);
			}

			//
			// compute variances of projections onto best individual vectors.
			ne.NeuralNetwork.SetConnectionWeights(best.Genes);
			var outs = MachineLearningElements.GetOutputs(ne.NeuralNetwork, trData);
			var transp = MatrixMath.Transpose(outs);
			res.Vars = new List<float>(VectorMath.VarianceList(transp));

			#region - Write log. -
			var logLines = NeuroEvolutionaryAlgorithm.GetLogLines(ne, eaparams, annProps);

			logLines.Add("\n> Variances of projections onto eigenvectors:");
			logLines.Add(VectorMath.ConvertToString(vars.ToArray(), '\t'));
			var sumVar = VectorMath.Sum(vars.ToArray());
			logLines.Add("> Sum of variances:\t " + sumVar);

			logLines.Add("\n> Variances of projections onto ANN weights:");
			logLines.Add(VectorMath.ConvertToString(res.Vars.ToArray(), '\t'));
			var sumVarAnn = VectorMath.Sum(res.Vars.ToArray());
			logLines.Add("> Sum of variances:\t " + sumVarAnn);
			res.SumVarRate = sumVarAnn/sumVar;
			logLines.Add("> Rate of sums of variances:\t " + res.SumVarRate);

			// ANN responses: the weights are unchanged since 'outs' was computed,
			// so the previously obtained outputs are reused.
			var outsStr = MatrixMath.ConvertToRowsStringsList(outs, '\t');
			logLines.Add("\n> ANN responses:");
			logLines.AddRange(outsStr);
			#endregion

			return res;
		}

		/// <summary>
		/// Builds a log folder name from dimensions, distributions count and threshold.
		/// </summary>
		private string GetFolderName ()
		{
			var res = "d{0}x{1} t{2}";
			return string.Format(res, DimNumeric.Value, DistrNumeric.Value, Pca2Threshold.Value);
		}

		/// <summary>
		/// Writes the log lines (plus training-data eigenvectors) to the given file
		/// and dumps the training inputs to <see cref="TRAINING_DATA_OUTPUT"/>.
		/// </summary>
		/// <param name="filename">Destination log file path.</param>
		/// <param name="lines">Log lines; extended in place with eigenvector info.</param>
		private void WriteLog(string filename, List<string> lines)
		{
			// add info about training data.
			lines.Add("\n> Training data eigenvectors");
			lines.AddRange(MatrixMath.ConvertToColumnsStringsList(eVectors));
			File.WriteAllLines(filename, lines.ToArray());

			// write training data info.
			File.WriteAllLines(TRAINING_DATA_OUTPUT, MachineLearningElements.ConvertInputsToStrings(trData).ToArray());
		}

		/// <summary>
		/// Creates the NE algorithm selected in the combo box, configured from
		/// the current UI state. Defaults to plain EvoPCA.
		/// </summary>
		private NeuroEvolutionaryAlgorithm GetAlgorithm ()
		{
			var selAlg = AlgCombo.SelectedItem.ToString();
			if (selAlg == EVO_PCA2)
			{
				var res = new EvoPCA2();
				res.FactorThreshold = (float)Pca2Threshold.Value;
				res.ReferenceSumVariance = VectorMath.Sum(vars.ToArray());
				res.VariableMutationStep = AdaptMutStepCheck.Checked;
				res.DataAmount = (int)DataAmountNumeric.Value;
				return res;
			}
			if (selAlg == EVO_PCA3)
			{
				var res = new EvoPCA3();
				res.FactorThreshold = (float)Pca2Threshold.Value;
				res.ReferenceSumVariance = VectorMath.Sum(vars.ToArray());
				res.VariableMutationStep = AdaptMutStepCheck.Checked;
				return res;
			}
			// fully qualified to avoid clashing with the EvoPCA namespace.
			var alg = new MentalAlchemy.Molecules.MachineLearning.EvoPCA();
			alg.VariableMutationStep = AdaptMutStepCheck.Checked;
			return alg;
		}

		/// <summary>
		/// Runs the experiment on a user-selected Proben1 file, sweeping the
		/// threshold parameter and writing per-threshold logs.
		/// </summary>
		private void RunProben1Btn_Click(object sender, EventArgs e)
		{
			var dlgRes = openFileDialog1.ShowDialog();
			if (dlgRes != DialogResult.OK) { return; }

			// load Proben1 data.
			var filename = openFileDialog1.FileName;
			LoadProben1Data(filename);
			
			// run algorithm.
			var runsCount = (int)RunsNumeric.Value;
			var dims = trData[0].Data.GetLength(1);

			// make parameter t run values from 5 to 50 with step 5.
			for (int t = 5; t <= 50; t += 5)
			{
				Pca2Threshold.Value = t;

				// perform many runs.
				var res = new List<RunResults>();
				for (int i = 0; i < runsCount; ++i)
				{
					var runRes = Run(dims, true);
					res.Add(runRes);
				}

				var folder = Path.GetFileNameWithoutExtension(filename) + "_" + t;
				if (!Directory.Exists(folder))
				{
					Directory.CreateDirectory(folder);
				}

				// prepare lines for log.
				var logLines = GetLogLines(res);
				WriteLog(Path.Combine(folder, "evopca.log"), logLines);
			}
		}

		/// <summary>
		/// Runs the experiment on MNIST data from a user-selected folder,
		/// sweeping the threshold parameter and writing per-threshold logs.
		/// </summary>
		private void RunMnistBtn_Click(object sender, EventArgs e)
		{
			// locate MNIST data folder.
			var dlgRes = folderBrowserDialog1.ShowDialog();
			if (dlgRes != DialogResult.OK) { return; }

			// load MNIST data.
			var folder = folderBrowserDialog1.SelectedPath;
			LoadMnistData(folder);

			// run algorithm.
			var runsCount = (int)RunsNumeric.Value;
			var dims = trData[0].Data.GetLength(1);

			// make parameter t run values from 10 to 20 with step 10.
			for (int t = 10; t <= 20; t += 10)
			{
				Pca2Threshold.Value = t;

				// perform many runs (no eigenvector display: MNIST is high-dimensional).
				var res = new List<RunResults>();
				for (int i = 0; i < runsCount; ++i)
				{
					var runRes = Run(dims, false);
					res.Add(runRes);
				}

				var logFolder = "MNIST_" + t;
				if (!Directory.Exists(logFolder))
				{
					Directory.CreateDirectory(logFolder);
				}

				// prepare lines for log.
				var logLines = GetLogLines(res);
				WriteLog(Path.Combine(logFolder, "evopca.log"), logLines);
			}
		}
	}

	/// <summary>
	/// Aggregated outcome of a single neuro-evolutionary run (see MainForm.Run).
	/// </summary>
	public class RunResults
	{
		/// <summary>Fitness statistics collected by the NE algorithm during the run.</summary>
		public List<Stats> FitnessStats;
		/// <summary>Variances of projections onto remaining vectors.</summary>
		public List<float> Vars;
		/// <summary>Rate of variances along remaining vectors to the overall sum of variances along eigenvectors.</summary>
		public float SumVarRate;
		/// <summary>Resulting ANN weights, one vector per output unit.</summary>
		public List<float[]> Weights;
		/// <summary>Run duration in milliseconds (truncated to int).</summary>
		public int ElapsedTime;
	}
}
