﻿using System;
using System.Collections.Generic;
using System.IO;
using System.Windows.Forms;
using MentalAlchemy.Atomics;
using MentalAlchemy.Molecules;
using MentalAlchemy.Molecules.MachineLearning;
using MatrixMath=MentalAlchemy.Atomics.MatrixMath;
using StructMath=MentalAlchemy.Atomics.StructMath;

namespace OrthoEA
{
	public partial class MainForm : Form
	{
		private const float RANK_EPS = 1e-3f;

		private float[,] trainFeaturesGram;	// Gram matrix for features from the training set.
		private List<TrainingSample> trainData;
		private List<TrainingSample> validData;
		private List<TrainingSample> testData;
		private string datafile;

		/// <summary>
		/// Creates the main form and populates the selection controls with the
		/// available activation functions, regularizers and fitness functions.
		/// </summary>
		public MainForm()
		{
			InitializeComponent();

			// Activation functions combo; first entry selected by default.
			ActivationCombo.Items.AddRange(ActivationFunctions.Functions());
			ActivationCombo.SelectedIndex = 0;

			// Regularizers combo, with a "no regularizer" placeholder at the top.
			var regularizers = Regularizers.GetRegularizers();
			regularizers.Insert(0, "No regularizer");
			RegCombo.Items.AddRange(regularizers.ToArray());
			RegCombo.SelectedIndex = 0;

			// Fitness functions combo.
			FitnessCombo.Items.AddRange(CombinedAnn.FitnessFunctions.GetFitnessFunctions());
			FitnessCombo.SelectedIndex = 0;

			// The bias option starts switched off.
			UseBiasCheck.Checked = false;
		}

		/// <summary>
		/// Performs several independent EA runs (each logged separately) and
		/// writes the statistics averaged over all runs to "run.log".
		/// </summary>
		/// <param name="sender">Event source (unused).</param>
		/// <param name="e">Event arguments (unused).</param>
		private void EABtn_Click(object sender, EventArgs e)
		{
			// average results over 10 runs.
			const int RUNS_COUNT = 10;
			var allRunStats = new List<List<Stats>>();
			for (var run = 1; run <= RUNS_COUNT; run++)
			{
				allRunStats.Add(RunEA(string.Format("run_{0}.log", run)));
			}
			var averaged = StructMath.Average(allRunStats);
			WriteLog("run.log", StructMath.ConvertToStringsList(averaged, true));
		}

		/// <summary>
		/// Runs a single neuro-evolutionary search configured from the UI controls and
		/// writes a detailed report (ranks, Gram matrices with determinants, best-network
		/// info) to <paramref name="logfile"/> and to the results box.
		/// Assumes trainData and testData have already been loaded.
		/// </summary>
		/// <param name="logfile">Path of the log file the run report is written to.</param>
		/// <returns>Per-iteration fitness statistics collected by the EA.</returns>
		private List<Stats> RunEA (string logfile)
		{
			var lines = new List<string>();

			// Feature count = column count of a sample's data matrix.
			var inputsCount = trainData[0].Data.GetLength(1);

			// Objective function and (optional) regularizer are chosen in the UI.
			var objFunc = CombinedAnn.FitnessFunctions.GetFitnessFunction((string)FitnessCombo.SelectedItem);
			objFunc.Regularizer = Regularizers.GetRegularizer((string)RegCombo.SelectedItem);

			//
			// Create ANN: identity input layer plus one layer whose size is the
			// input count scaled by the Alpha factor from the UI.
			var netProps = new NeuralNetProperties();
			netProps.actFunctions = new [] {ActivationFunctions.Identity, ActivationFunctions.GetActivationFunction((string)ActivationCombo.SelectedItem)};
			netProps.nodesNumber = new[] { inputsCount, (int)(inputsCount * AlphaNumeric.Value) };
			netProps.UseBias = UseBiasCheck.Checked;
			objFunc.Network = LayeredNeuralNetwork.CreateNetwork(netProps);
			objFunc.TrainData = trainData;

			if (objFunc is RegGramMatrixObjFunction)
			{
				//((RegGramMatrixObjFunction)objFunc).Alpha = 0.5f;	// use both variance and the determinant.
				((RegGramMatrixObjFunction)objFunc).Alpha = 0f;	// use the determinant value only.
				//((RegGramMatrixObjFunction)objFunc).Alpha = 1f;	// use mean variance value only.
			}

			//
			// Create NE algorithm.
			var ne = new NeuroEvolutionaryAlgorithm();
			ne.FitnessFunction = objFunc.ObjectiveFunction;
			ne.NeuralNetwork = objFunc.Network;
			// NOTE(review): global comparator state — concurrent runs would interfere.
			FitnessComparator.MinimizeFitness = objFunc.MinimizeFitness;

			//
			// Run NE algorithm; genome length = total connection count of the network.
			var pars = eaPropertiesControl1.Parameters;
			pars.IndividualSize = ne.NeuralNetwork.GetTotalConnectionsNumber();
			ne.Run(pars);
			var stats = ne.FitnessStats;
			lines.AddRange(StructMath.ConvertToStringsList(stats, true));

			//
			// Load the best individual's genes into the network, then
			// calculate final ANN output signals rank.
			ne.NeuralNetwork.SetConnectionWeights(ne.BestIndividual.Genes.ToArray());
			if (objFunc is GramMatrixVarianceObjFunction)
			{
				ne.NeuralNetwork.NormalizeWeights();
			}
			float[,] gram;
			var allOuts = new List<float[]>();
			var rank = GetOutputsInfo(ne.NeuralNetwork, trainData, out gram, out allOuts);
			// write ANN responses on the training data.
			var strResps = MatrixMath.ConvertToStringsList(allOuts);
			FileIO.WriteColumns("trainresp.log", strResps, "ANN responses on training data.");
			
			// Same evaluation on the test data (allOuts is overwritten by the out parameter).
			float[,] gramTest;
			var rankTest = GetOutputsInfo(ne.NeuralNetwork, testData, out gramTest, out allOuts);
			// write ANN responses on the test data.
			strResps = MatrixMath.ConvertToStringsList(allOuts);
			FileIO.WriteColumns("testresp.log", strResps, "ANN responses on test data.");

			//
			// Assemble the report: ranks, Gram matrices with determinants, best ANN info.
			lines.Add("");
			lines.Add(string.Format("Starting rank:\t{0}", InputRankBox.Text));
			lines.Add(string.Format("Resulting rank (training):\t{0}", rank));
			lines.Add(string.Format("Resulting rank (testing):\t{0}", rankTest));
			lines.Add("");		// write Gram matrix for input features.
			lines.Add("Gram matrix for input features:");
			lines.AddRange(MatrixMath.ConvertToRowsStringsList(trainFeaturesGram, '\t'));
			lines.Add(string.Format("Gram matrix determinant:\t{0}", Numerics.Determinant(trainFeaturesGram, 0f)));
			lines.Add("");		// write Gram matrix for ANN output signals.
			lines.Add("Gram matrix for ANN output signals (training data):");
			lines.AddRange(MatrixMath.ConvertToRowsStringsList(gram, '\t'));
			lines.Add(string.Format("Gram matrix determinant:\t{0}", Numerics.Determinant(gram, 0f)));
			lines.Add("");		// write Gram matrix for ANN output signals.
			lines.Add("Gram matrix for ANN output signals (test data):");
			lines.AddRange(MatrixMath.ConvertToRowsStringsList(gramTest, '\t'));
			lines.Add(string.Format("Gram matrix determinant:\t{0}", Numerics.Determinant(gramTest, 0f)));
			lines.Add("");		// ANN weights.
			lines.Add("Best ANN info:");
			lines.AddRange(ne.NeuralNetwork.Layers[1].ToStringsNorm());
			
			// Show the report in the UI and persist it.
			ResBox.Lines = lines.ToArray();

			//
			// write log.
			WriteLog(logfile, lines);

			return stats;
		}

		/// <summary>
		/// Lets the user pick a Proben1 data file, loads train/validation/test sets,
		/// shows the training-input rank/size and caches the Gram matrix of the
		/// L2-normalized training feature columns.
		/// </summary>
		/// <param name="sender">Event source (unused).</param>
		/// <param name="e">Event arguments (unused).</param>
		private void LoadBtn_Click(object sender, EventArgs e)
		{
			// Bail out if the user cancels the file dialog.
			if (openFileDialog1.ShowDialog() != DialogResult.OK) return;

			datafile = openFileDialog1.FileName;
			MachineLearningElements.LoadProben1Data(datafile, out trainData, out validData, out testData);

			// Show rank and size of the training input signals.
			InputRankBox.Text = GetRank(trainData).ToString();
			InputCountBox.Text = trainData[0].Data.Length.ToString();

			// Gram matrix of the L2-normalized training feature columns.
			var features = MachineLearningElements.ConvertToMatrix(trainData);
			var featureCols = MatrixMath.ConvertToColumnsList(features);
			for (var c = 0; c < featureCols.Count; c++)
			{
				featureCols[c] = VectorMath.NormalizeL2(featureCols[c]);
			}
			trainFeaturesGram = MatrixMath.ComputeGramMatrix(featureCols);
		}

		/// <summary>
		/// Writes the run configuration (data file, EA parameters, UI selections)
		/// followed by the given result lines to a log file, overwriting it.
		/// </summary>
		/// <param name="logfile">Path of the log file to (over)write.</param>
		/// <param name="lines">EA search result lines appended after the header.</param>
		private void WriteLog (string logfile, List<string> lines)
		{
			using (var writer = new StreamWriter(logfile))
			{
				writer.WriteLine(string.Format("Source data file:\t{0}", datafile));
				writer.WriteLine();
				EAElements.Write(writer, eaPropertiesControl1.Parameters);
				writer.WriteLine();
				writer.WriteLine(string.Format("Alpha:\t{0}", AlphaNumeric.Value));
				writer.WriteLine(string.Format("Activation function:\t{0}", ActivationCombo.SelectedItem));
				// Bug fix: this line previously repeated the copy-pasted
				// "Source data file" label for the regularizer value.
				writer.WriteLine(string.Format("Regularizer:\t{0}", RegCombo.SelectedItem));
				writer.WriteLine(string.Format("Fitness function:\t{0}", FitnessCombo.SelectedItem));
				writer.WriteLine();
				writer.WriteLine("EA search results:");
				FileIO.WriteAllLines(writer, lines);
			}
		}

		#region - Static methods. -
		/// <summary>
		/// Returns the rank of the feature matrix built from the samples' input data,
		/// using the RANK_EPS tolerance.
		/// </summary>
		/// <param name="data">Samples whose input data form the matrix rows.</param>
		/// <returns>Numerical rank of the data matrix.</returns>
		private static int GetRank(List<TrainingSample> data)
		{
			return Numerics.Rank(MachineLearningElements.ConvertToMatrix(data), RANK_EPS);
		}

		/// <summary>
		/// Feeds every sample through the network and returns the rank of the matrix
		/// of output signals (one row per sample), computed with the RANK_EPS tolerance.
		/// </summary>
		/// <param name="net">Network used to compute the responses.</param>
		/// <param name="data">Samples to evaluate.</param>
		/// <param name="gram">Gram matrix of the L2-normalized output columns.</param>
		/// <param name="allOuts">Raw network outputs, one array per sample.</param>
		/// <returns>Rank of the output-signals matrix.</returns>
		private static int GetOutputsInfo (LayeredNeuralNetwork net, IEnumerable<TrainingSample> data, out float[,] gram, out List<float[]> allOuts)
		{
			// Collect the network's response for each sample.
			allOuts = new List<float[]>();
			foreach (var sample in data)
			{
				net.Calculate(MatrixMath.ConvertToVector(sample.Data));
				List<float> sampleOuts;
				net.GetOutputs(out sampleOuts);
				allOuts.Add(sampleOuts.ToArray());
			}

			var responses = MatrixMath.CreateFromRowsList(allOuts);

			// Gram matrix of the L2-normalized output columns.
			var columns = MatrixMath.ConvertToColumnsList(responses);
			for (var c = 0; c < columns.Count; c++)
			{
				columns[c] = VectorMath.NormalizeL2(columns[c]);
			}
			gram = MatrixMath.ComputeGramMatrix(columns);

			return Numerics.Rank(responses, RANK_EPS);
		}
		#endregion

		/// <summary>
		/// Runs EA on artificial 2D data sampled from several random bivariate gaussians
		/// (component count taken from the UI), then appends the distribution parameters,
		/// the autocorrelation eigen-analysis and the generated points to the run's log.
		/// </summary>
		/// <param name="sender">Event source (unused).</param>
		/// <param name="e">Event arguments (unused).</param>
		private void Run2dTestBtn_Click(object sender, EventArgs e)
		{
			const float min = -10, max = -min;
			const int dim = 2;
			// Number of gaussian components, taken from the UI.
			var size = (int)DistrCountNumeric.Value;

			//
			// Distributions: random mean in [min, max]^dim with a random covariance.
			var distr = new List<GaussianDistribution>();
			for (int i = 0; i < size; i++)
			{
				var mu = VectorMath.CreateRandomVector(ContextRandom.rand, dim, min, max);
				var cov = MatrixMath.RandomCovariance(dim, ContextRandom.rand);
				var gauss = new GaussianDistribution(mu, cov);
				distr.Add(gauss);
			}

			//
			// generate random points using given distributions.
			// Each point is drawn from a uniformly-chosen component.
			var dataCount = 1000;
			trainData = new List<TrainingSample>();
			testData = new List<TrainingSample>();
			for (int i = 0; i < dataCount; i++)
			{
				var idx = ContextRandom.Next(size);
				var tempV = distr[idx].Next();
				var tempSample = new TrainingSample();
				tempSample.Data = MatrixMath.CreateFromVector(tempV, tempV.Length);
				trainData.Add(tempSample);

				idx = ContextRandom.Next(size);
				tempV = distr[idx].Next();
				tempSample = new TrainingSample();
				tempSample.Data = MatrixMath.CreateFromVector(tempV, tempV.Length);
				testData.Add(tempSample);
			}

			//
			// calculate Gram matrix for the (per-column L2) normalized input data.
			var m = MachineLearningElements.ConvertToMatrix(trainData);
			var cols = MatrixMath.ConvertToColumnsList(m);
			for (int i = 0; i < cols.Count; i++) { cols[i] = VectorMath.NormalizeL2(cols[i]); }
			trainFeaturesGram = MatrixMath.ComputeGramMatrix(cols);

			var logFile = "2dtest"+size+".log";
			RunEA(logFile);

			// extend log-file by adding distributions params and test data points.
			using (var writer = new StreamWriter(logFile, true))
			{
				for (int i = 0; i < size; i++)
				{
					writer.WriteLine("\n> " + (i+1) + "-th distribution:");
					FileIO.WriteAllLines(writer, distr[i].ToStrings());
				}

				// compute autocorrelation for training data and its eigen decomposition.
				var ac = MachineLearningElements.ComputeAutocorrelationMatrix(trainData);
				float[,] evec;
				float[] eval;
				Numerics.EigenMathNet.Eig(ac, out evec, out eval);
				writer.WriteLine("\n> Training data autocorrelation matrix:");
				FileIO.WriteAllLines(writer, MatrixMath.ConvertToRowsStringsList(ac, '\t'));
				writer.WriteLine("\n> Training data eigenvectors (by columns):");
				FileIO.WriteAllLines(writer, MatrixMath.ConvertToRowsStringsList(evec, '\t'));
				writer.WriteLine("\n> Training data eigenvalues:");
				writer.WriteLine(VectorMath.ConvertToString(eval, '\t'));

				writer.WriteLine("\n> Training data points:");
				FileIO.WriteAllLines(writer, MachineLearningElements.ConvertInputsToStrings(trainData));

				writer.WriteLine("\n> Test data points:");
				FileIO.WriteAllLines(writer, MachineLearningElements.ConvertInputsToStrings(testData));
			}
		}

		/// <summary>
		/// Runs EA on artificial data sampled from a single random bivariate gaussian,
		/// then appends the distribution parameters and the generated points to the log.
		/// </summary>
		/// <param name="sender">Event source (unused).</param>
		/// <param name="e">Event arguments (unused).</param>
		private void Run2dTest1Btn_Click(object sender, EventArgs e)
		{
			const float min = -10, max = -min;
			const int size = 2;

			// Single gaussian with a random mean in [min, max]^2 and random covariance.
			var gauss1 = new GaussianDistribution(
				VectorMath.CreateRandomVector(ContextRandom.rand, size, min, max),
				MatrixMath.RandomCovariance(size, ContextRandom.rand));

			// Draw equally sized training and test sets from the distribution.
			var dataCount = 1000;
			trainData = new List<TrainingSample>();
			testData = new List<TrainingSample>();
			for (var i = 0; i < dataCount; i++)
			{
				var trainV = gauss1.Next();
				trainData.Add(new TrainingSample { Data = MatrixMath.CreateFromVector(trainV, trainV.Length) });

				var testV = gauss1.Next();
				testData.Add(new TrainingSample { Data = MatrixMath.CreateFromVector(testV, testV.Length) });
			}

			// Gram matrix of the L2-normalized training feature columns.
			var featureCols = MatrixMath.ConvertToColumnsList(MachineLearningElements.ConvertToMatrix(trainData));
			for (var c = 0; c < featureCols.Count; c++)
			{
				featureCols[c] = VectorMath.NormalizeL2(featureCols[c]);
			}
			trainFeaturesGram = MatrixMath.ComputeGramMatrix(featureCols);

			var logFile = "2dtest1.log";
			RunEA(logFile);

			// Append the distribution parameters and the generated points to the log.
			using (var writer = new StreamWriter(logFile, true))
			{
				writer.WriteLine("\n> Distribution:");
				FileIO.WriteAllLines(writer, gauss1.ToStrings());

				writer.WriteLine("\n> Training data points:");
				FileIO.WriteAllLines(writer, MachineLearningElements.ConvertInputsToStrings(trainData));

				writer.WriteLine("\n> Test data points:");
				FileIO.WriteAllLines(writer, MachineLearningElements.ConvertInputsToStrings(testData));
			}
		}
	}
}
