﻿using System;
using System.Collections.Generic;
using System.IO;
using System.Windows.Forms;
using Encog.Neural.Data.Basic;
using Encog.Neural.Networks;
using Encog.Neural.Networks.Training;
using Encog.Neural.Networks.Training.Propagation.Resilient;
using Encog.Neural.NeuralData;
using MentalAlchemy.Atomics;
using MentalAlchemy.Compounds;
using MentalAlchemy.Molecules;
using MentalAlchemy.Molecules.MachineLearning;

namespace SeparableNE
{
	public partial class MainForm : Form
	{
		private const int NET2_TRIALS = 10;
		private const string STANDARD = "Standard";
		private const string UNIFORM_BAGGING = "Uniform bagging";
		private const string BOOSTING = "AdaBoost.M1";

		private const string NO_REGULARIZER = "No regularizer";

		private List<TrainingSample> trainData;
		private List<TrainingSample> validData;
		private List<TrainingSample> testData;
		private LayeredNeuralNetwork net1;
		private BasicNetwork net2;
		private CombinedAnn combAnn;
		private Bagging bagging;
		private AdaBoostM1 boosting;

		public MainForm()
		{
			InitializeComponent();

			// Label each layer-properties control sequentially ("Layer 1", "Layer 2", ...).
			var layerIndex = 1;
			foreach (var layerProp in neuralNetProperties1.LayerProperties)
			{
				layerProp.Text = string.Format("Layer {0}", layerIndex);
				++layerIndex;
			}

			// Populate the objective-function combo-box and pre-select the first entry.
			foreach (var fitnessName in CombinedAnn.FitnessFunctions.GetFitnessFunctions())
			{
				ObjectiveFunctionCombo.Items.Add(fitnessName);
			}
			ObjectiveFunctionCombo.SelectedIndex = 0;

			// Populate the bagging-mode combo-box; "Standard" is the default.
			BaggingCombo.Items.Add(STANDARD);
			BaggingCombo.Items.Add(UNIFORM_BAGGING);
			BaggingCombo.Items.Add(BOOSTING);
			BaggingCombo.SelectedIndex = 0;

			// Populate the regularizer combo-box; "No regularizer" is first and the default.
			RegularizerCombo.Items.Add(NO_REGULARIZER);
			RegularizerCombo.Items.AddRange(Regularizers.GetRegularizers().ToArray());
			RegularizerCombo.SelectedIndex = 0;
		}

		private void LoadTrainDataBtn_Click(object sender, EventArgs e)
		{
			// Ask the user for a Proben1 data file; do nothing if the dialog is cancelled.
			if (openFileDialog1.ShowDialog() != DialogResult.OK) return;

			var filename = openFileDialog1.FileName;
			dataFileBox.Text = filename;

			// Load the train / validation / test splits from the selected file.
			MachineLearningElements.LoadProben1Data(filename, out trainData, out validData, out testData);

			// Sync the ANN-1 size controls: inputs = sample width, outputs = alpha * inputs.
			var sampleWidth = trainData[0].Data.GetLength(1);
			neuralNetProperties1.InputsCount = sampleWidth;
			neuralNetProperties1.OutputsCount = (int)(sampleWidth * (float)AlphaNumeric.Value);
		}

		private void TrainBtn_Click(object sender, EventArgs e)
		{
			// All training modes require the three data splits to be loaded first.
			if (!ValidateData())
			{
				MessageBox.Show("Load training data.");
				return;
			}

			#region - Algorithm implementation. -
			if (UseEvoPcaCheck.Checked)
			{	// run classification using EvoPCA over a sweep of threshold values.
				for (var t = 5f; t <= 50f; t += 5)
				{
					EvoPcaTraining(string.Format("evopca_{0}.log", t), t);
				}
			}
			else
			{
				// Sweep the compression ratio alpha; dispatch on the selected ensemble mode.
				var baggingType = (string)BaggingCombo.SelectedItem;
				for (float a = 0.5f; a <= 3; a += 0.5f)
				{
					AlphaNumeric.Value = (decimal)a;	// keeps dependent UI controls in sync.

					switch (baggingType)
					{
						case STANDARD:
							CombinedTraining(a, string.Format("std_{0}.log", a));
							break;
						case UNIFORM_BAGGING:
							UniformBaggingTraining();
							break;
						case BOOSTING:
							BoostingTraining(a, string.Format("boosting_{0}.log", a));
							break;
						default:
							// fix: specific exception type instead of bare Exception
							// (still caught by any existing catch (Exception) handler).
							throw new InvalidOperationException("Unrecognized bagging type.");
					}
				}
			}
			#endregion
			// fix: removed the pointless trailing "return;" at the end of the method.
		}

		/// <summary>
		/// Returns true when all three data splits (training, validation, test) are loaded.
		/// </summary>
		private bool ValidateData ()
		{
			return trainData != null
				&& validData != null
				&& testData != null;
		}

		/// <summary>
		/// Trains the 2nd (Encog) network on top of the 1st network's outputs.
		/// Runs 10 (!!!) independent experiments, shows progress in [ResBox] and
		/// writes a timestamped results log next to the executable.
		/// </summary>
		/// <returns>Statistics over the per-run test error rates.</returns>
		private Stats TrainAnn2()
		{
			//
			// Load 1st ANN if it is not defined.
			if (net1 == null)
			{
				TrainBtn_Click(null, null);
			}

			//
			// Build the training set for the 2nd network: feed each training sample
			// through the 1st network and pair its outputs with the desired response.
			INeuralDataSet samples = new BasicNeuralDataSet();
			for (int i = 0; i < trainData.Count; ++i)
			{
				var row = MatrixMath.GetRow(trainData[i].Data, 0);
				net1.Calculate(row);

				float[] outs;
				net1.GetOutputs(out outs);

				// convert data from floats to doubles and create new training samples.
				var outsD = VectorMath.ConvertToDoubles(outs);
				var outRow = MatrixMath.GetRow(trainData[i].Response, 0);
				var outRowD = VectorMath.ConvertToDoubles(outRow);
				samples.Add(new BasicNeuralData(outsD), new BasicNeuralData(outRowD));
			}

			// 2nd network: sigmoid layer fed by net1's outputs, linear output layer.
			var net2Props = new NeuralNetProperties();
			net2Props.nodesNumber = new[] { net1.OutputsNumber, trainData[0].Response.Length };
			net2Props.actFunctions = new ActivationFunction[] { ActivationFunctions.Sigmoid, ActivationFunctions.Linear};

			const int RUNS_NUMBER = 10;
			var lines = new List<string>();
			var ers = new List<float>();
			for (int i = 0; i < RUNS_NUMBER; ++i)
			{
				lines.Add(String.Format("Run #:\t{0}", (i + 1)));

				#region - Train. -
				net2 = MachineLearningElements.CreateEncogBasicNetwork(net2Props);
				ITrain train = new ResilientPropagation(net2, samples);

				//
				// divide training epochs into 10 steps to display
				// training progress.
				//
				int step = (int)Net2EpochsNumeric.Value / 10;
				for (int j = 0; j < 10; ++j)
				{
					int count = 0;
					do
					{
						train.Iteration();
						++count;
					} while (count < step);

					// fix: label the error with the number of epochs actually completed,
					// (j + 1) * step; the previous label (j * step) lagged one chunk behind.
					lines.Add(String.Format("{0}:\t{1}", (j + 1) * step, train.Error));
					ResBox.Lines = lines.ToArray();
				}
				#endregion

				#region - Test. -
				float testErrorRate = TestCombinedNetwork();
				ers.Add(testErrorRate);
				lines.Add(String.Format("Error rate:\t{0}", testErrorRate));
				lines.Add("");
				#endregion
			}	// experiments iterations.

			#region - Process error rates from different experiments. -
			var stats = VectorMath.CalculateStats(ers.ToArray());
			lines.Add(stats.GetStatsString());
			lines.Add("");

			ResBox.Lines = lines.ToArray();

			//
			// write log; ':' from the time string is invalid in Windows file names.
			string filename = "results_" + DateTime.Now.ToShortDateString() + "_" + DateTime.Now.ToLongTimeString() + "_.log";
			filename = filename.Replace(':', '.');
			using (var writer = new StreamWriter(filename))
			{
				writer.WriteLine(String.Format("Data file:\t{0}", dataFileBox.Text));

				writer.Write(String.Format("Network architecture:\t -"));
				foreach (var layer in net2.LayerTags)
				{
					writer.Write(String.Format("{0} - ", layer.Value.NeuronCount));
				}
				writer.WriteLine("");
				writer.WriteLine("");

				foreach (string line in lines)
				{
					writer.WriteLine(line);
				}
			}

			#endregion

			return stats;
		}

		/// <summary>
		/// Tests the cascade of the 1st and 2nd ANNs on the test set.
		/// A sample counts as an error when the index of the maximal network output
		/// differs from the index of the 1.0 entry in the desired response.
		/// </summary>
		/// <returns>Classification error rate in percent (0 for an empty test set).</returns>
		private float TestCombinedNetwork()
		{
			int size = testData.Count;
			if (size == 0) return 0f;	// fix: guard against a NaN from the 0/0 division below.

			int erCount = 0;
			for (int i = 0; i < size; ++i)
			{
				// 1st stage: project the raw sample through the evolved network.
				var row = MatrixMath.GetRow(testData[i].Data, 0);
				net1.Calculate(row);

				float[] outs;
				net1.GetOutputs(out outs);

				// 2nd stage: classify the projection with the Encog network.
				var outsD = VectorMath.ConvertToDoubles(outs);
				var input = new BasicNeuralData(outsD);
				var output = net2.Compute(input);

				// winner-takes-all comparison against the 1-of-N desired response.
				var outRow = VectorMath.CreateFromDoubles(output.Data);
				var winner = VectorMath.IndexOfMax(outRow);
				var rowReq = MatrixMath.GetRow(testData[i].Response, 0);
				var reqWinner = VectorMath.FirstIndexOf(rowReq, 1.0f);

				if (winner != reqWinner)
				{
					++erCount;
				}
			}

			//
			// todo: calculate confusion matrix.
			var testErrorRate = 100.0f * erCount / size;

			return testErrorRate;
		}

		#region - Training methods. -
		/// <summary>
		/// Runs the EvoPCA experiment: RUNS_NUMBER independent trainings of the combined
		/// ANN with a forced linear, bias-free ANN1 and a variance-maximization objective.
		/// Per-run training/validation/test errors, the resulting ANN1 output counts and
		/// ANN2 epoch statistics are shown in [ResBox] and written to [filename].
		/// </summary>
		/// <param name="filename">Log file the results are written to.</param>
		/// <param name="t">Threshold forwarded to CombinedAnnElements.TrainEvoPca; its
		/// exact semantics live in that method — TODO confirm.</param>
		public void EvoPcaTraining(string filename, float t)
		{
			var a = 1f;	// default alpha.

			// 1. Set 1st and 2nd ANNs parameters.
			// ANN1: square (inputs == outputs) identity/linear network without bias.
			int inputsNumber = trainData[0].Data.Length, net1OutputsNumber = inputsNumber;
			var net1Props = new NeuralNetProperties();
			net1Props.nodesNumber = new int[] { inputsNumber, inputsNumber };
			net1Props.actFunctions = new ActivationFunction[] { ActivationFunctions.Identity, ActivationFunctions.Linear };
			net1Props.UseBias = false;
			// NOTE(review): [net2Props] is configured here but never assigned to
			// [annProps.PropsNet2] (that assignment is commented out further down), so the
			// ANN2 architecture actually used comes from CreateCombAnnProps — verify intent.
			var net2Props = new NeuralNetProperties();
			//net2Props.nodesNumber = new[] { net1OutputsNumber, trainData[0].Response.Length };
			//net2Props.actFunctions = new ActivationFunction[] { ActivationFunctions.Linear, ActivationFunctions.Sigmoid };
			net2Props.nodesNumber = new[] { net1OutputsNumber, net1OutputsNumber * 2, trainData[0].Response.Length };
			net2Props.actFunctions = new ActivationFunction[] { ActivationFunctions.Linear, ActivationFunctions.Sigmoid, ActivationFunctions.Sigmoid };

			// note: !!!
			const int RUNS_NUMBER = 10;

			// EvoPCA always optimizes ANN1 for maximal output variance.
			var funcName = CombinedAnn.FitnessFunctions.VARIANCE_MAX;
			combAnn = new CombinedAnn();
			combAnn.Ann1FitnessFunction = CombinedAnn.FitnessFunctions.GetFitnessFunction(funcName);
			combAnn.EAParameters = eaPropertiesControl1.Parameters;
			combAnn.TrainData = trainData;
			combAnn.ValidationData = validData;
			combAnn.TestData = testData;
			combAnn.UnitedTraining = UnitedCheck.Checked;

			//var reg = Regularizers.GetRegularizer((string)RegularizerCombo.SelectedItem);
			var canns = new List<CombinedAnnTrainingRes>();
			var trainErs = new List<float>();
			var validErs = new List<float>();
			var ers = new List<float>();

			var annProps = CreateCombAnnProps(a);
			annProps.PropsNet1 = net1Props;	// override general properties by forced linear ANN1.
			//annProps.TrainData = trainData;
			//annProps.ValidationData = validData;
			//annProps.EAParams = eaPropertiesControl1.Parameters;
			//annProps.Epochs = (int)Net2EpochsNumeric.Value;
			//annProps.ObjFunction = combAnn.Ann1FitnessFunction;
			//annProps.PropsNet2 = net2Props;
			//annProps.Ann2Trials = NET2_TRIALS;
			//annProps.Regularizer = reg;
			//annProps.UnitedTraining = UnitedCheck.Checked;
			//annProps.UseEarlyStopping = EarlyStoppingCheck.Checked;

			var net1outs = new float[RUNS_NUMBER];
			var net2EpochsStats = new List<Stats>();
			for (int i = 0; i < RUNS_NUMBER; i++)
			{
				CombinedAnnTrainingRes res;
				res = CombinedAnnElements.TrainEvoPca(annProps, t);
				canns.Add(res);
				net2EpochsStats.Add(VectorMath.CalculateStats(res.Network2TrainingEpochs.ToArray()));
				net1outs[i] = res.Network1.OutputsNumber;

				//
				// evaluate best ANN-2 picked using the validation data set.
				combAnn.Network1 = res.Network1;
				combAnn.Network2 = res.Network2;
				var er = CombinedAnnElements.TestCombinedAnn(combAnn, trainData);
				trainErs.Add(er);
				er = CombinedAnnElements.TestCombinedAnn(combAnn, validData);
				validErs.Add(er);
				er = CombinedAnnElements.TestCombinedAnn(combAnn, testData);
				ers.Add(er);
			}
			// Assemble a per-run table: columns are train / validation / test error and
			// the number of ANN1 outputs each run ended up with.
			var allErs = new List<List<float>>();
			allErs.Add(trainErs);
			allErs.Add(validErs);
			allErs.Add(ers);
			allErs.Add(new List<float>(net1outs));
			var ersM = MatrixMath.CreateFromColsList(allErs);
			var lines = MatrixMath.ConvertToRowsStringsList(ersM, '\t');
			lines.Insert(0, "> Threshold value:\t" + t);
			lines.Insert(1, "\n> Individual runs testing results:");
			lines.Insert(2, "Training er.\tValidation er.\tTest error\tNet1 outputs #");
			lines.Add("\nIndividual runs stats (training, validation, test):");
			lines.Add(VectorMath.CalculateStats(trainErs.ToArray()).GetStatsString());
			lines.Add(VectorMath.CalculateStats(validErs.ToArray()).GetStatsString());
			lines.Add(VectorMath.CalculateStats(ers.ToArray()).GetStatsString());

			lines.Add("\n> ANN2 training epochs stats:");
			lines.AddRange(StructMath.ConvertToStringsList(net2EpochsStats, false));

			// write [stats].
			var stats = CombinedAnnElements.TestTrainedCombinedAnns(canns, combAnn.ClassIndex, testData);
			var statLines = StructMath.ConvertToStringsList(stats, true);
			lines.Add("");
			lines.Add("Testing stats:");
			lines.AddRange(statLines);

			ResBox.Lines = lines.ToArray();

			WriteResults(filename, lines, net1Props, funcName);
		}

		/// <summary>
		/// Combined training of the ANN: RUNS_NUMBER independent runs with the objective
		/// function selected in the UI. Depending on the UI check-boxes the run uses
		/// plain, united, or advanced-selection training. Per-run errors and summary
		/// statistics are shown in [ResBox] and written to [filename].
		/// </summary>
		/// <param name="a">Compression ratio: ANN1 outputs = inputs * a.</param>
		/// <param name="filename">Log file the results are written to.</param>
		public void CombinedTraining (float a, string filename)
		{
			// 1. Set 1st and 2nd ANNs parameters.
			// net1Props comes from the UI; net2Props maps ANN1's outputs onto the classes.
			int inputsNumber = trainData[0].Data.Length, net1OutputsNumber = (int)(inputsNumber * a);
			var net1Props = neuralNetProperties1.NeuralProperties;
			var net2Props = new NeuralNetProperties();
			net2Props.nodesNumber = new[] { net1OutputsNumber, trainData[0].Response.Length };
			net2Props.actFunctions = new ActivationFunction[] { ActivationFunctions.Linear, ActivationFunctions.Sigmoid};

			// note: !!!
			const int RUNS_NUMBER = 10;

			var funcName = (string)ObjectiveFunctionCombo.SelectedItem;
			combAnn = new CombinedAnn();
			combAnn.Ann1FitnessFunction = CombinedAnn.FitnessFunctions.GetFitnessFunction(funcName);
			combAnn.EAParameters = eaPropertiesControl1.Parameters;
			combAnn.TrainData = trainData;
			combAnn.ValidationData = validData;
			combAnn.TestData = testData;
			combAnn.UnitedTraining = UnitedCheck.Checked;

			//var reg = Regularizers.GetRegularizer((string)RegularizerCombo.SelectedItem);
			var canns = new List<CombinedAnnTrainingRes>();
			var trainErs = new List<float>();
			var validErs = new List<float>();
			var ers = new List<float>();

			// [annProps] bundles the UI state for the trainers; note it is built AFTER
			// combAnn.Ann1FitnessFunction was set above (CreateCombAnnProps reads it).
			var annProps = CreateCombAnnProps(a);
			//var annProps = new CombinedAnnProperties();
			//annProps.TrainData = trainData;
			//annProps.ValidationData = validData;
			//annProps.EAParams = eaPropertiesControl1.Parameters;
			//annProps.PropsNet1 = net1Props;
			//annProps.Epochs = (int)Net2EpochsNumeric.Value;
			//annProps.ObjFunction = combAnn.Ann1FitnessFunction;
			//annProps.PropsNet2 = net2Props;
			//annProps.Ann2Trials = NET2_TRIALS;
			//annProps.Regularizer = reg;
			//annProps.UnitedTraining = UnitedCheck.Checked;
			//annProps.UseEarlyStopping = EarlyStoppingCheck.Checked;

			for (int i = 0; i < RUNS_NUMBER; i++)
			{
				CombinedAnnTrainingRes res;
				if (!AdvSelectionCheck.Checked)
				{
					if (!combAnn.UnitedTraining)
					{
						res = CombinedAnnElements.Train(annProps);
					}
					else
					{
						res = CombinedAnnElements.TrainUni(annProps);
					}
				}
				else
				{
					res = CombinedAnnElements.TrainAdvSelection(trainData, validData, net1Props, eaPropertiesControl1.Parameters,
											  combAnn.Ann1FitnessFunction, net2Props,
											  (int)Net2EpochsNumeric.Value,
											  NET2_TRIALS, annProps.Regularizer);
				}
				canns.Add(res);

				//
				// evaluate best ANN-2 picked using the validation data set.
				combAnn.Network1 = res.Network1;
				combAnn.Network2 = res.Network2;
				var er = CombinedAnnElements.TestCombinedAnn(combAnn, trainData);
				trainErs.Add(er);
				er = CombinedAnnElements.TestCombinedAnn(combAnn, validData);
				validErs.Add(er);
				er = CombinedAnnElements.TestCombinedAnn(combAnn, testData);
				ers.Add(er);
			}
			// Assemble a per-run table: columns are train / validation / test error.
			var allErs = new List<List<float>>();
			allErs.Add(trainErs);
			allErs.Add(validErs);
			allErs.Add(ers);
			var ersM = MatrixMath.CreateFromColsList(allErs);
			//var lines = new List<string> (VectorMath.ConvertToStringsArray(ers.ToArray()));
			var lines = MatrixMath.ConvertToRowsStringsList(ersM, '\t');
			lines.Insert(0, "Individual runs testing results:");
			lines.Insert(1, "Training er.\tValidation er.\tTest error");
			lines.Add("Individual runs stats (training, validation, test):");
			lines.Add(VectorMath.CalculateStats(trainErs.ToArray()).GetStatsString());
			lines.Add(VectorMath.CalculateStats(validErs.ToArray()).GetStatsString());
			lines.Add(VectorMath.CalculateStats(ers.ToArray()).GetStatsString());

			// write [stats].
			var stats = CombinedAnnElements.TestTrainedCombinedAnns(canns, combAnn.ClassIndex, testData);
			var statLines = StructMath.ConvertToStringsList(stats, true);
			lines.Add("");
			lines.Add("Testing stats:");
			lines.AddRange(statLines);

			ResBox.Lines = lines.ToArray();

			WriteResults(filename, lines, net1Props, funcName);
		}

		/// <summary>
		/// Trains a committee of combined ANNs, one per available objective function
		/// (skipping functions incompatible with the current alpha), and reports both
		/// the individual test errors and the bagged committee error in [ResBox].
		/// </summary>
		public void BaggingTraining ()
		{
			// todo: add a parameter to define how many ANNs should be trained for each objective function (a kind of 'ANN packs' number).
			// todo: implement bagging with bootstrapping.
			// fix: removed the unused locals (inputsNumber, net1OutputsNumber,
			// net1Props, net2Props) — the actual network properties come from
			// CreateCombAnnProps below.

			//
			// 1. Train Combined ANNs.
			bagging = new Bagging();

			var ers = new List<float>();
			var stats = new List<CombinedAnnTrainingRes>();
			var objFuncs = CombinedAnn.FitnessFunctions.GetFitnessFunctions();
			// NOTE(review): [props] is built once here, BEFORE combAnn.Ann1FitnessFunction
			// is set per-iteration below — verify that CombinedAnnElements.Train(props)
			// actually uses the per-iteration objective and not a stale props.ObjFunction.
			var props = CreateCombAnnProps((float)AlphaNumeric.Value);

			foreach (var func in objFuncs)
			{
				// Skip objective functions the code treats as incompatible with
				// the current alpha (compression vs. expansion direction).
				if (AlphaNumeric.Value >= 1 && func == CombinedAnn.FitnessFunctions.CORRELATION_MIN)
				{
					continue;
				}
				if (AlphaNumeric.Value < 1 && func == CombinedAnn.FitnessFunctions.CORRELATION_MAX)
				{	// decrease features dimensionality.
					continue;
				}

				combAnn = new CombinedAnn();
				combAnn.EAParameters = eaPropertiesControl1.Parameters;
				combAnn.TrainData = trainData;
				combAnn.ValidationData = validData;
				combAnn.TestData = testData;

				combAnn.Ann1FitnessFunction = CombinedAnn.FitnessFunctions.GetFitnessFunction(func);

				var res = CombinedAnnElements.Train(props);
				stats.Add(res);

				combAnn.Network1 = res.Network1;
				combAnn.Network2 = res.Network2;
				bagging.Classifiers.Add(combAnn);

				var er = CombinedAnnElements.TestCombinedAnn(combAnn, testData);
				ers.Add(er);
			}
			var lines = new List<string> (VectorMath.ConvertToStringsArray(ers.ToArray()));
			lines.Insert(0, "Individual combined networks errors:");
			lines.Insert(1, "");

			//
			// 2. Perform testing of the bagging (averaged outputs, not voting).
			bagging.UseVotes = false;
			var bagRes = MachineLearningElements.TestAlgorithm(bagging, testData);

			//
			// 3. Calculate testing error for bagging from the confusion matrix.
			var confMatrix = MachineLearningElements.CalculateConfusionMatrix(testData, bagRes);
			var ersCount = MatrixMath.SumIgnoreDiag(confMatrix);
			lines.Add("Bagging classification error:");
			lines.Add(string.Format("{0}", (float)ersCount/testData.Count));

			ResBox.Lines = lines.ToArray();
		}

		/// <summary>
		/// Perform bagging with the same objective function: asks the user for the
		/// committee size, trains that many combined ANNs with the UI-selected objective,
		/// and reports individual and bagged test errors in [ResBox].
		/// </summary>
		public void UniformBaggingTraining()
		{
			// todo: add a parameter to define how many ANNs should be trained for each objective function (a kind of 'ANN packs' number).
			// todo: implement bagging with bootstrapping.

			// 1. Fill-in 1st and 2nd ANNs parameters.
			// net1Props/net2Props are only consumed by the TrainAdvSelection branch below.
			int inputsNumber = trainData[0].Data.Length, net1OutputsNumber = (int)(inputsNumber * AlphaNumeric.Value);
			var net1Props = neuralNetProperties1.NeuralProperties;
			var net2Props = new NeuralNetProperties();
			net2Props.nodesNumber = new[] { net1OutputsNumber, trainData[0].Response.Length };
			net2Props.actFunctions = new ActivationFunction[] { ActivationFunctions.Sigmoid, ActivationFunctions.Linear };

			//
			// 2. Train Combined ANNs; ask the user for the committee size first.
			var intForm = new IntInputForm();
			intForm.LabelText = "Committee size";
			intForm.ShowDialog();
			var comSize = intForm.NumericValue;

			bagging = new Bagging();

			var reg = Regularizers.GetRegularizer((string)RegularizerCombo.SelectedItem);
			var ers = new List<float>();
			var stats = new List<CombinedAnnTrainingRes>();
			var func = CombinedAnn.FitnessFunctions.GetFitnessFunction((string)ObjectiveFunctionCombo.SelectedItem);
			// NOTE(review): CreateCombAnnProps reads combAnn.Ann1FitnessFunction, but at
			// this point [combAnn] still holds the value from a previous run (or null on
			// the first run) — the per-loop assignment below does not update [props].
			var props = CreateCombAnnProps((float) AlphaNumeric.Value);

			for (int i = 0; i < comSize; i++)
			{
				combAnn = new CombinedAnn();
				combAnn.EAParameters = eaPropertiesControl1.Parameters;
				combAnn.TrainData = trainData;
				combAnn.ValidationData = validData;
				combAnn.TestData = testData;
				combAnn.Ann1FitnessFunction = func;

				CombinedAnnTrainingRes res;
				if (!AdvSelectionCheck.Checked)
				{
					res = CombinedAnnElements.Train(props);
				}
				else
				{
					res = CombinedAnnElements.TrainAdvSelection(trainData, validData, net1Props, eaPropertiesControl1.Parameters,
											  combAnn.Ann1FitnessFunction, net2Props,
											  (int)Net2EpochsNumeric.Value,
											  NET2_TRIALS, reg);
				}
				stats.Add(res);

				combAnn.Network1 = res.Network1;
				combAnn.Network2 = res.Network2;
				bagging.Classifiers.Add(combAnn);

				var er = CombinedAnnElements.TestCombinedAnn(combAnn, testData);
				ers.Add(er);
			}
			var lines = new List<string>(VectorMath.ConvertToStringsArray(ers.ToArray()));
			lines.Insert(0, "Individual combined networks errors:");
			lines.Insert(1, "");

			//
			// 3. Perform testing of the bagging (averaged outputs, not voting).
			bagging.UseVotes = false;
			var bagRes = MachineLearningElements.TestAlgorithm(bagging, testData);

			//
			// 4. Calculate testing error for bagging from the confusion matrix.
			var confMatrix = MachineLearningElements.CalculateConfusionMatrix(testData, bagRes);
			var ersCount = MatrixMath.SumIgnoreDiag(confMatrix);
			lines.Add("Bagging classification error:");
			lines.Add(string.Format("{0}", (float)ersCount / testData.Count));

			ResBox.Lines = lines.ToArray();
		}

		/// <summary>
		/// Perform training using AdaBoost.M1 with the combined ANN as the weak learner.
		/// Repeats the whole boosting procedure RUNS_COUNT times and logs per-run test
		/// errors and committee sizes to [ResBox] and to [filename].
		/// </summary>
		/// <param name="alpha">Compression ratio: ANN1 outputs = inputs * alpha.</param>
		/// <param name="filename">Log file the results are written to.</param>
		public void BoostingTraining(float alpha, string filename)
		{
			// 1. Fill-in 1st and 2nd ANNs parameters.
			int inputsNumber = trainData[0].Data.Length, net1OutputsNumber = (int)(inputsNumber * alpha);
			var net1Props = neuralNetProperties1.NeuralProperties;
			var net2Props = new NeuralNetProperties();
			net2Props.nodesNumber = new[] { net1OutputsNumber, trainData[0].Response.Length };
			net2Props.actFunctions = new ActivationFunction[] { ActivationFunctions.Sigmoid, ActivationFunctions.Linear };

			// Committee size (max boosting iterations); fixed instead of asking the user.
			// fix: removed the dead commented-out IntInputForm dialog code and the
			// redundant (int) cast on an already-int value.
			const int comSize = 5;

			//
			// create the combined ANN that serves as the weak learner for all runs.
			combAnn = new CombinedAnn();
			combAnn.EAParameters = eaPropertiesControl1.Parameters;
			combAnn.TrainData = trainData;
			combAnn.ValidationData = validData;
			combAnn.TestData = testData;
			combAnn.Ann1FitnessFunction = CombinedAnn.FitnessFunctions.GetFitnessFunction((string)ObjectiveFunctionCombo.SelectedItem);
			combAnn.MinimizeFitness = combAnn.Ann1FitnessFunction.MinimizeFitness;
			combAnn.Network1 = LayeredNeuralNetwork.CreateNetwork(net1Props);
			combAnn.Network2 = MachineLearningElements.CreateEncogBasicNetwork(net2Props);
			combAnn.TrainingParameters = new CombinedAnn.Parameters { Ann1Properties = net1Props, Ann2Properties = net2Props,
																		Ann2Trials = NET2_TRIALS, EpochsCount = (int)Net2EpochsNumeric.Value,
																		Regularizer = Regularizers.GetRegularizer((string)RegularizerCombo.SelectedItem)};
			
			//
			// Perform 10 runs
			const int RUNS_COUNT = 10;
			var ers = new List<float>();
			var clCount = new List<float>();
			for (int i = 0; i < RUNS_COUNT; ++i )
			{
				//
				// 2. Train a fresh boosted committee with a uniform initial distribution.
				boosting = new AdaBoostM1();
				boosting.TrainIterations = comSize;
				boosting.WeakLearner = combAnn;
				boosting.SamplesDistribution = VectorMath.Mul(VectorMath.Ones(trainData.Count), 1f / trainData.Count);
				boosting.Train(trainData);

				clCount.Add(boosting.TrainedClassifier.Classifiers.Count);

				// 3. Test error from the confusion matrix on the test split.
				var testRes = MachineLearningElements.TestAlgorithm(boosting, testData);
				var confM = MachineLearningElements.CalculateConfusionMatrix(testData, testRes);
				var ersCount = MatrixMath.SumIgnoreDiag(confM);

				ers.Add((float)ersCount / testData.Count);
			}
			var erStats = VectorMath.CalculateStats(ers.ToArray());

			var lines = new List<string>();
			lines.Add("Boosting testing error:");

			//
			// prepare information for logging.
			lines.Add("Run #\tTest er.\tCom. size");
			for (int i = 0; i < RUNS_COUNT; i++)
			{
				lines.Add(string.Format("{0}\t{1}\t{2}", (i+1), ers[i], clCount[i]));
			}

			lines.Add("");
			lines.Add("Several runs stats:");
			lines.Add(erStats.GetStatsHeader());
			lines.Add(erStats.GetStatsString());

			ResBox.Lines = lines.ToArray();

			WriteResults(filename, lines, net1Props, (string)ObjectiveFunctionCombo.SelectedItem);
		}
		#endregion

		#region - Fitness functions. -
		/// <summary>
		/// Fitness function: mean absolute pairwise correlation between the output-node
		/// signals of the 1st network over the whole training set.
		/// </summary>
		/// <param name="w">Connection weights to load into the 1st network.</param>
		/// <returns>Mean of the absolute correlation-coefficient matrix.</returns>
		public float FitnessFunctionCorrelation(float[] w)
		{
			net1.SetConnectionWeights(new List<float>(w));

			// Collect the network's output vector for every training sample.
			var sampleCount = trainData.Count;
			var responses = new List<float[]>();
			for (var s = 0; s < sampleCount; s++)
			{
				net1.Calculate(MatrixMath.GetRow(trainData[s].Data, 0));

				float[] nodeOuts;
				net1.GetOutputs(out nodeOuts);
				responses.Add(nodeOuts);
			}

			// View the collected responses column-wise: one signal vector per output node.
			var responsesM = MatrixMath.CreateFromRowsList(responses);
			var signalCols = MatrixMath.ConvertToColumnsList(responsesM);

			// Score = mean of the absolute correlation coefficients between node signals.
			var corAbs = MatrixMath.Abs(MatrixMath.ComputeCorrelationMatrix(signalCols));
			return MatrixMath.Mean(corAbs);
		}

		/// <summary>
		/// Evaluates a weight vector for the 1st network by repeatedly training the
		/// 2nd network on top of it and returning the worst (maximal) training error
		/// observed over the trials.
		/// </summary>
		/// <param name="w">Connection weights to load into the 1st network.</param>
		/// <returns>Maximal RProp training error over the trials.</returns>
		public float FitnessFunctionAnnTrain(float[] w)
		{
			// copy individual genes into [Vector].
			net1.SetConnectionWeights(new List<float>(w));

			#region - Train neural network using this small training set. -
			const int TRAIN_EPOCHS = 50;
			int outputsNumber = trainData[0].Response.Length;
			var net2Props = new NeuralNetProperties();
			net2Props.nodesNumber = new[] { net1.OutputsNumber, outputsNumber };
			net2Props.actFunctions = new ActivationFunction[] { ActivationFunctions.Sigmoid, ActivationFunctions.Linear };
			net2 = MachineLearningElements.CreateEncogBasicNetwork(net2Props);

			#region - Create set of training samples. -
			// Pair ANN1's outputs for each training sample with the desired response.
			INeuralDataSet trainingSet = new BasicNeuralDataSet();
			int trainCount = trainData.Count;
			for (int j = 0; j < trainCount; ++j)
			{
				var row = MatrixMath.GetRow(trainData[j].Data, 0);
				net1.Calculate(row);
				float[] outs;
				net1.GetOutputs(out outs);

				var outsD = VectorMath.ConvertToDoubles(outs);
				var outsCorRow = MatrixMath.GetRow(trainData[j].Response, 0);
				var outsCorD = VectorMath.ConvertToDoubles(outsCorRow);
				trainingSet.Add(new BasicNeuralData(outsD), new BasicNeuralData(outsCorD));
			}
			#endregion

			const int trainTrials = 3;
			var er = 0.0f;
			for (int i = 0; i < trainTrials; ++i)
			{
				net2.Reset();
				ITrain train = new ResilientPropagation(net2, trainingSet);
				int epoch = 1;

				do
				{
					train.Iteration();
					epoch++;
				} while (epoch < TRAIN_EPOCHS);

				if (er < train.Error)
				{	// er is a maximal obtained error.
					er = (float)train.Error;
				}
			}
			#endregion

			// fix: [er] already holds the maximal error over the trials, so return it
			// as-is. The old "er / trainTrials" was a leftover from a removed averaging
			// variant (er += train.Error) and silently rescaled the fitness value.
			return er;
		} 
		#endregion

		#region - Utility methods. -
		/// <summary>
		/// Writes the experiment configuration header followed by the result lines.
		/// </summary>
		/// <param name="filename">Destination log file (overwritten if present).</param>
		/// <param name="lines">Pre-formatted result lines to append after the header.</param>
		/// <param name="net1Props">Properties of the 1st network (architecture is logged).</param>
		/// <param name="funcName">Name of the fitness function used.</param>
		public void WriteResults (string filename, List<string> lines, NeuralNetProperties net1Props, string funcName)
		{
			using (var writer = new StreamWriter(filename))
			{
				writer.WriteLine(dataFileBox.Text);
				writer.WriteLine();

				var architecture = VectorMath.ConvertToString(net1Props.nodesNumber, '-');
				writer.WriteLine(string.Format("ANN1:\t{0}", architecture));
				writer.WriteLine();

				writer.WriteLine("EA parameters:");
				EAElements.Write(writer, eaPropertiesControl1.Parameters);
				writer.WriteLine(string.Format("Fitness function:\t{0}", funcName));
				writer.WriteLine(string.Format("Regularizer:\t{0}", RegularizerCombo.SelectedItem));

				// The output layer's activation function identifies ANN1's transfer type.
				var lastActivation = net1Props.actFunctions[net1Props.actFunctions.Length - 1];
				writer.WriteLine(string.Format("Activation function:\t{0}", lastActivation.Method.Name));
				writer.WriteLine();

				writer.WriteLine(string.Format("ANN2 training epochs:\t{0}", Net2EpochsNumeric.Value));
				writer.WriteLine();

				FileIO.WriteAllLines(writer, lines);
			}
		}

		/// <summary>
		/// Builds the combined-ANN training properties from the current UI state.
		/// Note: reads combAnn.Ann1FitnessFunction, so [combAnn] must be set first.
		/// </summary>
		/// <param name="a">Compression ratio: ANN2 input width = ANN1 inputs * a.</param>
		public CombinedAnnProperties CreateCombAnnProps (float a)
		{
			// ANN2 maps ANN1's features (linear input layer) onto the classes (sigmoid).
			int inputsNumber = trainData[0].Data.Length;
			var net2Props = new NeuralNetProperties();
			net2Props.nodesNumber = new[] { (int)(inputsNumber * a), trainData[0].Response.Length };
			net2Props.actFunctions = new ActivationFunction[] { ActivationFunctions.Linear, ActivationFunctions.Sigmoid };

			return new CombinedAnnProperties
			{
				TrainData = trainData,
				ValidationData = validData,
				EAParams = eaPropertiesControl1.Parameters,
				PropsNet1 = neuralNetProperties1.NeuralProperties,
				Epochs = (int)Net2EpochsNumeric.Value,
				ObjFunction = combAnn.Ann1FitnessFunction,
				PropsNet2 = net2Props,
				Ann2Trials = NET2_TRIALS,
				Regularizer = Regularizers.GetRegularizer((string)RegularizerCombo.SelectedItem),
				UnitedTraining = UnitedCheck.Checked,
				UseEarlyStopping = EarlyStoppingCheck.Checked
			};
		}
		#endregion

		/// <summary>
		/// Keeps ANN1's output count equal to alpha * inputs whenever alpha changes.
		/// </summary>
		private void AlphaNumeric_ValueChanged(object sender, EventArgs e)
		{
			var alpha = (float)AlphaNumeric.Value;
			neuralNetProperties1.OutputsCount = (int)(neuralNetProperties1.InputsCount * alpha);
		}
	}
}
