﻿using System;
using System.Collections.Generic;
using Encog.Neural.Data.Basic;
using Encog.Neural.Networks;
using Encog.Neural.Networks.Layers;
using Encog.Neural.NeuralData;
using MentalAlchemy.Atomics;

namespace MentalAlchemy.Molecules.MachineLearning
{
	/// <summary>
	/// [molecule]
	/// 
	/// Class for the combined ANN.
	/// </summary>
	[Serializable]
	public class CombinedAnn : IClassifier
	{
		#region - Embed classes. -
		/// <summary>
		/// Display names of the supported ANN-1 fitness functions and a factory
		/// mapping each name to its objective-function instance.
		/// </summary>
		public class FitnessFunctions
		{
			public const string CORRELATION_MAX = "Correlation (max)";
			public const string CORRELATION_MIN = "Correlation (min)";
			public const string ERROR_MIN = "Error (min)";
			public const string GRAM_REG_MAX = "Reg. Gram matrix (max)";
			public const string GRAM_MAX = "Gram matrix (max)";
			public const string GRAM_MIN = "Gram matrix (min)";
			public const string GRAM_VARIANCE_MIN = "Gram matrix + variance (min)";
			public const string GRAM_DETRANK_MAX = "Gram matrix rank + det (max)";
			public const string RANK_MAX = "Matrix rank (max)";
			public const string VARIANCE_MAX = "ANN outputs variance (max)";

			/// <summary>
			/// Returns the display names of all supported fitness functions.
			/// </summary>
			public static string[] GetFitnessFunctions ()
			{
				return new[]
				{
					CORRELATION_MAX,
					CORRELATION_MIN,
					ERROR_MIN,
					GRAM_REG_MAX,
					GRAM_MAX,
					GRAM_MIN,
					GRAM_VARIANCE_MIN,
					GRAM_DETRANK_MAX,
					RANK_MAX,
					VARIANCE_MAX
				};
			}

			/// <summary>
			/// Creates the objective function corresponding to the given display name.
			/// </summary>
			/// <param name="name">One of the string constants declared by this class.</param>
			/// <returns>A freshly constructed objective function with its minimize/maximize flag set.</returns>
			/// <exception cref="ArgumentException">Thrown for an unrecognized name.</exception>
			public static NEObjFunction GetFitnessFunction(string name)
			{
				switch (name)
				{
					case CORRELATION_MAX: return new CorrelationObjFunction { MinimizeFitness = false };
					case CORRELATION_MIN: return new CorrelationObjFunction { MinimizeFitness = true };
					case GRAM_REG_MAX: return new RegGramMatrixObjFunction { MinimizeFitness = false };
					case GRAM_MAX: return new GramMatrixObjFunction { MinimizeFitness = false };
					case GRAM_MIN: return new GramMatrixObjFunction { MinimizeFitness = true };
					case GRAM_VARIANCE_MIN: return new GramMatrixVarianceObjFunction { MinimizeFitness = true };
					case GRAM_DETRANK_MAX: return new GramDetRankObjFunction { MinimizeFitness = false };
					case RANK_MAX: return new RankObjFunction { MinimizeFitness = false };
					case ERROR_MIN: return new MinErrorObjFunction { MinimizeFitness = true };
					case VARIANCE_MAX: return new OutputsVarianceObjFunction { MinimizeFitness = false };
				}

				// fix: throw the specific exception type for a bad argument
				// (backward compatible: still caught by existing catch (Exception) handlers).
				throw new ArgumentException(string.Format("[CombinedAnn.FitnessFunctions.GetFitnessFunction] Unrecognized fitness function name: {0}", name), "name");
			}
		}

		/// <summary>
		/// Settings bundle for combined ANN training.
		/// </summary>
		public class Parameters
		{
			/// <summary>Number of ANN-2 training epochs.</summary>
			public int EpochsCount { get; set; }
			/// <summary>Number of independent ANN-2 training trials (the best one is kept).</summary>
			public int Ann2Trials { get; set; }
			/// <summary>Topology/settings for ANN-1.</summary>
			public NeuralNetProperties Ann1Properties { get; set; }
			/// <summary>Topology/settings for ANN-2.</summary>
			public NeuralNetProperties Ann2Properties { get; set; }
			/// <summary>Optional weights regularizer used when selecting the best ANN-2.</summary>
			public Regularizer Regularizer { get; set; }
		}
		#endregion

		// ANN-1: feature-extracting network (trained by neuro-evolution, see CombinedAnnElements).
		protected LayeredNeuralNetwork net1;
		// ANN-2: Encog classifier network trained by gradient descent.
		protected BasicNetwork net2;
		// Maps ANN-2 output index -> class ID; rebuilt whenever TrainData is set.
		protected Dictionary<int, int> classIndex;
		// Training samples backing the TrainData property.
		protected List<TrainingSample> tData;

		#region - Public properties. -
		/// <summary>
		/// Parameters for combined ANN training.
		/// </summary>
		public Parameters TrainingParameters { get; set; }

		/// <summary>
		/// Data for the combined ANN training. Setting it also rebuilds the
		/// output-to-class correspondence (see <see cref="ClassIndex"/>).
		/// </summary>
		public List<TrainingSample> TrainData
		{
			get
			{
				return tData;
			}
			set
			{
				tData = value;
				// keep the class map consistent with the new training data.
				classIndex = MachineLearningElements.FillResponses(tData);
			}
		}

		/// <summary>
		/// Sets correspondence between ANN output and the class ID.
		/// </summary>
		public Dictionary<int, int> ClassIndex
		{
			get { return classIndex; }
			set { classIndex = value;}
		}

		/// <summary>
		/// Data for ANN-2 testing.
		/// </summary>
		public List<TrainingSample> ValidationData { get; set; }
		
		/// <summary>
		/// Data for the combined ANN testing.
		/// </summary>
		public List<TrainingSample> TestData { get; set; }

		/// <summary>
		/// ANN-1. The setter stores a clone, so the caller's instance stays untouched.
		/// </summary>
		public LayeredNeuralNetwork Network1 
		{ 
			get { return net1; }
			set { net1 = (LayeredNeuralNetwork)value.Clone(); }
		}

		/// <summary>
		/// ANN-2. The setter stores a clone, so the caller's instance stays untouched.
		/// </summary>
		public BasicNetwork Network2
		{
			get { return net2; }
			set { net2 = (BasicNetwork)value.Clone(); }
		}

		/// <summary>
		/// Fitness function for the ANN-1 training.
		/// </summary>
		public NEObjFunction Ann1FitnessFunction { get; set; }
		/// <summary>
		/// Parameters for the ANN-1 training.
		/// </summary>
		public EAParameters EAParameters { get; set; }

		/// <summary>
		/// Indicates whether the objective function is to be minimized or maximized.
		/// note(review): delegates to the static FitnessComparator, so this acts as a
		/// process-wide setting rather than a per-instance one — confirm intent.
		/// </summary>
		public bool MinimizeFitness
		{
			get { return FitnessComparator.MinimizeFitness; }
			set { FitnessComparator.MinimizeFitness = value; }
		}

		/// <summary>
		/// Indicates whether ANN-1 weights are tuned during ANN-2 training.
		/// </summary>
		public bool UnitedTraining { get; set; }

		/// <summary>
		/// Results of the training. Filled only if Train method is called. Otherwise is null.
		/// </summary>
		public CombinedAnnTrainingRes TrainingResults { get; set; }
		#endregion

		#region - Construction. -
		/// <summary>Creates an empty combined ANN.</summary>
		public CombinedAnn () {}

		/// <summary>
		/// Copy constructor. Deep-copies both networks and the data collections;
		/// the fitness function and parameter objects are shared by reference.
		/// </summary>
		/// <param name="ann">Instance to copy.</param>
		public CombinedAnn(CombinedAnn ann)
		{
			// null guards allow cloning a partially-initialized instance
			// (e.g. one that has networks but no validation/test data yet).
			net1 = ann.net1 != null ? (LayeredNeuralNetwork)ann.net1.Clone() : null;
			net2 = ann.net2 != null ? (BasicNetwork)ann.net2.Clone() : null;
			classIndex = ann.classIndex != null ? new Dictionary<int, int>(ann.classIndex) : null;
			tData = ann.tData != null ? new List<TrainingSample>(ann.tData) : null;
			ValidationData = ann.ValidationData != null ? new List<TrainingSample>(ann.ValidationData) : null;
			// fix: copy the test set from TestData (previously copied ValidationData by mistake).
			TestData = ann.TestData != null ? new List<TrainingSample>(ann.TestData) : null;
			Ann1FitnessFunction = ann.Ann1FitnessFunction;
			MinimizeFitness = ann.MinimizeFitness;
			EAParameters = ann.EAParameters;
			TrainingParameters = ann.TrainingParameters;
			UnitedTraining = ann.UnitedTraining;
		}
		#endregion

		#region - Recognition. -
		/// <summary>
		/// Performs training of the Combined ANN: ANN-1 via neuro-evolution, ANN-2
		/// via several gradient-training trials (see CombinedAnnElements.Train).
		/// Fills <see cref="TrainingResults"/> and stores the best networks found.
		/// </summary>
		/// <param name="trainData">Labelled samples to train on.</param>
		public void Train(List<TrainingSample> trainData)
		{
			// fix: store the supplied data (which also derives the output-to-class map
			// from it) instead of reading the possibly stale/unset TrainData property.
			TrainData = trainData;

			var annProps = new CombinedAnnProperties
			{
				TrainData = trainData,
				ValidationData = ValidationData,
				EAParams = EAParameters,
				PropsNet1 = TrainingParameters.Ann1Properties,
				Epochs = TrainingParameters.EpochsCount,
				ObjFunction = Ann1FitnessFunction,
				PropsNet2 = TrainingParameters.Ann2Properties,
				Ann2Trials = TrainingParameters.Ann2Trials,
				Regularizer = TrainingParameters.Regularizer,
				UnitedTraining = UnitedTraining
			};

			TrainingResults = CombinedAnnElements.Train(annProps);
			Network1 = TrainingResults.Network1;
			Network2 = TrainingResults.Network2;
		}

		/// <summary>
		/// Performs recognition of the given object and returns class index (not class ID!!!).
		/// </summary>
		/// <param name="obj">Object's description.</param>
		/// <returns>Class index.</returns>
		public int Recognize (float[,] obj)
		{
			int winnerIdx;
			if (UnitedTraining)
			{
				// United mode: ANN-2 alone processes the flattened object description.
				var flat = MatrixMath.ConvertToVector(obj);
				var netInput = new BasicNeuralData(VectorMath.ConvertToDoubles(flat));
				var netOutput = net2.Compute(netInput);
				winnerIdx = VectorMath.IndexOfMax(netOutput.Data);
			}
			else
			{
				// Two-stage mode: ANN-1 produces features, ANN-2 classifies them.
				var features = MachineLearningElements.GetOutputs(net1, obj);
				var netInput = new BasicNeuralData(VectorMath.ConvertToDoubles(features.ToArray()));
				var netOutput = net2.Compute(netInput);
				var responses = VectorMath.CreateFromDoubles(netOutput.Data);
				winnerIdx = VectorMath.IndexOfMax(responses);
			}

			// Map the winning output onto a class ID when the mapping is available.
			return classIndex == null ? winnerIdx : classIndex[winnerIdx];
		}

		/// <summary>
		/// [molecule]
		/// 
		/// Returns class votes calculated proportionally to the minimal output value normalized by the SoftMax function.
		/// </summary>
		/// <param name="obj">Object's description.</param>
		/// <returns>Dictionary with class votes.</returns>
		public Dictionary<int, int> GetClassVotes(float[,] obj)
		{
			// Pass the object through ANN-1, then feed the features to ANN-2.
			var features = MachineLearningElements.GetOutputs(net1, obj);
			var netOutput = net2.Compute(new BasicNeuralData(VectorMath.ConvertToDoubles(features.ToArray())));
			var responses = VectorMath.CreateFromDoubles(netOutput.Data);

			// Exponentiate the raw outputs (SoftMax numerators).
			var exps = new float[responses.Length];
			for (var k = 0; k < exps.Length; ++k)
			{
				exps[k] = (float)Math.Exp(responses[k]);
			}

			// Vote of each class = its exponent divided by the smallest exponent,
			// truncated to an integer (the weakest class always gets one vote).
			var invMin = 1f / VectorMath.Min(exps);
			var votes = new Dictionary<int, int>();
			for (var k = 0; k < exps.Length; ++k)
			{
				votes.Add(classIndex[k], (int)(exps[k] * invMin));
			}

			return votes;
		}

		/// <summary>
		/// [molecule]
		/// 
		/// Returns class probabilities calculated using SoftMax function.
		/// </summary>
		/// <param name="obj">Object's description.</param>
		/// <returns>Dictionary with class probabilities.</returns>
		public Dictionary<int, float> GetClassProbabilities(float[,] obj)
		{
			// Pass the object through ANN-1, then feed the features to ANN-2.
			var features = MachineLearningElements.GetOutputs(net1, obj);
			var netOutput = net2.Compute(new BasicNeuralData(VectorMath.ConvertToDoubles(features.ToArray())));
			var responses = VectorMath.CreateFromDoubles(netOutput.Data);

			// SoftMax: exponentiate the raw outputs, then normalize by their sum.
			var exps = new float[responses.Length];
			for (var k = 0; k < exps.Length; ++k)
			{
				exps[k] = (float)Math.Exp(responses[k]);
			}
			var invSum = 1f / VectorMath.Sum(exps);

			var probabilities = new Dictionary<int, float>();
			for (var k = 0; k < exps.Length; ++k)
			{
				probabilities.Add(classIndex[k], exps[k] * invSum);
			}

			return probabilities;
		}

		#endregion

		public object Clone () {return new CombinedAnn(this);}
	}

	/// <summary>
	/// Bag of settings consumed by the <see cref="CombinedAnnElements"/> training methods.
	/// </summary>
	public class CombinedAnnProperties
	{
		/// <summary>Training data for both networks.</summary>
		public List<TrainingSample> TrainData;
		/// <summary>Validation data used to select the best ANN-2 (and for early stopping).</summary>
		public List<TrainingSample> ValidationData;
		/// <summary>ANN-1 topology/settings.</summary>
		public NeuralNetProperties PropsNet1;
		/// <summary>Evolutionary-algorithm settings for ANN-1 training.</summary>
		public EAParameters EAParams;
		/// <summary>Objective function for ANN-1 training.</summary>
		public NEObjFunction ObjFunction;
		/// <summary>ANN-2 topology/settings.</summary>
		public NeuralNetProperties PropsNet2;
		/// <summary>Number of ANN-2 training epochs.</summary>
		public int Epochs;
		/// <summary>Number of independent ANN-2 training trials.</summary>
		public int Ann2Trials;
		/// <summary>Optional weights regularizer added to the ANN-2 selection error.</summary>
		public Regularizer Regularizer;
		/// <summary>Whether ANN-1 weights are tuned during ANN-2 training.</summary>
		public bool UnitedTraining;
		/// <summary>Whether ANN-2 training uses validation-based early stopping.</summary>
		public bool UseEarlyStopping;
	}

	/// <summary>
	/// [molecule]
	/// 
	/// Class to house various methods for the Combined training of ANN.
	/// </summary>
	public class CombinedAnnElements
	{
		#region - Training. -
		/// <summary>
		/// [molecule]
		/// 
		/// Train combined ANN: ANN-1 by neuro-evolution, then ANN-2 several times by
		/// gradient training, keeping the ANN-2 with the lowest (optionally regularized)
		/// validation error.
		/// note: The problem (?) is that classIndex array, which defines correspondence between ANN outputs and class indices, is not set.
		/// </summary>
		/// <param name="annProps">Combined ANN properties.</param>
		/// <returns>Training results: both networks, every ANN-2 candidate and the stats.</returns>
		public static CombinedAnnTrainingRes Train(CombinedAnnProperties annProps)
		{
			// fix: report the actual method in the error message (was "[TrainAnn1]").
			if (annProps.TrainData == null) throw new Exception("[CombinedAnnElements.Train] error: Undefined training data.");

			var res = new CombinedAnnTrainingRes();
			annProps.ObjFunction.TrainData = annProps.TrainData;

			//
			// 1. Train 1st ANN.
			LayeredNeuralNetwork net1;
			res.Ann1TrainingStats = TrainAnn1(annProps.TrainData, annProps.PropsNet1, annProps.EAParams, annProps.ObjFunction, out net1);
			res.Network1 = net1;
			res.Networks2 = new List<BasicNetwork>();

			//
			// 2. Train 2nd ANN to classify data; keep the best of [Ann2Trials] runs.
			// note(review): with Ann2Trials == 0 the loop never runs and res.Network2 stays null.
			var minValError = float.MaxValue;
			var combAnn = new CombinedAnn();
			combAnn.TrainData = annProps.TrainData;
			combAnn.Network1 = net1;
			combAnn.UnitedTraining = annProps.UnitedTraining;
			for (int i = 0; i < annProps.Ann2Trials; i++)
			{
				BasicNetwork net2;
				var ann2stats = TrainAnn2(annProps.TrainData, res.Network1, annProps.PropsNet2, annProps.Epochs, out net2);

				//
				// test obtained ANN-2 on the validation set.
				combAnn.Network2 = net2;
				var er = TestCombinedAnn(combAnn, annProps.ValidationData);
				res.Networks2.Add(net2);

				//
				// correct [er] using the regularization term for the ANN-2 weights matrix.
				if (annProps.Regularizer != null)
				{
					var w = MachineLearningElements.GetWeights(net2);
					er += annProps.Regularizer(w);
				}

				//
				// if the resulting error is minimal then save current ANN-2.
				if (minValError > er)
				{
					minValError = er;
					res.Ann2TrainingStats = ann2stats;
					res.Network2 = net2;
				}
			}

			res.AnnProps = annProps;
			return res;
		}

		/// <summary>
		/// [molecule]
		/// 
		/// Train combined ANN with ANN-1 trained by the EvoPCA2 algorithm. The ANN-2
		/// input size is adjusted to the number of outputs ANN-1 ends up with, and
		/// ANN-2 may optionally be trained with validation-based early stopping.
		/// note: The problem (?) is that classIndex array, which defines correspondence between ANN outputs and class indices, is not set.
		/// </summary>
		/// <param name="annProps">Combined ANN properties.</param>
		/// <param name="t">Threshold value.</param>
		/// <returns>Training results: both networks, every ANN-2 candidate and the stats.</returns>
		public static CombinedAnnTrainingRes TrainEvoPca(CombinedAnnProperties annProps, float t)
		{
			// fix: report the actual method in the error message (was "[TrainAnn1]").
			if (annProps.TrainData == null) throw new Exception("[CombinedAnnElements.TrainEvoPca] error: Undefined training data.");

			var res = new CombinedAnnTrainingRes();
			annProps.ObjFunction.TrainData = annProps.TrainData;

			//
			// 1. Train 1st ANN.
			LayeredNeuralNetwork net1;
			res.Ann1TrainingStats = TrainAnn1EvoPca2(annProps.TrainData, annProps.PropsNet1, annProps.EAParams, annProps.ObjFunction, t, out net1);
			res.Network1 = net1;
			res.Networks2 = new List<BasicNetwork>();
			res.Network2TrainingEpochs = new List<int>();

			// Modify ann-2 props according to the net1 training results.
			annProps.PropsNet2.nodesNumber[0] = net1.OutputsNumber;

			//
			// 2. Train 2nd ANN to classify data; keep the best of [Ann2Trials] runs.
			var minValError = float.MaxValue;
			var combAnn = new CombinedAnn();
			combAnn.TrainData = annProps.TrainData;
			combAnn.Network1 = net1;
			combAnn.UnitedTraining = annProps.UnitedTraining;
			for (int i = 0; i < annProps.Ann2Trials; i++)
			{
				BasicNetwork net2;
				var ann2stats = annProps.UseEarlyStopping ?
					TrainAnn2(annProps.TrainData, annProps.ValidationData, res.Network1, annProps.PropsNet2, annProps.Epochs, out net2)
					: TrainAnn2(annProps.TrainData, res.Network1, annProps.PropsNet2, annProps.Epochs, out net2);
				res.Network2TrainingEpochs.Add(ann2stats.Count);	// actual epoch count (may be cut short by early stopping).

				//
				// test obtained ANN-2 on the validation set.
				combAnn.Network2 = net2;
				var er = TestCombinedAnn(combAnn, annProps.ValidationData);
				res.Networks2.Add(net2);

				//
				// correct [er] using the regularization term for the ANN-2 weights matrix.
				if (annProps.Regularizer != null)
				{
					var w = MachineLearningElements.GetWeights(net2);
					er += annProps.Regularizer(w);
				}

				//
				// if the resulting error is minimal then save current ANN-2.
				if (minValError > er)
				{
					minValError = er;
					res.Ann2TrainingStats = ann2stats;
					res.Network2 = net2;
				}
			}

			res.AnnProps = annProps;
			return res;
		}
	
		/// <summary>
		/// [molecule]
		/// 
		/// Train combined ANN so that ANN-1 weights are tuned as well when ANN-2 is trained.
		/// note: The problem (?) is that classIndex array, which defines correspondence between ANN outputs and class indices, is not set.
		/// </summary>
		/// <param name="annProps">Combined ANN properties.</param>
		/// <returns>Training results: both networks, every ANN-2 candidate and the stats.</returns>
		public static CombinedAnnTrainingRes TrainUni(CombinedAnnProperties annProps)
		{
			// fix: report the actual method in the error message (was "[TrainAnn1]").
			if (annProps.TrainData == null) throw new Exception("[CombinedAnnElements.TrainUni] error: Undefined training data.");

			var res = new CombinedAnnTrainingRes();
			annProps.ObjFunction.TrainData = annProps.TrainData;

			//
			// 1. Train 1st ANN. Bias is disabled because TrainAnn2Uni later merges
			// ANN-1 weights into the united network and copies biases separately.
			LayeredNeuralNetwork net1;
			annProps.PropsNet1.UseBias = false;
			res.Ann1TrainingStats = TrainAnn1(annProps.TrainData, annProps.PropsNet1, annProps.EAParams, annProps.ObjFunction, out net1);
			res.Network1 = net1;
			res.Networks2 = new List<BasicNetwork>();

			//
			// 2. Train the united network several times; keep the best of [Ann2Trials] runs.
			var minValError = float.MaxValue;
			var combAnn = new CombinedAnn();
			combAnn.TrainData = annProps.TrainData;
			combAnn.Network1 = net1;
			combAnn.UnitedTraining = annProps.UnitedTraining;
			for (int i = 0; i < annProps.Ann2Trials; i++)
			{
				BasicNetwork net2;
				var ann2stats = TrainAnn2Uni(annProps.TrainData, res.Network1, annProps.PropsNet2, annProps.Epochs, out net2);

				//
				// test obtained ANN-2 on the validation set.
				combAnn.Network2 = net2;
				var er = TestCombinedAnn(combAnn, annProps.ValidationData);
				res.Networks2.Add(net2);

				//
				// correct [er] using the regularization term for the ANN-2 weights matrix.
				if (annProps.Regularizer != null)
				{
					var w = MachineLearningElements.GetWeights(net2);
					er += annProps.Regularizer(w);
				}

				//
				// if the resulting error is minimal then save current ANN-2.
				if (minValError > er)
				{
					minValError = er;
					res.Ann2TrainingStats = ann2stats;
					res.Network2 = net2;
				}
			}

			res.AnnProps = annProps;
			return res;
		}

		/// <summary>
		/// [molecule]
		/// 
		/// Train combined ANN with advanced selection procedure for ANN-2 based upon weighting errors on training and the validation set.
		/// </summary>
		/// <param name="trainData">Data for ANN training.</param>
		/// <param name="evalData">Data for ANN-2 evaluation.</param>
		/// <param name="props1">ANN-1 properties.</param>
		/// <param name="eaParams">Parameters for ANN-1 training.</param>
		/// <param name="objFunction">Objective function for ANN-1 training.</param>
		/// <param name="props2">ANN-2 properties.</param>
		/// <param name="epochs">ANN-2 training epochs.</param>
		/// <param name="ann2Trials">Number of ANN-2 training trials.</param>
		/// <param name="regularizer">Optional weights regularizer added to the selection error.</param>
		/// <returns>Training results for the best ANN-2 found.</returns>
		public static CombinedAnnTrainingRes TrainAdvSelection(List<TrainingSample> trainData, List<TrainingSample> evalData,
													NeuralNetProperties props1, EAParameters eaParams, NEObjFunction objFunction,
													NeuralNetProperties props2, int epochs, int ann2Trials, Regularizer regularizer)
		{
			// fix: report the actual method in the error message (was "[TrainAnn1]").
			if (trainData == null) throw new Exception("[CombinedAnnElements.TrainAdvSelection] error: Undefined training data.");

			var res = new CombinedAnnTrainingRes();
			objFunction.TrainData = trainData;

			//
			// 1. Train 1st ANN.
			LayeredNeuralNetwork net1;
			res.Ann1TrainingStats = TrainAnn1(trainData, props1, eaParams, objFunction, out net1);
			res.Network1 = net1;
			res.Networks2 = new List<BasicNetwork>();

			//
			// 1.9. Prepare weights for blending training and evaluation errors:
			// the closer the two data sets, the more the training error is trusted.
			var dataDist = MachineLearningElements.CalculateDistance(trainData, evalData);	// distance between training and validation data sets.
			const float a = -5f;	// decay factor for the distance-based weighting.
			var trainW = (float)Math.Exp(a * dataDist);	// weight for the training data set error.
			var evalW = 1 - trainW;	// weight for evaluation data set error.

			//
			// 2. Train 2nd ANN to classify data.
			var minValError = float.MaxValue;
			var combAnn = new CombinedAnn();
			combAnn.TrainData = trainData;
			combAnn.Network1 = net1;
			for (int i = 0; i < ann2Trials; i++)
			{
				BasicNetwork net2;
				var ann2stats = TrainAnn2(trainData, res.Network1, props2, epochs, out net2);

				//
				// test obtained ANN-2.
				combAnn.Network2 = net2;
				var er = TestCombinedAnn(combAnn, evalData);
				res.Networks2.Add(net2);

				// fix: take the last recorded training error instead of indexing by
				// [epochs - 1], which throws when fewer entries were actually recorded.
				var wEr = trainW * ann2stats[ann2stats.Count - 1] + evalW * er;

				//
				// correct [wEr] using the regularization term for the ANN-2 weights matrix.
				if (regularizer != null)
				{
					var w = MachineLearningElements.GetWeights(net2);
					wEr += regularizer(w);
				}

				//
				// if the resulting error is minimal then save current ANN-2.
				if (minValError > wEr)
				{
					minValError = wEr;
					res.Ann2TrainingStats = ann2stats;
					res.Network2 = net2;
				}
			}

			return res;
		}

		/// <summary>
		/// Trains the 1st ANN with a neuro-evolutionary algorithm.
		/// </summary>
		/// <param name="trainData">Training samples (must not be null).</param>
		/// <param name="props1">ANN-1 topology description.</param>
		/// <param name="eaParams">EA settings; individual size and mutation rate are set here.</param>
		/// <param name="objFunction">Objective function guiding the evolution.</param>
		/// <param name="net1">Resulting trained network.</param>
		/// <returns>Per-generation fitness statistics.</returns>
		public static List<Stats> TrainAnn1(List<TrainingSample> trainData, NeuralNetProperties props1, EAParameters eaParams, NEObjFunction objFunction, out LayeredNeuralNetwork net1)
		{
			if (trainData == null) throw new Exception("[TrainAnn1] error: Undefined training data.");

			net1 = LayeredNeuralNetwork.CreateNetwork(props1);
			objFunction.Network = net1;

			// Configure the genome: one gene per connection, small initial value range.
			eaParams.MinGeneValue = -0.1f;
			eaParams.GeneValueRange = 0.2f;
			eaParams.IndividualSize = net1.GetTotalConnectionsNumber();
			eaParams.MRate = 1f / eaParams.IndividualSize;

			// Run the neuro-evolutionary search and load the best weights found.
			FitnessComparator.MinimizeFitness = objFunction.MinimizeFitness;
			var evolver = new NeuroEvolutionaryAlgorithm
			{
				FitnessFunction = objFunction,
				NeuralNetwork = net1
			};
			evolver.Run(eaParams);
			net1.SetConnectionWeights(evolver.BestIndividual.Genes.ToArray());

			return evolver.FitnessStats;
		}

		/// <summary>
		/// Trains the 1st ANN using the EvoPCA3 algorithm.
		/// </summary>
		/// <param name="trainData">Training samples (must not be null).</param>
		/// <param name="props1">ANN-1 topology description.</param>
		/// <param name="eaParams">EA settings; individual size and mutation rate are set here.</param>
		/// <param name="objFunction">Objective function guiding the evolution.</param>
		/// <param name="t">Threshold value for EvoPCA algorithm.</param>
		/// <param name="net1">Resulting trained network.</param>
		/// <returns>Per-generation fitness statistics.</returns>
		public static List<Stats> TrainAnn1EvoPca3(List<TrainingSample> trainData, NeuralNetProperties props1, EAParameters eaParams, NEObjFunction objFunction, float t, out LayeredNeuralNetwork net1)
		{
			if (trainData == null) throw new Exception("[TrainAnn1] error: Undefined training data.");

			net1 = LayeredNeuralNetwork.CreateNetwork(props1);
			objFunction.Network = net1;

			// Create and configure the EvoPCA3 search.
			var evolver = new EvoPCA3
			{
				FactorThreshold = t,
				ReferenceSumVariance = 1f	// note!!!
			};

			// Configure the genome: one gene per connection, small initial value range.
			eaParams.MinGeneValue = -0.1f;
			eaParams.GeneValueRange = 0.2f;
			eaParams.IndividualSize = net1.GetTotalConnectionsNumber();
			eaParams.MRate = 1f / eaParams.IndividualSize;

			// Run the search and load the best weights found. The algorithm may
			// replace the network instance, so take it back from the evolver.
			FitnessComparator.MinimizeFitness = objFunction.MinimizeFitness;
			evolver.FitnessFunction = objFunction;
			evolver.NeuralNetwork = net1;
			evolver.Run(eaParams);

			evolver.NeuralNetwork.SetConnectionWeights(evolver.BestIndividual.Genes.ToArray());
			net1 = evolver.NeuralNetwork;

			return evolver.FitnessStats;
		}

		/// <summary>
		/// Trains the 1st ANN using the EvoPCA2 algorithm.
		/// </summary>
		/// <param name="trainData">Training samples (must not be null).</param>
		/// <param name="props1">ANN-1 topology description.</param>
		/// <param name="eaParams">EA settings; individual size and mutation rate are set here.</param>
		/// <param name="objFunction">Objective function guiding the evolution.</param>
		/// <param name="t">Threshold value for EvoPCA algorithm.</param>
		/// <param name="net1">Resulting trained network.</param>
		/// <returns>Per-generation fitness statistics.</returns>
		public static List<Stats> TrainAnn1EvoPca2(List<TrainingSample> trainData, NeuralNetProperties props1, EAParameters eaParams, NEObjFunction objFunction, float t, out LayeredNeuralNetwork net1)
		{
			if (trainData == null) throw new Exception("[TrainAnn1] error: Undefined training data.");

			net1 = LayeredNeuralNetwork.CreateNetwork(props1);
			objFunction.Network = net1;

			// Create and configure the EvoPCA2 search.
			var evolver = new EvoPCA2
			{
				FactorThreshold = t,
				ReferenceSumVariance = 1f	// note!!!
			};

			// Configure the genome: one gene per connection, small initial value range.
			eaParams.MinGeneValue = -0.1f;
			eaParams.GeneValueRange = 0.2f;
			eaParams.IndividualSize = net1.GetTotalConnectionsNumber();
			eaParams.MRate = 1f / eaParams.IndividualSize;

			// Run the search and load the best weights found. The algorithm may
			// replace the network instance, so take it back from the evolver.
			FitnessComparator.MinimizeFitness = objFunction.MinimizeFitness;
			evolver.FitnessFunction = objFunction;
			evolver.NeuralNetwork = net1;
			evolver.Run(eaParams);

			evolver.NeuralNetwork.SetConnectionWeights(evolver.BestIndividual.Genes.ToArray());
			net1 = evolver.NeuralNetwork;

			return evolver.FitnessStats;
		}

		/// <summary>
		/// [molecule]
		/// 
		/// Trains 2nd ANN on the ANN-1 feature representation of the training data.
		/// </summary>
		/// <param name="trainData">Raw training samples.</param>
		/// <param name="net1">Trained ANN-1 used as a feature extractor.</param>
		/// <param name="props2">ANN-2 topology description.</param>
		/// <param name="epochs">Number of training epochs.</param>
		/// <param name="net2">Resulting trained ANN-2.</param>
		/// <returns>Per-epoch training errors.</returns>
		public static List<float> TrainAnn2(List<TrainingSample> trainData, LayeredNeuralNetwork net1, NeuralNetProperties props2, int epochs, out BasicNetwork net2)
		{
			var samples = ConvertToFeatureDataSet(trainData, net1);

			net2 = MachineLearningElements.CreateEncogBasicNetwork(props2);
			return MachineLearningElements.TrainEncogNetwork(net2, samples, epochs);
		}

		/// <summary>
		/// [molecule]
		/// 
		/// Trains 2nd ANN using training and validation data and early stopping mechanism to avoid overtraining.
		/// </summary>
		/// <param name="trainData">Raw training samples.</param>
		/// <param name="validData">Raw validation samples used for early stopping.</param>
		/// <param name="net1">Trained ANN-1 used as a feature extractor.</param>
		/// <param name="props2">ANN-2 topology description.</param>
		/// <param name="epochs">Maximal number of training epochs.</param>
		/// <param name="net2">Resulting trained ANN-2.</param>
		/// <returns>Per-epoch training errors (may be shorter than [epochs] due to early stopping).</returns>
		public static List<float> TrainAnn2(List<TrainingSample> trainData, List<TrainingSample> validData, LayeredNeuralNetwork net1, NeuralNetProperties props2, int epochs, out BasicNetwork net2)
		{
			var samples = ConvertToFeatureDataSet(trainData, net1);
			var validSamples = ConvertToFeatureDataSet(validData, net1);

			net2 = MachineLearningElements.CreateEncogBasicNetwork(props2);
			return MachineLearningElements.TrainEncogNetwork(net2, samples, validSamples, epochs);
		}

		/// <summary>
		/// Converts labelled samples into an Encog data set whose inputs are the
		/// ANN-1 outputs for each sample (factored out of the TrainAnn2 overloads,
		/// which previously duplicated this loop).
		/// </summary>
		/// <param name="data">Raw labelled samples.</param>
		/// <param name="net1">Feature-extracting network.</param>
		private static INeuralDataSet ConvertToFeatureDataSet(List<TrainingSample> data, LayeredNeuralNetwork net1)
		{
			INeuralDataSet samples = new BasicNeuralDataSet();
			for (int i = 0; i < data.Count; ++i)
			{
				// ANN-1 feature vector for the sample.
				var outs = MachineLearningElements.GetOutputs(net1, data[i].Data);

				// convert data from floats to doubles and create a new training pair.
				var outsD = VectorMath.ConvertToDoubles(outs.ToArray());
				var outRow = MatrixMath.GetRow(data[i].Response, 0);
				var outRowD = VectorMath.ConvertToDoubles(outRow);
				samples.Add(new BasicNeuralData(outsD), new BasicNeuralData(outRowD));
			}
			return samples;
		}

		/// <summary>
		/// Trains ANN-1 + ANN-2 as a single Encog network using a gradient algorithm:
		/// builds a three-layer network whose first weight layer is initialized from
		/// ANN-1, then trains the whole stack on the raw samples.
		/// note(review): assumes net1.Layers[1] is the only processing layer of ANN-1
		/// and that row 0 of its transposed weight matrix holds biases — confirm
		/// against LayeredNeuralNetwork's weight layout.
		/// </summary>
		/// <param name="trainData">Raw training samples.</param>
		/// <param name="net1">Trained ANN-1 whose weights seed the united network.</param>
		/// <param name="props2">ANN-2 properties; only its second layer description is used.</param>
		/// <param name="epochs">Number of training epochs.</param>
		/// <param name="net2">Resulting united network.</param>
		/// <returns>Per-epoch training errors.</returns>
		public static List<float> TrainAnn2Uni(List<TrainingSample> trainData, LayeredNeuralNetwork net1, NeuralNetProperties props2, int epochs, out BasicNetwork net2)
		{
			#region - Convert training samples. -
			INeuralDataSet samples = new BasicNeuralDataSet();
			for (int i = 0; i < trainData.Count; ++i)
			{
				// convert data from floats to doubles and create new training samples.
				var inpRow = MatrixMath.GetRow(trainData[i].Data, 0);
				var inpRowD = VectorMath.ConvertToDoubles(inpRow);
				var outRow = MatrixMath.GetRow(trainData[i].Response, 0);
				var outRowD = VectorMath.ConvertToDoubles(outRow);
				samples.Add(new BasicNeuralData(inpRowD), new BasicNeuralData(outRowD));
			}
			#endregion

			//
			// create properties for the united network:
			// ANN-1 input layer -> ANN-1 processing layer -> ANN-2 output layer.
			var net2Props = new NeuralNetProperties();
			net2Props.nodesNumber = new [] { net1.Layers[0].Nodes.Count, net1.Layers[1].Nodes.Count, props2.nodesNumber[1]};
			net2Props.actFunctions = new[] { ActivationFunctions.Linear, net1.Layers[1].Nodes[0].ActivationFunction, props2.actFunctions[1] };
			net2Props.UseBias = false;	// ANN-1 biases are copied into the layer thresholds below instead (was: props2.UseBias).

			//
			// create united network.
			net2 = MachineLearningElements.CreateEncogBasicNetwork(net2Props);

			//
			// copy weights from ANN-1 into the [net2].
			var inputLayer = net2.LayerTags[MachineLearningElements.ENCOG_INPUT_LAYER_TAG].Next[0];	// now inputLayer.WeightMatrix -- contains weights of outcoming connections from the input layer.
			
			#region - Copy weights from ANN-1 into the first layer of ANN-2.
			var w1 = net1.Layers[1].GetWeights();
			var w1tr = MatrixMath.Transpose(w1);

			var rows = w1tr.Count;
			var cols = w1.Count;
			for (int i = 0; i < rows-1; i++)
			{
				for (int j = 0; j < cols; j++)
				{
					inputLayer.WeightMatrix[i, j] = w1tr[i+1][j];	// i+1 because 0th row contains biases.
				}
			}

			// copy bias values.
			var hidLayer = net2.Structure.Layers[1];
			for (int i = 0; i < hidLayer.Threshold.Length; i++)
			{
				hidLayer.Threshold[i] = w1tr[0][i];
			}
			#endregion

			var ers = MachineLearningElements.TrainEncogNetwork(net2, samples, epochs);

			return ers;
		}

		#region - Methods for incremental training. -
		/// <summary>
		/// [molecule]
		/// 
		/// Continues training of already existing ANN1 using specified EA parameters and an objective function.
		/// </summary>
		/// <param name="trainData">Training samples (must not be null).</param>
		/// <param name="ne">NE algorithm whose population state carries over from a previous run.</param>
		/// <param name="eaParams">Evolutionary-algorithm settings.</param>
		/// <param name="objFunction">Objective function guiding the evolution.</param>
		/// <param name="net1">Network to keep training; receives the best weights found.</param>
		/// <returns>Per-generation fitness statistics.</returns>
		public static List<Stats> ContinueTrainAnn1(List<TrainingSample> trainData, NeuroEvolutionaryAlgorithm ne, EAParameters eaParams, NEObjFunction objFunction, LayeredNeuralNetwork net1)
		{
			// fix: report the actual method in the error message (was "[TrainAnn1]").
			if (trainData == null) throw new Exception("[ContinueTrainAnn1] error: Undefined training data.");

			// Resume the NE algorithm and load the best weights found so far.
			FitnessComparator.MinimizeFitness = objFunction.MinimizeFitness;
			ne.FitnessFunction = objFunction;
			ne.NeuralNetwork = net1;
			ne.Continue(eaParams);
			net1.SetConnectionWeights(ne.BestIndividual.Genes.ToArray());

			return ne.FitnessStats;
		}
		#endregion
		#endregion

		#region - Testing. -
		/// <summary>
		/// [molecule]
		/// 
		/// Test given combined ANN on the specified data set with labelled entries.
		/// </summary>
		/// <param name="combAnn">Combined Ann.</param>
		/// <param name="data">Data for testing.</param>
		/// <returns>Error rate (misclassified fraction); 0 for an empty data set.</returns>
		public static float TestCombinedAnn(CombinedAnn combAnn, List<TrainingSample> data)
		{
			if (combAnn.Network1 == null || combAnn.Network2 == null) throw new Exception("[CombinedAnn.CombinedAnnElements] error: Perform training first!");

			int size = data.Count;
			// fix: an empty data set previously yielded 0/0 = NaN; report zero error instead.
			if (size == 0) { return 0f; }

			//
			// test combination of the 1-st and the second ANN.
			int erCount = 0;
			for (int i = 0; i < size; ++i)
			{
				int winner = combAnn.Recognize(data[i].Data);
				// the expected class is the position of the 1.0 entry in the response row.
				var rowReq = MatrixMath.GetRow(data[i].Response, 0);
				var reqWinner = VectorMath.FirstIndexOf(rowReq, 1.0f);

				if (winner != reqWinner)
				{
					++erCount;
				}
			}

			//
			// todo: calculate confusion matrix.
			return (float)erCount / size;
		}
		
		/// <summary>
		/// [molecule]
		/// 
		/// Tests given array of combined ANNs using the given data.
		/// </summary>
		/// <param name="nets">List of trained combined ANNs.</param>
		/// <param name="classIndex">Output-to-class-ID correspondence.</param>
		/// <param name="testData">Test data set.</param>
		/// <returns>List of stats for each combined ANN.</returns>
		public static List<Stats> TestTrainedCombinedAnns (List<CombinedAnnTrainingRes> nets, Dictionary<int, int> classIndex, List<TrainingSample> testData)
		{
			var stats = new List<Stats>();

			foreach (var trainingRes in nets)
			{
				// Rebuild a combined ANN around the trained ANN-1.
				var candidate = new CombinedAnn
				{
					Network1 = trainingRes.Network1,
					ClassIndex = classIndex,
					UnitedTraining = trainingRes.AnnProps.UnitedTraining
				};

				// Measure the error rate of every ANN-2 candidate from this run.
				var errorRates = new List<float>();
				foreach (var ann2 in trainingRes.Networks2)
				{
					candidate.Network2 = ann2;
					errorRates.Add(TestCombinedAnn(candidate, testData));
				}

				stats.Add( VectorMath.CalculateStats(errorRates.ToArray()) );
			}

			return stats;
		}
		#endregion
	}

	/// <summary>
	/// [molecule]
	/// 
	/// Class to store results of the Combined ANN training.
	/// </summary>
	public class CombinedAnnTrainingRes
	{
		/// <summary>
		/// Properties for Combined ANN (the settings the training was run with).
		/// </summary>
		public CombinedAnnProperties AnnProps { get; set; }

		/// <summary>
		/// Statistics for the ANN-1 training (evolution statistics, one entry per generation).
		/// </summary>
		public List<Stats> Ann1TrainingStats { get; set; }
		/// <summary>
		/// Statistics for the ANN-2 training (per-epoch errors of the selected ANN-2).
		/// </summary>
		public List<float> Ann2TrainingStats { get; set; }

		/// <summary>
		/// ANN-1.
		/// </summary>
		public LayeredNeuralNetwork Network1 { get; set; }
		/// <summary>
		/// ANN-2 (the best candidate selected over all trials).
		/// </summary>
		public BasicNetwork Network2 { get; set; }

		/// <summary>
		/// The number of epochs for each ANN-2, which were actually used to train ANN-2. Can differ from the training settings due to early stopping.
		/// </summary>
		public List<int> Network2TrainingEpochs { get; set; }

		/// <summary>
		/// List of all ANN-2 networks obtained at different trials.
		/// </summary>
		public List<BasicNetwork> Networks2 { get; set; }
	}
}
