﻿using System;
using System.Collections.Generic;
using MathNet.Numerics.Distributions;
using MentalAlchemy.Atomics;

namespace MentalAlchemy.Molecules.MachineLearning.GradEAAlg
{
	/// <summary>
	/// Base class for all evolutionary-algorithm operators (mutation, crossover, ...).
	/// </summary>
	[Serializable]
	public abstract class Operator
	{
		/// <summary>Algorithm instance this operator is attached to.</summary>
		public EvolutionaryAlgorithm Owner { get; set; }

		/// <summary>Parameter set consulted by the operator (see <see cref="SetParameters"/>).</summary>
		public EAParameters Parameters { get; set; }

		/// <summary>Human-readable operator name.</summary>
		public string Name { get; set; }

		/// <summary>When true, the operator reads from and writes to the global changes log.</summary>
		public bool UseHistory { get; set; }

		/// <summary>
		/// Stores the given parameter set for later use by <see cref="Operate"/>.
		/// </summary>
		/// <param name="pars">Parameters to attach to this operator.</param>
		public virtual void SetParameters (EAParameters pars)
		{
			Parameters = pars;
		}

		/// <summary>
		/// Applies the operator to the given individuals and returns the produced offspring.
		/// </summary>
		/// <param name="inds">Input individuals.</param>
		/// <returns>Resulting individuals.</returns>
		public abstract List<AbstractIndividual> Operate(List<AbstractIndividual> inds);
	}

	#region - Mutation operators. -
	/// <summary>
	/// [molecule]
	/// 
	/// Base class for mutation operators.
	/// </summary>
	[Serializable]
	public abstract class Mutation : Operator { }

	/// <summary>
	/// [molecule]
	/// 
	/// Mutation operator that perturbs an individual's genes with zero-mean Gaussian
	/// noise (sigma taken from <see cref="EAParameters.MutationStep"/>), optionally
	/// reusing promising changes from the global changes history.
	/// </summary>
	[Serializable]
	public class GaussianMutation : Mutation
	{
		public const string NAME = "GaussianMutation";

		protected GaussianMutation() { }

		/// <summary>Creates the operator bound to the given algorithm instance.</summary>
		/// <param name="ea">Owning evolutionary algorithm; supplies the fitness function.</param>
		public GaussianMutation(EvolutionaryAlgorithm ea)
		{
			Owner = ea;
			Name = NAME;
		}

		/// <summary>
		/// Expects a single individual with an estimated fitness value on input and
		/// returns one mutated individual. Returns an empty list when no input
		/// individual is supplied (consistent with <see cref="WeightedCrossover"/>).
		/// </summary>
		/// <param name="inds">Input individuals; only the first one is used.</param>
		/// <returns>A single-element list with the mutated, evaluated individual.</returns>
		public override List<AbstractIndividual> Operate(List<AbstractIndividual> inds)
		{
			// Guard: this operator needs exactly one parent; produce no offspring otherwise.
			if (inds == null || inds.Count == 0) { return new List<AbstractIndividual>(); }

			var sigma = Parameters.MutationStep;
			var rand = new NormalDistribution(0, sigma);
			var ind = inds[0];
			var resInd = (Individual)ind.Clone();

			//
			// Select the change to apply: pick the best change from history when enabled,
			// otherwise (or when history yields nothing) fall back to a random Gaussian change.
			//
			// NOTE: the history search epsilon equals sigma — this may not be the best choice.
			ChangeData ch = null;
			if (UseHistory)
			{
				ch = GradEAElements.PickChanges(ChangesLog.Instance, this, (Individual)ind, VectorMath.EuclidianDistance, sigma);
			}
			if (ch == null)
			{
				ch = new ChangeData();
				ch.Change = VectorMath.CreateRandomVector(rand, ind.Size);
			}

			// Apply the changes ...
			resInd.ApplyChanges(ch.Change);

			// ... and evaluate the resulting individual.
			resInd.Fitness = Owner.FitnessFunction.Compute(resInd.Genes.ToArray());

			// Register the applied change so later mutations can reuse it.
			if (UseHistory)
			{
				ch.Actor = this;
				ch.Individual = (Individual)ind;
				ch.FitnessChange = resInd.Fitness - ind.Fitness;
				ChangesLog.Instance.RegisterChanges(ch);
			}

			//
			// return results.
			var res = new List<AbstractIndividual>();
			res.Add(resInd);
			return res;
		}
	} 
	#endregion

	#region - Crossover operators. -
	/// <summary>
	/// [molecule]
	/// 
	/// Base class for crossover operators.
	/// </summary>
	public abstract class Crossover : Operator{}

	/// <summary>
	/// [molecule]
	/// 
	/// Performs weighted crossing of two individuals using their fitness values as weights.
	/// The crossing uses a linear approximation of the gradient of the fitness function
	/// between the parents to produce two offspring displaced along that direction.
	/// </summary>
	public class WeightedCrossover : Crossover
	{
		public const string NAME = "WeightedCrossover";

		/// <summary>When true, the parents' fitness weights are normalized to sum to one.</summary>
		public bool UseNormalizedWeights { get; set; }

		protected WeightedCrossover() { }

		/// <summary>Creates the operator bound to the given algorithm instance.</summary>
		/// <param name="ea">Owning evolutionary algorithm; supplies the fitness function.</param>
		public WeightedCrossover(EvolutionaryAlgorithm ea)
		{
			Owner = ea;
			Name = NAME;
		}

		#region Overrides of Operator
		/// <summary>
		/// Crosses the first two individuals of <paramref name="inds"/> and returns two
		/// offspring. Returns an empty list when fewer than two parents are supplied.
		/// If the parents are identical, the offspring are unmodified copies of the first parent.
		/// </summary>
		/// <param name="inds">Input individuals; only the first two are used.</param>
		/// <returns>A two-element list with the evaluated offspring, or an empty list.</returns>
		public override List<AbstractIndividual> Operate(List<AbstractIndividual> inds)
		{
			if (inds == null || inds.Count < 2) { return new List<AbstractIndividual>(); }

			var ind1 = (Individual)inds[0];
			var ind2 = (Individual)inds[1];

			//
			// use fitness as weight.
			var w1 = inds[0].Fitness.Value;
			var w2 = inds[1].Fitness.Value;

			if (UseNormalizedWeights)
			{
				var sumW = w1 + w2;
				// Guard against division by zero: normalizing by a zero sum would turn
				// both weights into NaN and poison the children's genes.
				if (sumW != 0)
				{
					w1 /= sumW;
					w2 /= sumW;
				}
			}

			//
			// define offset and distance between the individuals.
			var sub = VectorMath.Sub(ind1.Genes.ToArray(), ind2.Genes.ToArray());
			var dist = VectorMath.L2Norm(sub);

			// NOTE(review): both children start from ind1, yet the second child's change is
			// registered against ind2 below. `new Individual(ind2)` may have been intended
			// for child2 — confirm before changing, as it alters search behavior.
			var child1 = new Individual(ind1);
			var child2 = new Individual(ind1);
			if (dist > 0)
			{	// if parents are not identical.
				//
				// compute gradient estimate along the parent-to-parent direction.
				var grad = Math.Abs(w1 - w2) / dist;
				var delta = VectorMath.Mul(sub, grad);

				//
				// first child: random-sized step along the gradient direction.
				var d1 = VectorMath.Mul(delta, (float)ContextRandom.NextDouble());	// delta1.
				child1.ApplyChanges(d1);
				child1.Fitness = Owner.FitnessFunction.Compute(child1.Genes.ToArray());

				// second child: mirrored step.
				var d2 = VectorMath.Mul(d1, -1f);	// delta2.
				child2.ApplyChanges(d2);
				child2.Fitness = Owner.FitnessFunction.Compute(child2.Genes.ToArray());

				if (UseHistory)
				{
					ChangesLog.Instance.RegisterChanges(this, ind1, d1, child1.Fitness - inds[0].Fitness);
					ChangesLog.Instance.RegisterChanges(this, ind2, d2, child2.Fitness - inds[1].Fitness);
				}
			}

			var res = new List<AbstractIndividual>();
			res.Add(child1);
			res.Add(child2);
			return res;
		}
		#endregion
	}
	#endregion
}