﻿using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.Serialization;
using System.Runtime.Serialization.Formatters.Binary;
using MentalAlchemy.Atomics;

namespace MentalAlchemy.Molecules.MachineLearning
{
	/// <summary>
	/// Class for the "probabilistic" classifier.
	/// </summary>
	[Serializable]
	public class ProbabilisticClassifier : IClassifier
	{
		// Training data: reference vectors with per-class vote counts/probabilities.
		protected List<VectorProb> tData = new List<VectorProb>();
		// Number of classes seen during training; -1 until Train/Load is called.
		protected int classCount = -1;

		#region - Public properties. -
		/// <summary>
		/// Property for setting precision in processing of training and recognition data.
		/// </summary>
		public float Epsilon { get; set; }

		/// <summary>
		/// Defines proportion of training data to be used in a conventional way (as 'seed').
		/// </summary>
		public float SeedRate { get; set; }

		/// <summary>
		/// Random numbers generator.
		/// </summary>
		public Random RNG { get; set; }
		#endregion

		#region - Construction. -
		/// <summary>
		/// Creates a classifier with default settings: exact matching (Epsilon = 0)
		/// and the whole training set used as seed (SeedRate = 1).
		/// </summary>
		public ProbabilisticClassifier ()
		{
			Epsilon = 0f;
			RNG = new Random();
			SeedRate = 1f;
		}

		/// <summary>
		/// Copy constructor. Copies settings, class count and training data
		/// (shallow copy of the [VectorProb] entries); a fresh RNG is created.
		/// </summary>
		/// <param name="pc">Classifier to copy.</param>
		public ProbabilisticClassifier(ProbabilisticClassifier pc)
		{
			Epsilon = pc.Epsilon;
			RNG = new Random();
			SeedRate = pc.SeedRate;
			classCount = pc.classCount;
			tData = new List<VectorProb>(pc.tData);
		}
		#endregion

		#region - Implementation of the [IClassifier] interface. -
		/// <summary>
		/// Trains the classifier using the current Epsilon, SeedRate and RNG settings.
		/// </summary>
		/// <param name="data">Training data.</param>
		public void Train(List<TrainingSample> data)
		{
			Train(data, SeedRate, RNG, Epsilon);
		}

		/// <summary>
		/// Recognizes the given object using the current Epsilon setting.
		/// </summary>
		/// <param name="obj">Object description.</param>
		/// <returns>Class ID or '-1' if the class is unrecognized.</returns>
		public virtual int Recognize(float[,] obj)
		{
			return Recognize(obj, Epsilon);
		}

		/// <summary>
		/// Votes for the given object using the current Epsilon setting.
		/// </summary>
		/// <param name="obj">Object description.</param>
		/// <returns>Dictionary of votes per class.</returns>
		public Dictionary<int, int> GetClassVotes(float[,] obj)
		{
			return GetClassVotes(obj, Epsilon);
		}

		/// <summary>
		/// Gets per-class probabilities for the given object using the current Epsilon setting.
		/// </summary>
		/// <param name="obj">Object description.</param>
		/// <returns>Dictionary of probabilities per class.</returns>
		public virtual Dictionary<int, float> GetClassProbabilities(float[,] obj)
		{
			return GetClassProbabilities(obj, Epsilon);
		}

		/// <summary>
		/// Creates a copy of this classifier via the copy constructor.
		/// </summary>
		public object Clone()
		{
			return new ProbabilisticClassifier(this);
		}
		#endregion

		#region - Training methods. -
		/// <summary>
		/// Train the probabilistic classifier using given [data] and setting some portion of it as a seed.
		/// </summary>
		/// <param name="data">Training data. The list itself is not modified.</param>
		/// <param name="seedRate">Proportion of training data which should be used as a seed (clamped to [0; 1] effectively).</param>
		/// <param name="rand">RNG used for seed sampling.</param>
		/// <param name="eps">Precision parameter.</param>
		public void Train(List<TrainingSample> data, float seedRate, Random rand, float eps)
		{
			var inputData = new List<TrainingSample>(data);
			classCount = MachineLearningElements.CalculateClasses(inputData.ToArray());

			// Select seed samples at random, without replacement.
			// Clamp the count so that a seedRate above 1 cannot index into an empty list.
			var seedCount = Math.Min(inputData.Count, (int)(inputData.Count * seedRate + 0.5f));
			var seed = new List<TrainingSample>();
			for (var i = 0; i < seedCount; i++)
			{
				var idx = rand.Next(inputData.Count);
				seed.Add(inputData[idx]);
				inputData.RemoveAt(idx);
			}

			Seed(seed, classCount, eps);	// initialize the classifier.
			foreach (var sample in inputData)
			{	// update classifier using the remaining training samples.
				Update(sample, classCount, eps);
			}
		}

		/// <summary>
		/// Initialize classifier's data: clears the current training data and
		/// re-seeds it from the given samples.
		/// </summary>
		/// <param name="data">Seed samples.</param>
		/// <param name="clCount">Overall number of classes.</param>
		/// <param name="eps">Precision parameter.</param>
		public virtual void Seed(List<TrainingSample> data, int clCount, float eps)
		{
			tData.Clear();
			var vprobs = MachineLearningElements.ConvertToVectorProbs(data);
			foreach (var sample in data)
			{
				// get separate row-vectors from each training sample.
				var rows = MatrixMath.ConvertToRowsList(sample.Data);
				AddSeed(vprobs, rows, sample.ClassID, clCount, eps);
			}
		}

		/// <summary>
		/// Adds given list of vectors with the same [classId] to the existing training data with respect to the accuracy parameter.
		/// </summary>
		/// <param name="vprobs">List of reference [VectorProb] entities.</param>
		/// <param name="rows">Vectors to add.</param>
		/// <param name="classId">Common class ID for each vector.</param>
		/// <param name="clCount">Overall number of classes.</param>
		/// <param name="eps">Accuracy parameter.</param>
		protected virtual void AddSeed(List<VectorProb> vprobs, List<float[]> rows, int classId, int clCount, float eps)
		{
			var tempData = new List<VectorProb>();
			foreach (var row in rows)
			{
				var pairs = MachineLearningElements.GetSimilarVectorProbEntries(vprobs, row, VectorMath.MaxAbsDistance, eps);

				if (pairs.Count == 0)
				{	// if no similar pairs are found then create a new one.
					var pair = new VectorProb();
					pair.Init(row);
					pair.counts.Add(classId, 1);
					tempData.Add(pair);
				}
				else
				{	// else average found pairs to create a kind of 'combined' response.
					var pair = MachineLearningElements.AverageIgnoreCounts(pairs);

					// Register one more vote for [classId] (single dictionary lookup;
					// a missing key counts as zero votes).
					int votes;
					pair.counts.TryGetValue(classId, out votes);
					pair.counts[classId] = votes + 1;
					tempData.Add(pair);
				}
			}

			//
			// validate obtained object description written in tempData.
			// the resulting class should be equal to [classId].
			if (!ValidateDescription(tempData, classId))
			{
				tempData = CorrectDescription(tempData, classId);
			}

			tData.AddRange(tempData);
		}

		/// <summary>
		/// Updates training data using given training sample and precision.
		/// </summary>
		/// <param name="sample">Training sample.</param>
		/// <param name="clCount">Overall number of classes.</param>
		/// <param name="eps">Precision parameter.</param>
		public virtual void Update(TrainingSample sample, int clCount, float eps)
		{
			var rows = MatrixMath.ConvertToRowsList(sample.Data);
			AddSeed(tData, rows, sample.ClassID, clCount, eps);
		}

		/// <summary>
		/// [molecule]
		/// 
		/// Indicates whether given object description corresponds to the specified class.
		/// </summary>
		/// <param name="vps">Description.</param>
		/// <param name="classId">Class ID to check.</param>
		/// <returns>[True] if object class equals to [classId] and [False] otherwise.</returns>
		public static bool ValidateDescription (List<VectorProb> vps, int classId)
		{
			var counts = MachineLearningElements.SumCounts(vps);
			var winClassId = MachineLearningElements.GetMaxClassId(counts, -1);
			return winClassId == classId;
		}

		/// <summary>
		/// Corrects an object description so that the overall vote goes to [classId]:
		/// 'good' entries are kept as-is; 'wrong' entries (sorted by how strongly they
		/// vote against [classId]) are accepted when harmless or have their votes
		/// swapped in favor of [classId] otherwise.
		/// </summary>
		/// <param name="vps">Description to correct (entries may be mutated).</param>
		/// <param name="classId">Required winning class ID.</param>
		/// <returns>Corrected description.</returns>
		public static List<VectorProb> CorrectDescription(List<VectorProb> vps, int classId)
		{
			var tempVps = new List<VectorProb>();
			tempVps.AddRange(vps);

			var res = new List<VectorProb>();

			//
			// move 'good' [VectorProb] objects, which description equals to the [classId].
			var rates = new Dictionary<VectorProb, float>();
			for (int i = 0; i < vps.Count; i++)
			{
				var tempVp = tempVps[0];
				var vpClass = MachineLearningElements.GetMaxClassId(tempVp.counts, -1);

				if (vpClass == classId)
				{
					res.Add(tempVp);	// move element to [res].
				}
				else
				{
					// calculate rate, which shows how strong the votes for the 'wrong' class.
					var rate = 1.0f;
					if (vpClass >= 0)
					{
						int reqVotes;
						rate = tempVp.counts.TryGetValue(classId, out reqVotes)
							? (float)tempVp.counts[vpClass] / reqVotes
							: tempVp.counts[vpClass];
					}
					rates = StructMath.InsertDescendingValue(rates, tempVp, rate);
				}
				tempVps.RemoveAt(0);
			}

			//
			// calculate votes in the [res] in its current state.
			var resCounts = MachineLearningElements.SumCounts(res);
			
			//
			// [rates] are sorted by rate value in the descending order.
			foreach (var rate in rates)
			{
				StructMath.Accumulate(ref resCounts, rate.Key.counts);
				var vpClass = MachineLearningElements.GetMaxClassId(resCounts, -1);

				if (vpClass == classId)
				{	// if there is no contradiction then accept [rate.Key] object.
					res.Add(rate.Key);
				}
				else
				{
					// else correct [rate.Key].
					StructMath.Deaccumulate(ref resCounts, rate.Key.counts);

					// Swap values between counts[classId] and counts[maxValueKey].
					// [classId] may be absent from this entry's counts (that is exactly
					// why it voted for another class); treat a missing key as zero votes
					// instead of throwing KeyNotFoundException.
					int reqClassCount;
					rate.Key.counts.TryGetValue(classId, out reqClassCount);
					var maxValueKey = StructMath.FindMaxValueKey(rate.Key.counts);
					rate.Key.counts[classId] = rate.Key.counts[maxValueKey];
					rate.Key.counts[maxValueKey] = reqClassCount;

					StructMath.Accumulate(ref resCounts, rate.Key.counts);
					res.Add(rate.Key);
				}
			}

			return res;
		}
		#endregion

		#region - Recognition. -

		/// <summary>
		/// Performs recognition by the given object description.
		/// </summary>
		/// <param name="obj">Object description.</param>
		/// <param name="eps">Precision.</param>
		/// <returns>Class ID or '-1' if the class is unrecognized.</returns>
		public virtual int Recognize(float[,] obj, float eps)
		{
			var counts = GetClassVotes(obj, eps);
			return MachineLearningElements.GetMaxClassId(counts, -1);
		}

		/// <summary>
		/// Perform voting for the given object using training data.
		/// </summary>
		/// <param name="obj">Object description.</param>
		/// <param name="eps">Precision.</param>
		/// <returns>Dictionary of votes per class.</returns>
		public Dictionary<int, int> GetClassVotes(float[,] obj, float eps)
		{
			var rows = MatrixMath.ConvertToRowsList(obj);
			var counts = new Dictionary<int, int>();
			foreach (var row in rows)
			{
				var pairs = FindAllVectorPairs(row, eps);
				// NOTE(review): only the first pair's counts is null-checked — presumably
				// all entries share initialization; confirm against [VectorProb].
				if (pairs.Count > 0 && pairs[0].counts != null)
				{
					var pair = MachineLearningElements.SumCounts(pairs);
					foreach (var entry in pair)
					{	// accumulate votes (single lookup; missing key counts as zero).
						int acc;
						counts.TryGetValue(entry.Key, out acc);
						counts[entry.Key] = acc + entry.Value;
					}
				}
			}
			return counts;
		}

		/// <summary>
		/// For the given object get probabilities that it belongs to one of known classes.
		/// </summary>
		/// <param name="obj">Object description.</param>
		/// <param name="eps">Precision.</param>
		/// <returns>Dictionary of probabilities per class, averaged over matched rows.</returns>
		public virtual Dictionary<int, float> GetClassProbabilities(float[,] obj, float eps)
		{
			var rows = MatrixMath.ConvertToRowsList(obj);
			var probs = new Dictionary<int, float>();
			var pairCount = 0;
			foreach (var row in rows)
			{
				var pairs = FindAllVectorPairs(row, eps);
				if (pairs.Count > 0)
				{
					var pair = MachineLearningElements.AverageIgnoreCounts(pairs);
					foreach (var entry in pair.probs)
					{	// accumulate probabilities (single lookup; missing key is zero).
						float acc;
						probs.TryGetValue(entry.Key, out acc);
						probs[entry.Key] = acc + entry.Value;
					}
					pairCount++;
				}
			}
			// Normalize by the number of rows that actually matched.
			return pairCount != 0 ? StructMath.MulValues(probs, 1.0f / pairCount) : probs;
		}
		#endregion

		#region - Utility methods. -

		/// <summary>
		/// Looks through training data and searches for entries, containing the given vector with respect to the accuracy parameter.
		/// </summary>
		/// <param name="v">Vector to look for.</param>
		/// <param name="eps">Accuracy parameter.</param>
		/// <returns>List of matching training data entries (empty if none match).</returns>
		protected List<VectorProb> FindAllVectorPairs(float[] v, float eps)
		{
			if (tData.Count == 0) return new List<VectorProb>();

			// collect all vectors which are within the [eps] borders.
			var res = new List<VectorProb>();
			foreach (var pair in tData)
			{
				if (!VectorMath.Equal(pair.vector, v, eps)) continue;
				res.Add(pair);
			}
			return res;
		}

		/// <summary>
		/// Finds closest vector from the training set to the given one.
		/// </summary>
		/// <param name="v">Input vector.</param>
		/// <param name="eps">Accuracy parameter; with eps = 0 only an exact match is accepted.</param>
		/// <returns>Closest vector from the training set, or an empty [VectorProb] if none qualifies.</returns>
		protected VectorProb FindClosestVectorPair(float[] v, float eps)
		{
			var res = new VectorProb();
			var dist = float.MaxValue;
			foreach (var pair in tData)
			{
				// NOTE(review): [eps] is compared against the SQUARED Euclidean distance
				// here — callers passing a plain distance threshold should square it; confirm.
				var tempDist = VectorMath.EuclidianDistanceSqr(v, pair.vector);
				var withinEps = eps == 0f ? tempDist == 0f : tempDist < eps;
				if (withinEps && tempDist < dist)
				{
					dist = tempDist;
					res = pair;
				}
			}
			return res;
		}

		/// <summary>
		/// Returns the number of description vectors which are used for recognition.
		/// </summary>
		/// <returns>Count of training vectors.</returns>
		public int GetTrainingVectorsCount ()
		{
			return tData.Count;
		}

		/// <summary>
		/// Save probabilistic classifier and all its data into a binary file.
		/// </summary>
		/// <param name="filename">Filename.</param>
		public virtual void Save(string filename)
		{
			FileIO.Serialize(filename, this);
		}

		/// <summary>
		/// Load probabilistic classifier and all its data from a binary file.
		/// </summary>
		/// <param name="filename">Filename.</param>
		public virtual void Load(string filename)
		{
			// SECURITY: BinaryFormatter is unsafe for untrusted input (removed in .NET 9).
			// Only load files produced by this application's own Save().
			ProbabilisticClassifier probc;
			IFormatter formatter = new BinaryFormatter();
			// [using] guarantees the stream is closed even if deserialization throws.
			using (Stream stream = new FileStream(filename, FileMode.Open, FileAccess.Read, FileShare.None))
			{
				probc = (ProbabilisticClassifier)formatter.Deserialize(stream);
			}

			// Restore settings so that a Save/Load round trip preserves classifier state.
			Epsilon = probc.Epsilon;
			SeedRate = probc.SeedRate;

			tData = new List<VectorProb>();
			if (probc.tData != null)
			{
				foreach (var sample in probc.tData) { tData.Add(sample); }
			}

			#region - Calculate number of classes in training data. -
			// NOTE(review): this approximates the class count from the first entry's
			// votes; the first vector may not have seen every class — confirm.
			classCount = tData.Count > 0 ? tData[0].counts.Count : -1;
			#endregion
		}
		#endregion
	}

	/// <summary>
	/// [molecule]
	/// 
	/// Class for committee of [ProbabilisticClassifier] objects.
	/// </summary>
	[Serializable]
	public class ProbabilisticClassifierCommittee
	{
		// Committee members. The field name is kept for binary-serialization compatibility.
		private List<ProbabilisticClassifier> probc = new List<ProbabilisticClassifier>();

		/// <summary>
		/// List of algorithms.
		/// </summary>
		public List<ProbabilisticClassifier> Algorithms
		{
			get { return probc; }
		}

		/// <summary>
		/// Train [count] probabilistic classifiers.
		/// </summary>
		/// <param name="count">Number of classifiers to create.</param>
		/// <param name="data">Training data.</param>
		/// <param name="seedRate">Proportion of seed descriptions.</param>
		/// <param name="rand">RNG.</param>
		/// <param name="eps">Precision.</param>
		public void Train(int count, List<TrainingSample> data, float seedRate, Random rand, float eps)
		{
			probc.Clear();
			while (probc.Count < count)
			{
				// Each member is trained on the same data; randomness of the seed
				// selection makes the members differ.
				var member = new ProbabilisticClassifier();
				member.Train(data, seedRate, rand, eps);
				probc.Add(member);
			}
		}

		/// <summary>
		/// Recognize using averaging over trained classifiers.
		/// </summary>
		/// <param name="obj">Object description.</param>
		/// <param name="useVotes">Flag indicating that recognition should be based on votes rather than probabilities.</param>
		/// <param name="eps">Precision.</param>
		/// <returns>Class ID or -1 if no winning class is found.</returns>
		public int Recognize(float[,] obj, bool useVotes, float eps)
		{
			if (probc.Count == 0) return -1;

			if (useVotes)
			{
				// Accumulate raw votes from every committee member.
				var votes = new Dictionary<int, int>();
				foreach (var member in probc)
				{
					var memberVotes = member.GetClassVotes(obj, eps);
					StructMath.Accumulate(ref votes, memberVotes);
				}
				return MachineLearningElements.GetMaxClassId(votes, -1);
			}

			// Accumulate per-class probabilities from every committee member.
			var probs = new Dictionary<int, float>();
			foreach (var member in probc)
			{
				var memberProbs = member.GetClassProbabilities(obj, eps);
				StructMath.Accumulate(ref probs, memberProbs);
			}
			return MachineLearningElements.GetMaxClassId(probs, -1);
		}
	}
}
