﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using MentalAlchemy.Atomics;

namespace MentalAlchemy.Molecules.MachineLearning
{
	/// <summary>
	/// [molecule]
	/// 
	/// Implements AdaBoost.M1 algorithm (Freund and Schapire, 1996).
	/// Combines the votes of several weak learners; each learner's vote is
	/// weighted by -log(beta) computed during training.
	/// </summary>
	public class AdaBoostM1 : IClassifier
	{
		#region - Public properties. -
		/// <summary>
		/// Weak learner required for the boosting algorithm.
		/// A fresh clone of it is trained on every boosting round.
		/// </summary>
		public IClassifier WeakLearner { get; set; }

		/// <summary>
		/// Number of weak learners to train (an upper bound; training may stop early).
		/// </summary>
		public int TrainIterations { get; set; }

		/// <summary>
		/// Distribution over the training samples (initial weight per sample).
		/// </summary>
		public float[] SamplesDistribution { get; set; }

		/// <summary>
		/// Result of the last <see cref="Train"/> call; null until trained.
		/// </summary>
		public AdaBoostHypothesis TrainedClassifier { get; set; }
		#endregion
        
		#region - IClassifier interface implementation. -
		/// <summary>
		/// Trains the boosted ensemble on the given data.
		/// </summary>
		/// <param name="trainData">Training samples.</param>
		public void Train(List<TrainingSample> trainData)
		{
			TrainedClassifier = BoostingElements.TrainAdaBoostM1(trainData, WeakLearner, SamplesDistribution, TrainIterations);
		}

		/// <summary>
		/// Classifies [obj] with the trained ensemble.
		/// </summary>
		/// <param name="obj">Object to classify.</param>
		/// <returns>Winning class ID, or -1 when the ensemble is not (successfully) trained.</returns>
		public int Recognize(float[,] obj)
		{
			if (TrainedClassifier == null) return -1;

			//
			// calculate responses for each class ID.
			var resps = GetClassProbabilities(obj);
			if (resps.Count == 0) return -1;	// no weak learner was accepted during training.

			//
			// and define winning class ID.
			var key = StructMath.FindMaxValueKey(resps);
			return key;
		}

		/// <summary>
		/// Counts how many weak learners vote for each class ID (unweighted votes).
		/// </summary>
		/// <param name="obj">Object to classify.</param>
		/// <returns>Map from class ID to vote count; empty when not trained.</returns>
		public Dictionary<int, int> GetClassVotes(float[,] obj)
		{
			var res = new Dictionary<int, int>();
			if (TrainedClassifier == null) return res;

			foreach (var cls in TrainedClassifier.Classifiers)
			{
				var clId = cls.Recognize(obj);
				if (!res.ContainsKey(clId)) res.Add(clId, 1);
				else res[clId]++;
			}
			return res;
		}

		/// <summary>
		/// Accumulates the weighted vote (-log(beta)) of every weak learner per class ID.
		/// </summary>
		/// <param name="obj">Object to classify.</param>
		/// <returns>Map from class ID to accumulated vote weight; empty when not trained.</returns>
		public Dictionary<int, float> GetClassProbabilities(float[,] obj)
		{
			var res = new Dictionary<int, float>();
			if (TrainedClassifier == null) return res;	// guard: previously threw NullReferenceException.

			var t = TrainedClassifier.Classifiers.Count;
			for (int i = 0; i < t; i++)
			{
				var clId = TrainedClassifier.Classifiers[i].Recognize(obj);
				var probValue = -(float)Math.Log(TrainedClassifier.Beta[i]);

				if (!res.ContainsKey(clId)) res.Add(clId, probValue);
				else res[clId] += probValue;
			}

			return res;
		}

		public object Clone()
		{
			throw new System.NotImplementedException("AdaBoostM1.Clone");
		}
		#endregion
	}

	/// <summary>
	/// [molecule]
	/// 
	/// Class to represent AdaBoost training result.
	/// </summary>
	public class AdaBoostHypothesis
	{
		/// <summary>
		/// Beta coefficient per boosting round; a learner's vote weight is -log(beta).
		/// </summary>
		public float[] Beta { get; set; }

		/// <summary>
		/// Weak learners accepted during training.
		/// </summary>
		public List<IClassifier> Classifiers { get; set; }

		public AdaBoostHypothesis()
		{
			// Start with an empty ensemble; Beta is assigned by the training routine.
			this.Classifiers = new List<IClassifier>();
		}
	}

	/// <summary>
	/// Static helpers for boosting algorithms.
	/// </summary>
	public class BoostingElements
	{
		/// <summary>
		/// Trains an AdaBoost.M1 ensemble (Freund and Schapire, 1996).
		/// </summary>
		/// <param name="trainData">Training samples.</param>
		/// <param name="weakLearner">Prototype weak learner; a clone of it is trained per round.</param>
		/// <param name="distr">Initial weights over the training samples, one per sample.</param>
		/// <param name="iterCount">Maximum number of boosting rounds.</param>
		/// <returns>Hypothesis with the accepted weak learners and their beta coefficients.</returns>
		public static AdaBoostHypothesis TrainAdaBoostM1(List<TrainingSample> trainData, IClassifier weakLearner, float[] distr, int iterCount)
		{
			const float Epsilon = 1e-10f;	// keeps beta > 0 so -log(beta) stays finite downstream.

			var w = (float[])distr.Clone();
			var res = new AdaBoostHypothesis();
			res.Beta = new float[iterCount];

			for (int i = 0; i < iterCount; i++)
			{
				//
				// calculate normalized vector of weights.
				var p = (float[])w.Clone();
				var sump = VectorMath.Sum(p);
				VectorMath.Mul(ref p, 1f/sump);

				//
				// todo: modify classifiers to accept distribution [p] over samples.
				// todo: or make sampling from the trainData according to the [p].
				var sTrainData = new List<TrainingSample>(trainData);
				if (i != 0) sTrainData = MachineLearningElements.ResampleByDistribution(sTrainData, p);	// skip resampling on the 1st iteration.

				var cls = (IClassifier)weakLearner.Clone();
				cls.Train(sTrainData);

				//
				// calculate classifier's error.
				bool[] resps;
				var er = CalculateError(cls, trainData, p, out resps);
				// AdaBoost.M1 requires the weak error to be strictly below 1/2;
				// at er == 0.5 beta == 1 and the weight update is a no-op (was: er > 0.5).
				if (er >= 0.5f) {break;}

				res.Classifiers.Add(cls);	// add classifier only if it has an acceptable test error.
				res.Beta[i] = Math.Max(er, Epsilon)/(1 - er);	// clamp: er == 0 gave beta == 0 and -log(0) downstream.

				//
				// a perfect learner would drive every correct weight to ~0 and make the
				// next normalization degenerate (sum ~ 0); stop boosting here.
				if (er <= 0f) {break;}

				//
				// update weights: down-weight the correctly classified samples.
				for (int i1 = 0; i1 < w.Length; i1++)
				{
					if (resps[i1]) {w[i1] *= res.Beta[i];}
				}
			}

			//
			// trim Beta to the accepted learners when training stopped early,
			// so Beta and Classifiers stay index-consistent.
			if (res.Classifiers.Count < iterCount)
			{
				var beta = res.Beta;
				Array.Resize(ref beta, res.Classifiers.Count);
				res.Beta = beta;
			}

			return res;
		}

		/// <summary>
		/// Calculates the weighted classification error of [cls] on [data].
		/// </summary>
		/// <param name="cls">Classifier to evaluate.</param>
		/// <param name="data">Samples to classify.</param>
		/// <param name="distr">Weight per sample; weights of misclassified samples are summed.</param>
		/// <param name="resps">Output: per-sample flag, true when the sample was classified correctly.</param>
		/// <returns>Sum of the weights of misclassified samples.</returns>
		public static float CalculateError (IClassifier cls, List<TrainingSample> data, float[] distr, out bool[] resps)
		{
			var er = 0f;
			resps = new bool[data.Count];
			for (int i = 0; i < data.Count; i++)
			{
				var sample = data[i];
				var clId = cls.Recognize(sample.Data);

				resps[i] = clId == sample.ClassID;
				if (!resps[i])
				{
					er += distr[i];
				}
			}
			return er;
		}
	}
}
