﻿using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.Serialization;
using System.Runtime.Serialization.Formatters.Binary;
using MentalAlchemy.Atomics;
using StructMath=MentalAlchemy.Atomics.StructMath;
using TrainingSample=MentalAlchemy.Atomics.TrainingSample;

namespace MentalAlchemy.Molecules.MachineLearning
{
	/// <summary>
	/// [molecule]
	/// 
	/// Class for k-NN method.
	/// </summary>
	[Serializable]
	public class KnnClassifier : IClassifier
	{
		#region - Protected variables. -
		/// <summary>Class ID returned when the k-neighborhood vote is ambiguous.</summary>
		protected const int UNDEFINED_CLASS_ID = -1;
		/// <summary>Neighborhood size (number of closest samples that vote).</summary>
		protected int kValue = 1;
		/// <summary>Number of classes in the training data; -1 until [Train] is called.</summary>
		protected int classCount = -1;

		/// <summary>Training samples; null until [Train] is called.</summary>
		protected List<TrainingSample> tData; 
		#endregion

		#region - Public properties. -
		/// <summary>Neighborhood size (k).</summary>
		public int KValue { get { return kValue; } set { kValue = value;}}
		/// <summary>Training samples used by the classifier.</summary>
		public List<TrainingSample> TrainingData { get { return tData; } }
		/// <summary>
		/// Distance measure used to compare objects.
		/// NOTE(review): this property is static, so it is shared by ALL [KnnClassifier]
		/// instances — training one classifier silently changes the measure used by every
		/// other instance. Kept static for backward compatibility with existing callers.
		/// </summary>
		public static DistanceMeasure2D DistanceMeasure { get; set; }
		#endregion

		#region - Constructor. -
		/// <summary>Creates an untrained classifier with k = 1.</summary>
		public KnnClassifier () {}

		/// <summary>
		/// Copy constructor. Performs a shallow copy of the training-sample list
		/// (the samples themselves are shared with the source classifier).
		/// </summary>
		/// <param name="knn">Classifier to copy.</param>
		public KnnClassifier (KnnClassifier knn)
		{
			kValue = knn.kValue;
			// Fix: [classCount] was not copied before, so a cloned classifier
			// voted with classCount == -1 until it was re-trained.
			classCount = knn.classCount;
			// Fix: tolerate cloning an untrained classifier (tData == null)
			// instead of throwing ArgumentNullException.
			tData = knn.tData != null ? new List<TrainingSample>(knn.tData) : null;
		}
		#endregion

		#region - Implementation of the [IClassifier] interface. -
		/// <summary>
		/// Trains the classifier using the current static [DistanceMeasure] and [KValue].
		/// </summary>
		/// <param name="data">Training data.</param>
		public void Train(List<TrainingSample> data)
		{
			Train(data, DistanceMeasure, KValue);
		}

		/// <summary>
		/// Counts votes per class among the [kValue] training samples closest to [obj].
		/// Must be called after [Train].
		/// </summary>
		/// <param name="obj">Object's description.</param>
		/// <returns>Map from class ID to the number of votes it received.</returns>
		public Dictionary<int, int> GetClassVotes(float[,] obj)
		{
			// Sentinel entry (MaxValue / class -1) guarantees that the insertion loop
			// below always finds a position for every computed distance.
			// NOTE(review): when tData.Count < kValue the sentinel class ID (-1) can end
			// up inside the first k entries of [clIds] — confirm GetPartialHistogram
			// treats it as "no vote".
			var dists = new List<float>();
			var clIds = new List<int>();
			dists.Add(float.MaxValue);
			clIds.Add(-1);

			#region - Calculate distances between objects. -
			foreach (var entry in tData)
			{
				var dist = DistanceMeasure(entry.Data, obj);

				// Insertion sort: keep [dists]/[clIds] ordered by ascending distance.
				for (int j = 0; j < dists.Count; ++j)
				{
					if (dists[j] > dist)
					{
						dists.Insert(j, dist);
						clIds.Insert(j, entry.ClassID);
						break;
					}
				}
			}
			#endregion

			// Take the [kValue] closest samples and let them vote for their classes.
			var votes = VectorMath.GetPartialHistogram(clIds.ToArray(), classCount, kValue);
			return votes;
		}

		/// <summary>
		/// Converts the k-neighborhood votes for [obj] into per-class probabilities.
		/// Must be called after [Train].
		/// </summary>
		/// <param name="obj">Object's description.</param>
		/// <returns>Map from class ID to its probability (vote share).</returns>
		public virtual Dictionary<int, float> GetClassProbabilities(float[,] obj)
		{
			var votes = GetClassVotes(obj);
			return StructMath.ConvertToProbabilities(votes);
		}

		/// <summary>Creates a shallow copy of this classifier (see copy constructor).</summary>
		public object Clone ()
		{
			return new KnnClassifier(this);
		}
		#endregion

		#region - Training & recognition interface methods (TrainingSample). -
		/// <summary>
		/// Train k-NN Recognizer.
		/// </summary>
		/// <param name="data">Training data. Stored by reference, not copied.</param>
		/// <param name="distMeasure">Distance measure method delegate (set on the shared static property).</param>
		/// <param name="k">Neighborhood size.</param>
		public void Train(List<TrainingSample> data, DistanceMeasure2D distMeasure, int k)
		{
			tData = data;
			DistanceMeasure = distMeasure;
			classCount = MachineLearningElements.CalculateClasses(data.ToArray());
			kValue = k;
		}

		/// <summary>
		/// Define class ID for the given object. Should always be called *after* the method [Train].
		/// </summary>
		/// <param name="obj">Object's description.</param>
		/// <returns>Class ID with the most votes, or -1 when no class wins.</returns>
		public int Recognize(float[,] obj)
		{
			var votes = GetClassVotes(obj);
			return MachineLearningElements.GetMaxClassId(votes, -1);
		}

		/// <summary>
		/// Get ID of the class using information about distances between object under consideration and distances to all training samples. The decision is based upon voting among k closest objects.
		/// </summary>
		/// <param name="dists">Array of distances to all training samples.</param>
		/// <param name="clIds">Class IDs of the training samples.</param>
		/// <param name="curK">Neighborhood size.</param>
		/// <param name="clCount">Biggest value of the class ID (for performance).</param>
		/// <returns>ID of the winning class, or [UNDEFINED_CLASS_ID] when the vote is tied.</returns>
		public virtual int GetWinningClassId(List<float> dists, List<int> clIds, int curK, int clCount)
		{
			// Take the [curK] closest samples and let them vote for their classes.
			var classesHist = VectorMath.GetPartialHistogram(clIds.ToArray(), clCount, curK);
			var hist = new int[classesHist.Values.Count];
			classesHist.Values.CopyTo(hist, 0);

			var max = VectorMath.Max(hist);
			
			if (VectorMath.Calculate(hist, max) == 1)
			{	// if max count value is unique then the class gained the max number of votes wins.
				int maxIdx = VectorMath.FirstIndexOf(hist, max);
				var keys = new int[classesHist.Keys.Count];
				classesHist.Keys.CopyTo(keys, 0);
				return keys[maxIdx];
			}

			// if max count is not unique (e.g. two or more classes got the max number of votes)
			//	then we have an ambiguity situation.
			return UNDEFINED_CLASS_ID;
		}
		#endregion

		#region - Utility methods. -
		/// <summary>
		/// Save k-NN recognizer and all its data into a binary file.
		/// </summary>
		/// <param name="filename">Filename.</param>
		public virtual void Save(string filename)
		{
			// SECURITY NOTE: BinaryFormatter is obsolete and unsafe for untrusted data
			// (removed in .NET 9). Kept here only for compatibility with files already
			// produced by this class; consider migrating to a safe serializer.
			IFormatter formatter = new BinaryFormatter();
			// Fix: [using] guarantees the stream is closed even if serialization throws.
			using (Stream stream = new FileStream(filename, FileMode.Create, FileAccess.Write, FileShare.None))
			{
				formatter.Serialize(stream, this);
			}
		}

		/// <summary>
		/// Load k-NN recognizer and all its data from a binary file.
		/// </summary>
		/// <param name="filename">Filename.</param>
		public virtual void Load(string filename)
		{
			// SECURITY NOTE: deserializing with BinaryFormatter executes attacker-controlled
			// type construction — only load files from trusted sources.
			IFormatter formatter = new BinaryFormatter();
			KnnClassifier knn;
			// Fix: [using] guarantees the stream is closed even if deserialization throws.
			using (Stream stream = new FileStream(filename, FileMode.Open, FileAccess.Read, FileShare.None))
			{
				knn = (KnnClassifier)formatter.Deserialize(stream);
			}

			kValue = knn.kValue;
			tData = new List<TrainingSample>();
			if (knn.tData != null)
			{
				foreach (TrainingSample sample in knn.tData) { tData.Add(sample); }
			}

			#region - Calculate number of classes in training data. -
			classCount = MachineLearningElements.CalculateClasses(tData.ToArray());
			#endregion
		}
		#endregion
	}
}
