﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using libsvm;
using MentalAlchemy.Atomics;

namespace MentalAlchemy.Molecules.MachineLearning
{
	/// <summary>
	/// Class for soft margin support vector machines using incremental active set method, INCAS.
	/// The algorithm implementation is based upon the following document (in Russian):
	/// http://www.ccas.ru/voron/download/SVM.pdf
	/// </summary>
	public class SVM : IClassifier
	{
		// Soft-margin penalty parameter C; larger values penalize margin violations harder.
		protected float c = 1f;

		/// <summary>
		/// Copy constructor: replicates the trainable state of the given machine.
		/// Used by <see cref="Clone"/>.
		/// </summary>
		/// <param name="svm">Source machine to copy. Must not be null.</param>
		/// <exception cref="ArgumentNullException">Thrown when <paramref name="svm"/> is null.</exception>
		public SVM (SVM svm)
		{
			if (svm == null) throw new ArgumentNullException("svm");
			c = svm.c;	// fix: the parameter was previously dropped, so Clone() lost the soft-margin setting.
			// todo: copy support vectors / weights once training state is implemented.
		}

		#region - Implementation of ICloneable. -
		/// <summary>Creates a deep copy of this classifier via the copy constructor.</summary>
		public object Clone()
		{
			return new SVM(this);
		}
		#endregion

		#region - Implementation of IClassifier. -

		/// <summary>
		/// Trains the soft-margin SVM with the incremental active set (INCAS) method.
		/// Currently only the starting-point selection is implemented; the main
		/// optimization cycle below is a design sketch and the method throws.
		/// </summary>
		/// <param name="trainData">Training samples; expected to contain exactly two class ids.</param>
		/// <exception cref="NotImplementedException">Always thrown: training loop is not implemented yet.</exception>
		public void Train(List<TrainingSample> trainData)
		{
			// Io = "outside" set: samples currently assumed to be non-support, correctly classified.
			var Io = new List<TrainingSample> (trainData);
			// Define starting point (starting support vectors): the closest pair across the two classes.
			var classSets = MachineLearningElements.SepararateByClassId(Io);
			var Is = SvmElements.GetClosestTrainingPoints(classSets[0], classSets[1]);
			var Ic = new List<TrainingSample>();	// Ic = margin-violating ("clipped") samples.

			// Separate support vectors from all others.
			foreach (var sample in Is) {Io.Remove(sample);}

			// main training cycle (design sketch, kept as author notes).
			//do
			//{
			//    do
			//    {
			//        // todo: figure out lamdas number.
			//        throw new Exception();
			//        // todo: var lambdas = new float[];
			//        // todo: solve a constrained quadratic optimization problem wrt lambdas.

			//        //var idx = VectorMath.FirstLessOrEqualIndex(lambdas, 0f);
			//        //if (Is.Count > 2 && idx >= 0)
			//        //{	// Is[idx] is not a support vector.
			//        //    Io.Add(Is[idx]);
			//        //    Is.RemoveAt(idx);
			//        //}
			//        //idx = VectorMath.FirstGreaterOrEqualIndex(lambdas, 0f);
			//        //if (Is.Count > 2 && idx >= 0)
			//        //{	// Is[idx] is not a support vector.
			//        //    Ic.Add(Is[idx]);
			//        //    Is.RemoveAt(idx);
			//        //}
			//    } while (Is.Count > 0);	// note: this condition is probably incorrect!! The original variant is: "while there's element in Is which should be moved to Io or Ic".

			//    // todo: compute algorithm parameters w and w_0 using (1.4) and (1.7).
			//    // todo: compute margins m_i for each element in (Io U Ic).
			//    //var mo = new float[];	// todo: define number of margin distances for Io.
			//    //var mc = new float[];	// todo: define number of margin distances for Ic.

			//    var idx1 = VectorMath.FirstLessOrEqualIndex(mo, 1f);
			//    if (idx1 >= 0)
			//    {
			//        Is.Add(Io[idx1]);
			//        Io.RemoveAt(idx1);
			//    }
			//    idx1 = VectorMath.FirstGreaterOrEqualIndex(mc, 1f);
			//    if (idx1 >= 0)
			//    {
			//        Is.Add(Ic[idx1]);
			//        Ic.RemoveAt(idx1);
			//    }
			//} while (???);	// todo

			throw new System.NotImplementedException();
		}

		/// <summary>Not implemented yet: classifies the given feature matrix.</summary>
		/// <exception cref="NotImplementedException">Always thrown.</exception>
		public int Recognize(float[,] obj)
		{
			throw new System.NotImplementedException();
		}

		/// <summary>Not implemented yet: returns per-class vote counts for the given object.</summary>
		/// <exception cref="NotImplementedException">Always thrown.</exception>
		public Dictionary<int, int> GetClassVotes(float[,] obj)
		{
			throw new System.NotImplementedException();
		}

		/// <summary>Not implemented yet: returns per-class probabilities for the given object.</summary>
		/// <exception cref="NotImplementedException">Always thrown.</exception>
		public Dictionary<int, float> GetClassProbabilities(float[,] obj)
		{
			throw new System.NotImplementedException();
		}

		#endregion
	}

	public class SvmElements
	{
		#region - Service methods. -
		/// <summary>
		/// Picks up the two closest points from different classes for the given class 1 and class 2 datasets,
		/// alternating nearest-neighbour queries until neither point changes.
		/// The Euclidean distance measure is used.
		/// </summary>
		/// <param name="data1">Samples of the first class; must contain at least one element.</param>
		/// <param name="data2">Samples of the second class; must contain at least one element.</param>
		/// <returns>A two-element list: [0] from <paramref name="data1"/>, [1] from <paramref name="data2"/>.</returns>
		public static List<TrainingSample> GetClosestTrainingPoints(List<TrainingSample> data1, List<TrainingSample> data2)
		{
			var res = new List<TrainingSample>();
			res.Add(data1[0]);
			res.Add(MachineLearningElements.GetClosestSample(res[0], data2, VectorMath.EuclidianDistance));

			do
			{
				var newV1 = MachineLearningElements.GetClosestSample(res[1], data1, VectorMath.EuclidianDistance);
				if (MatrixMath.Equals(res[0].Data, newV1.Data)) break;	// class-1 point stable -> converged.
				res[0] = newV1;	// else assign a new closest vector.

				var newV2 = MachineLearningElements.GetClosestSample(res[0], data2, VectorMath.EuclidianDistance);
				// fix: converged when the class-2 point is unchanged; the original compared against res[0]
				// (the class-1 point), which only matched when both classes shared an identical point.
				if (MatrixMath.Equals(res[1].Data, newV2.Data)) break;
				res[1] = newV2;	// else assign a new closest vector.
			} while (true);

			return res;
		}
		#endregion

		#region - Methods for libsvm compatibility. -
		/// <summary>
		/// [molecule]
		/// 
		/// Creates [svm_problem] object from a set of training samples.
		/// Parameters of the svm_problem instance:
		///   l -- number of training samples (guess this is a kind of C++ arrays legacy, when
		///        one should specify explicitly the size of the array).
		///   x -- training samples input signals.
		///   y -- training samples labels.
		/// 
		/// todo: check whether 'denormalization' for y's is required, so that outputs would be -1, +1, +2 etc.
		/// todo: instead of 0, 1, 2,...
		/// </summary>
		/// <param name="tData">Training samples to convert.</param>
		/// <returns>A populated <see cref="svm_problem"/> ready to be passed to libsvm training.</returns>
		public static svm_problem CreateSvmProblem (List<TrainingSample> tData)
		{
			var res = new svm_problem();
			res.l = tData.Count;

			res.x = new svm_node[tData.Count][];
			res.y = new double[tData.Count];
			for (int i = 0; i < tData.Count; i++)
			{
				res.y[i] = tData[i].ClassID;	// class id used directly as the label.
				res.x[i] = ConvertToSvmNodes(tData[i]);
			}

			return res;
		}

		/// <summary>
		/// [molecule]
		/// 
		/// Converts given training sample into array of svm nodes, flattening the
		/// sample's data matrix into a vector; node indices run 0..Length-1.
		/// </summary>
		/// <param name="sample">Input training sample.</param>
		/// <returns>Resulting array of [svm_node] objects.</returns>
		public static svm_node[] ConvertToSvmNodes (TrainingSample sample)
		{
			var inputs = MatrixMath.ConvertToVector(sample.Data);
			var res = new svm_node[inputs.Length];
			for (int i = 0; i < inputs.Length; i++)
			{
				// fix: svm_node is a reference type in the libsvm port, so array elements start
				// as null; the original assigned fields without instantiating and threw NRE.
				res[i] = new svm_node();
				res[i].index = i;
				res[i].value_Renamed = inputs[i];
			}
			return res;
		}
		#endregion
	}
}
