using System;
using System.Collections.Generic;
using MLSharp.Classification;
using MLSharp.Evaluation;
using MLSharp.Filters;
using MLSharp.Utilities;
using System.Linq;

namespace MLSharp.ConsoleRunner
{
	/// <summary>
	/// A harness that runs nested 10-fold cross validation (10FCV).  The inner pass
	/// selects the best classifier by accuracy on each outer training fold; the winner
	/// is then retrained on the outer training fold and evaluated on the outer
	/// validation fold.
	/// </summary>
	public class NestedCrossValidationHarness : IHarness
	{
		#region Private Static Fields

		/// <summary>
		/// The logger for the harness.
		/// </summary>
		private static readonly log4net.ILog mLogger =
			log4net.LogManager.GetLogger(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType);

		#endregion

		#region Private Fields

		/// <summary>
		/// The factories to run.
		/// </summary>
		private readonly IClassifierFactory[] mClassifierFactories;

		/// <summary>
		/// Guards access to mResults.  A dedicated lock object is used instead of
		/// locking the list itself, because mResults is a mutable field that is
		/// reassigned each run and set to null afterwards; locking a reassignable
		/// reference can silently make different threads lock different objects.
		/// </summary>
		private readonly object mResultsLock = new object();

		/// <summary>
		/// Stores classification results so that they can be written to the result writers.
		/// </summary>
		private IList<ClassificationResult> mResults;

		#endregion

		#region Public Properties

		/// <summary>
		/// Flag that indicates whether or not the harness should attempt
		/// to parallelize classifier execution.
		/// </summary>
		public bool RunInParallel { get; set; }

		/// <summary>
		/// The result writers to write output data to.
		/// </summary>
		public IList<IResultWriter> ResultWriters { get; private set; }

		/// <summary>
		/// The filters to apply to the dataset.
		/// </summary>
		public IList<IDataSetFilter> Filters { get; private set; }

		#endregion

		#region Public Constructors

		/// <summary>
		/// Creates a harness that will use the specified classifier factories.
		/// </summary>
		/// <param name="classifierFactories">The factories.  Must not be null.</param>
		/// <exception cref="ArgumentNullException">
		/// Thrown if <paramref name="classifierFactories"/> is null.
		/// </exception>
		public NestedCrossValidationHarness(IClassifierFactory[] classifierFactories)
		{
			//Fail fast here rather than with a NullReferenceException mid-run.
			if (classifierFactories == null)
			{
				throw new ArgumentNullException("classifierFactories");
			}

			mClassifierFactories = classifierFactories;
			ResultWriters = new List<IResultWriter>();
			Filters = new List<IDataSetFilter>();
		}

		#endregion

		#region Private Methods

		/// <summary>
		/// Performs 10FCV with all of the classifiers to find the best
		/// one based on accuracy.  The index of the best classifier is
		/// returned.
		/// </summary>
		/// <param name="dataSet">The data set to use.</param>
		/// <returns>The index of the best classifier; ties go to the lowest index.</returns>
		private int FindBestClassifier(IDataSet dataSet)
		{
			if (mLogger.IsDebugEnabled)
			{
				mLogger.Debug("Looking for the best classifier...");
			}

			//Partition the data
			Partition[] partitions = KFold.GetPartitions(dataSet, 10);

			//Allocate an array to hold the accuracy totals for each classifier
			double[] accuracySums = new double[mClassifierFactories.Length];

			//foreach partition
			foreach (Partition p in partitions)
			{
				//Apply any filters that weren't applied earlier.
				foreach (IDataSetFilter filter in Filters)
				{
					//Normal filters have already been applied to the dataset in Run;
					//only initialization-requiring filters are handled here, because
					//they must be fit on training data only (to avoid leakage).
					IRequiresInitializationFilter initializableFilter = filter as IRequiresInitializationFilter;

					if (initializableFilter != null)
					{
						//Lock the filter reference, because the filter might not be thread-safe
						//across calls (outer folds may run in parallel).
						lock (initializableFilter)
						{
							//Init it
							mLogger.InfoFormat("Initializing filter {0} on training data.", filter.GetType().Name);
							initializableFilter.Initialize(p.Training);

							mLogger.InfoFormat("Applying filter {0} to both training and validation data.", filter.GetType().Name);

							//Apply it to both training and testing
							initializableFilter.Transform(p.Training);
							initializableFilter.Transform(p.Validation);
						}
					}
				}

				//Train the classifiers and get the results for the validation sets
				for (int i = 0; i < mClassifierFactories.Length; i++)
				{
					if (mLogger.IsDebugEnabled)
					{
						mLogger.DebugFormat("Training classifier from {0} in nested 10FCV...", mClassifierFactories[i]);
					}

					IClassifier classifier = mClassifierFactories[i].BuildClassifier(p.Training);

					if (mLogger.IsDebugEnabled)
					{
						mLogger.DebugFormat("{0} is classifying validation set...", classifier);
					}

					ClassificationResult[] results = classifier.Classify(p.Validation);

					//Calculate the accuracies for each and add it to their sums
					int correct = CountCorrect(results, p.Validation);

					if (mLogger.IsDebugEnabled)
					{
						mLogger.DebugFormat("Accuracy for {0}: {1}/{2} [{3}]", classifier, correct, p.Validation.Instances.Count,
						                    correct/(double) p.Validation.Instances.Count);
					}

					accuracySums[i] += correct;
				}
			}

			//Dump the classifier accuracies
			if (mLogger.IsInfoEnabled)
			{
				mLogger.Info("Classifier Accuracies");
				mLogger.Info("---------------------");

				for (int i = 0; i < accuracySums.Length; i++)
				{
					mLogger.InfoFormat("Classifier {0}: {1}/{2} [{3}]", mClassifierFactories[i], accuracySums[i], dataSet.Instances.Count,
					                   accuracySums[i]/dataSet.Instances.Count);
				}
			}

			//Find the maximum accuracy, and return its index.
			int maxIndex = 0;

			for (int i = 1; i < accuracySums.Length; i++)
			{
				if (accuracySums[i] > accuracySums[maxIndex])
				{
					maxIndex = i;
				}
			}

			return maxIndex;
		}

		/// <summary>
		/// Counts the number of correct predictions by comparing results to
		/// instances positionally (results[i] is paired with Instances[i]).
		/// </summary>
		/// <param name="results">The results.</param>
		/// <param name="dataSet">The data set that was classified.</param>
		/// <returns>The number of predictions whose class matches the instance's class.</returns>
		private int CountCorrect(IList<ClassificationResult> results, IDataSet dataSet)
		{
			int correct = 0;

			for (int i = 0; i < results.Count; i++)
			{
				if (results[i].PredictedClass == dataSet.Instances[i].ClassValue)
				{
					correct++;
				}
			}

			return correct;
		}

		/// <summary>
		/// Performs an outer loop of 10FCV: selects the best classifier via an inner
		/// 10FCV on the training fold, then evaluates it on the validation fold.
		/// May run concurrently with other partitions when RunInParallel is set.
		/// </summary>
		/// <param name="p">The outer partition to process.</param>
		private void ProcessPartition(Partition p)
		{
			//Find the best classifier.
			int bestIndex = FindBestClassifier(p.Training);

			mLogger.DebugFormat("The best classifier factory is {0} at index {1}.", mClassifierFactories[bestIndex], bestIndex);

			//BUG FIX: the second placeholder was previously {0}, which repeated the
			//factory name instead of logging the index.
			mLogger.DebugFormat("Training classifier from {0} (index {1})", mClassifierFactories[bestIndex], bestIndex);

			IClassifier classifier = mClassifierFactories[bestIndex].BuildClassifier(p.Training);

			mLogger.DebugFormat("{0} is classifying validation set...", classifier);

			ClassificationResult[] results = classifier.Classify(p.Validation);

			mLogger.DebugFormat("{0} has finished classifying validation set.", classifier);

			//Copy the results to temporary storage.  Lock on a dedicated gate rather
			//than on mResults itself, since that field is reassigned between runs.
			lock (mResultsLock)
			{
				//TODO: Don't buffer the results, just stream them directly to the output.
				mResults.AddRange(results);
			}
		}

		#endregion

		#region Public Methods

		/// <summary>
		/// Runs the harness.
		/// </summary>
		/// <param name="dataSet">The data set to use.</param>
		public void Run(IDataSet dataSet)
		{
			//Allocate space to hold the results
			mResults = new List<ClassificationResult>();

			//Check for filters that need to be applied across the entire dataset.
			foreach (IDataSetFilter filter in Filters)
			{
				//Skip filters that require initialization, which should be initialized
				//on the training data.
				if (filter is IRequiresInitializationFilter)
				{
					continue;
				}

				mLogger.InfoFormat("Applying filter {0} to the dataset.", filter);

				filter.Transform(dataSet);
			}

			//Partition the data.
			Partition[] partitions = KFold.GetPartitions(dataSet, 10);

			if (RunInParallel)
			{
				Partition.ParallelForEach(partitions, p => ProcessPartition(p));
			}
			else
			{
				//For each partition
				foreach (Partition p in partitions)
				{
					ProcessPartition(p);
				}
			}

			if (ResultWriters.Count > 0)
			{
				ClassificationResult[] results = mResults.ToArray();

				foreach (IResultWriter writer in ResultWriters)
				{
					writer.ProcessResults(dataSet, results);

					//If the writer is disposable
					IDisposable disposable = writer as IDisposable;
					if (disposable != null)
					{
						disposable.Dispose();
					}
				}
			}

			//NOTE(review): CountCorrect pairs mResults[i] positionally with
			//dataSet.Instances[i].  The buffered results are appended per partition, and
			//under RunInParallel the partitions complete in nondeterministic order — so
			//this positional pairing only holds if KFold/ClassificationResult guarantee
			//alignment with the original instance order.  TODO: confirm, or carry the
			//instance (or its index) on ClassificationResult and match explicitly.
			int correct = CountCorrect(mResults, dataSet);

			mLogger.InfoFormat("Finished nested 10-fold cross validation, final accuracy: {0}/{1} [{2}]", correct, dataSet.Instances.Count,
			                   correct/(double) dataSet.Instances.Count);

			//Free up the memory used.
			mResults = null;
		}

		#endregion
	}
}
