﻿#region Copyright information
// 
// Copyright © 2005-2013 Yongkee Cho. All rights reserved.
// 
// This code is a part of the Biological Object Library and governed under the terms of the
// GNU Lesser General  Public License (LGPL) version 2.1 which accompanies this distribution.
// For more information on the LGPL, please visit http://bol.codeplex.com/license.
// 
// - Filename: GeneralizedHiddenMarkovModel.cs
// - Author: Yongkee Cho
// - Email: yongkeecho@gmail.com
// - Date Created: 2013-01-24 4:34 PM
// - Last Modified: 2013-01-25 3:59 PM
// 
#endregion
using System;
using System.Collections.Generic;
using System.Linq;
using BOL.Linq.Probability;
using BOL.Maths.Distributions;
using BOL.Linq;

namespace BOL.Algorithms.StateMachines
{
    public class GeneralizedHiddenMarkovModel<TState, TObservable> : HiddenMarkovModel<TState, TObservable>, IGeneralizedHiddenMarkovModel<TState, TObservable>, ICloneable
        where TObservable: struct, IComparable
    {
        #region Public properties

        /// <summary>
        /// Per-state distribution over segment lengths (state durations), keyed by state.
        /// Set from the constructor (never null there) and sampled by <see cref="Generate"/>.
        /// NOTE(review): the public setter allows callers to replace it with null later — confirm whether that is intended.
        /// </summary>
        public IDictionary<TState, IDistribution<int>> LengthDistribution { get; set; }
        
        #endregion

        #region Constructors

        /// <summary>
        /// Initializes a generalized hidden Markov model from its four component distributions.
        /// The initial, emission and transition distributions are forwarded to the base
        /// <see cref="HiddenMarkovModel{TState, TObservable}"/> constructor; the length
        /// (duration) distribution is stored on this class.
        /// </summary>
        /// <param name="initialDistribution">Probability of starting in each state.</param>
        /// <param name="lengthDistribution">Per-state distribution of segment lengths.</param>
        /// <param name="emissionDistribution">Per-state distribution of emitted observables.</param>
        /// <param name="transitionDistribution">Per-state distribution over successor states.</param>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="lengthDistribution"/> is null.</exception>
        public GeneralizedHiddenMarkovModel(IDictionary<TState, double> initialDistribution, IDictionary<TState, IDistribution<int>> lengthDistribution, IDictionary<TState, IDistribution<TObservable>> emissionDistribution, IDictionary<TState, IDistribution<TState>> transitionDistribution)
            : base(initialDistribution, emissionDistribution, transitionDistribution)
        {
            if (lengthDistribution == null)
            {
                throw new ArgumentNullException("lengthDistribution");
            }

            LengthDistribution = lengthDistribution;
        }

        #endregion

        #region ICloneable implementation

        /// <summary>
        /// Creates a deep copy of this model: the dictionaries are copied, the initial
        /// probabilities are copied by value, and each component distribution is cloned.
        /// </summary>
        /// <returns>A new, independent <see cref="GeneralizedHiddenMarkovModel{TState, TObservable}"/>.</returns>
        public new GeneralizedHiddenMarkovModel<TState, TObservable> Clone()
        {
            // Direct casts instead of 'as': if a distribution's Clone() ever returned an
            // incompatible object, 'as' would silently store null in the new model and the
            // failure would only surface much later; a cast fails fast with InvalidCastException.
            return new GeneralizedHiddenMarkovModel<TState, TObservable>(
                InitialDistribution.ToDictionary(x => x.Key, x => x.Value),
                LengthDistribution.ToDictionary(x => x.Key, x => (IDistribution<int>)x.Value.Clone()),
                EmissionDistribution.ToDictionary(x => x.Key, x => (IDistribution<TObservable>)x.Value.Clone()),
                TransitionDistribution.ToDictionary(x => x.Key, x => (IDistribution<TState>)x.Value.Clone())
                );
        }

        // Explicit ICloneable implementation; delegates to the strongly-typed Clone() above
        // so callers through the interface get the same deep copy.
        object ICloneable.Clone()
        {
            return Clone();
        }

        #endregion

        #region Public methods

        /// <summary>
        /// Lazily generates up to <paramref name="length"/> (state, observable) pairs from the model.
        /// A start state is drawn from the initial distribution and a segment length from that
        /// state's length distribution; one observable is emitted per step while the segment lasts,
        /// after which a successor state is drawn from the transition distribution. Entering a
        /// *different* state resamples the segment length; re-entering the same state continues
        /// drawing transitions. Generation ends early if the current state has no transitions.
        /// </summary>
        /// <param name="r">Random source used for all sampling.</param>
        /// <param name="length">Maximum number of loop iterations. NOTE(review): a transition step
        /// consumes an iteration without emitting, so fewer than <paramref name="length"/> pairs
        /// may be produced — confirm this is the intended contract.</param>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="r"/> is null.</exception>
        public new IEnumerable<Tuple<TState, TObservable>> Generate(Random r, int length)
        {
            if (r == null)
                throw new ArgumentNullException("r");

            // TState is unconstrained, so default(TState) is null for reference types; the
            // original 'prev.Equals(current)' would throw NullReferenceException on the first
            // transition. EqualityComparer handles null operands safely.
            var comparer = EqualityComparer<TState>.Default;
            var current = InitialDistribution.Sample(r);
            var prev = default(TState);
            var currentLength = LengthDistribution[current].Sample(r);
            var i = 0;
            var j = 0;

            while (i++ < length)
            {
                if (j++ < currentLength)
                    yield return new Tuple<TState, TObservable>(current, EmissionDistribution[current].Sample(r));
                else
                {
                    if (!TransitionDistribution.ContainsKey(current))
                        yield break;
                    current = TransitionDistribution[current].Sample(r);
                    // NOTE(review): on the very first transition prev is default(TState); if the
                    // sampled state happens to equal the default value the length is not
                    // resampled — confirm whether that edge case matters for value-type states.
                    if (!comparer.Equals(prev, current))
                    {
                        currentLength = LengthDistribution[current].Sample(r);
                        j = 0;
                    }

                    prev = current;
                }
            }
        }

        //public void Train<TSource>(IEnumerable<TSource> source, Func<TSource, TState> stateSelector, Func<TSource, TObservable> observableSelector)
        //{
        //    if (source == null)
        //        throw new ArgumentNullException("source");
        //    if (stateSelector == null)
        //        throw new ArgumentNullException("stateSelector");
        //    if (observableSelector == null)
        //        throw new ArgumentNullException("observableSelector");

        //    var array = source.ToArray();
        //    var states = array.Select(stateSelector).ToArray();

        //    // Updates emission distribution
        //    var emissionDistribution = array.GroupBy(stateSelector, observableSelector, (key, observables) => new { Key = key, Source = observables })
        //        .ToDictionary(x => x.Key, x => x.Source);
        //    EmissionDistribution.ForEach(x => x.Value.MaximumLikelihoodEstimate(emissionDistribution[x.Key]));

        //    // Updates transition distribution
        //    var transitionDistribution = InitialDistribution.ToDictionary(x => x.Key, x => new List<TState>());
        //    foreach (var key in states.Window(2).Select(x => x.ToArray()))
        //        transitionDistribution[key[0]].Add(key[1]);
        //    TransitionDistribution.ForEach(x => x.Value.MaximumLikelihoodEstimate(transitionDistribution[x.Key]));
        //}

        /// <summary>
        /// Supervised (maximum-likelihood) training from fully labelled sequences. Updates, in order:
        /// the initial distribution (frequency of each state as a sequence's first state), the
        /// emission distributions (observables grouped by state), and the length and transition
        /// distributions (run lengths and adjacent state pairs within each sequence).
        /// </summary>
        /// <param name="source">Labelled training sequences. NOTE(review): an empty sequence
        /// would throw on x[0] when collecting first states — confirm callers never pass one.</param>
        /// <param name="stateSelector">Extracts the state label from a training item.</param>
        /// <param name="observableSelector">Extracts the observable from a training item.</param>
        /// <exception cref="ArgumentNullException">Thrown when any argument is null.</exception>
        public new void Train<TSource>(IEnumerable<IEnumerable<TSource>> source, Func<TSource, TState> stateSelector, Func<TSource, TObservable> observableSelector)
        {
            if (source == null)
                throw new ArgumentNullException("source");
            if (stateSelector == null)
                throw new ArgumentNullException("stateSelector");
            if (observableSelector == null)
                throw new ArgumentNullException("observableSelector");

            var list = source.Select(x => x.ToArray()).ToList();
            var states = list.Select(x => x.Select(stateSelector).ToArray()).ToList();

            // Updates initial distribution
            var firsts = states.Select(x => x[0]).ToList();
            InitialDistribution = InitialDistribution.ToDictionary(x => x.Key, x => (double)firsts.Count(y => x.Key.Equals(y))).Normalize();

            // Updates emission distribution
            var emissionDistribution = list.SelectMany(x => x)
                .GroupBy(stateSelector, observableSelector, (key, observables) => new { Key = key, Source = observables })
                .ToDictionary(x => x.Key, x => x.Source);
            EmissionDistribution.ForEach(x => x.Value.MaximumLikelihoodEstimate(emissionDistribution[x.Key]));

            // Updates length & transition distribution.
            // A run length is counted as the number of equal adjacent pairs, and a run is
            // committed to 'lengths' only when the state changes; the trailing (right-censored)
            // run of each sequence is therefore never committed, matching the original behaviour.
            var transitionDistribution = InitialDistribution.ToDictionary(x => x.Key, x => new List<TState>());
            var lengths = InitialDistribution.ToDictionary(x => x.Key, x => new List<int>());
            foreach (var sequence in states)
            {
                // BUGFIX: run-length counters are reset per sequence. Previously a single
                // counter dictionary was shared across all sequences, so an uncommitted run at
                // the end of one sequence was added to a run of the same state at the start of
                // the next, inflating the recorded duration.
                var length = InitialDistribution.ToDictionary(x => x.Key, x => 0);
                foreach (var key in sequence.Window(2).Select(window => window.ToArray()))
                {
                    if (key[0].Equals(key[1]))
                        length[key[0]]++;
                    else
                    {
                        lengths[key[0]].Add(length[key[0]]);
                        length[key[0]] = 0;
                    }

                    transitionDistribution[key[0]].Add(key[1]);
                }
            }
            LengthDistribution.ForEach(x => x.Value.MaximumLikelihoodEstimate(lengths[x.Key]));
            TransitionDistribution.ForEach(x => x.Value.MaximumLikelihoodEstimate(transitionDistribution[x.Key]));
        }

        /// <summary>
        /// Unsupervised (Baum-Welch / EM) training from an unlabelled observation sequence.
        /// NOT IMPLEMENTED — always throws. A draft implementation (forward/backward in log
        /// space with a maximization step) is retained in the comments below for reference.
        /// </summary>
        /// <param name="observables">Unlabelled observation sequence.</param>
        /// <param name="lengthEstimator">Builds a length distribution from sampled lengths.</param>
        /// <param name="emissionEstimator">Builds an emission distribution from observables.</param>
        /// <param name="numberOfIterations">Number of EM iterations to run.</param>
        /// <exception cref="NotImplementedException">Always thrown.</exception>
        public void BaumWelch(IEnumerable<TObservable> observables, Func<IEnumerable<int>, IDistribution<int>> lengthEstimator, Func<IEnumerable<TObservable>, IDistribution<TObservable>> emissionEstimator, int numberOfIterations)
        {
            throw new NotImplementedException();

            //if (observables == null)
            //    throw new ArgumentNullException("observables");
            //if (lengthEstimator == null)
            //    throw new ArgumentNullException("lengthEstimator");
            //if (emissionEstimator == null)
            //    throw new ArgumentNullException("emissionEstimator");

            //var seq = observables.ToArray();
            //var seqLength = seq.Length;
            //var states = InitialDistribution.Keys.ToList();
            //var numberOfStates = states.Count;
            //var forward = new double[seqLength + 1, numberOfStates];
            //var backward = new double[seqLength + 1, numberOfStates];
            //var emissionSeqs = EmissionDistribution.ToDictionary(x => x.Key, x => new List<TObservable>());
            //var transitionProb = new double[numberOfStates, numberOfStates];
            //var tempVector = new double[numberOfStates];
            //var tempMatrix = new double[numberOfStates * numberOfStates];
            //int i, j, k, m;
            //double temp;

            //for (j = 0; j < numberOfIterations; j++)
            //{
            //    for (k = 0; k < numberOfStates; k++)
            //    {
            //        forward[0, k] = Math.Log(InitialDistribution[states[k]]);
            //        backward[0, k] = 0;
            //    }

            //    // fills forward matrix
            //    for (i = 1; i <= seqLength; i++)
            //        for (m = 0; m < numberOfStates; m++)
            //        {
            //            for (k = 0; k < numberOfStates; k++)
            //                tempVector[k] = forward[i - 1, k] + Math.Log(TransitionDistribution[states[k]][states[m]]);
            //            forward[i, m] = tempVector.LogSumOfExponentials() +
            //                            Math.Log(EmissionDistribution[states[m]].Pdf(seq[i - 1]));
            //        }

            //    // fills backward matrix
            //    for (i = seqLength - 1; i >= 1; i--)
            //        for (k = 0; k < numberOfStates; k++)
            //        {
            //            for (m = 0; m < numberOfStates; m++)
            //                tempVector[m] = backward[i + 1, m] + Math.Log(TransitionDistribution[states[k]][states[m]]) +
            //                                Math.Log(EmissionDistribution[states[m]].Pdf(seq[i]));
            //            backward[i, k] = tempVector.LogSumOfExponentials();
            //        }

            //    // recursion
            //    for (i = 1; i <= seqLength; i++)
            //    {
            //        for (k = 0; k < numberOfStates; k++)
            //            tempVector[k] = forward[i, k] + backward[i, k];

            //        temp = tempVector.LogSumOfExponentials();
            //        for (k = 0; k < numberOfStates; k++)
            //            tempVector[k] = Math.Exp(tempVector[k] - temp);

            //        emissionSeqs[states[tempVector.ArgMax()]].Add(seq[i - 1]);

            //        for (k = 0; k < numberOfStates; k++)
            //            for (m = 0; m < numberOfStates; m++)
            //                tempMatrix[k * numberOfStates + m] = forward[i - 1, k] + Math.Log(TransitionDistribution[states[k]][states[m]]) + Math.Log(EmissionDistribution[states[m]].Pdf(seq[i - 1])) + backward[i, m];

            //        temp = tempMatrix.LogSumOfExponentials();
            //        for (k = 0; k < numberOfStates; k++)
            //            for (m = 0; m < numberOfStates; m++)
            //                transitionProb[k, m] += Math.Exp(tempMatrix[k * numberOfStates + m] - temp);
            //    }

            //    // maximization
            //    EmissionDistribution = EmissionDistribution.ToDictionary(x => x.Key, x => emissionEstimator(emissionSeqs[x.Key]));
            //    TransitionDistribution = TransitionDistribution.ToDictionary(x => x.Key, x => x.Value.ToDictionary(y => y.Key, y => transitionProb[states.IndexOf(x.Key), states.IndexOf(y.Key)]).Normalize());
            //}
        }

        /// <summary>
        /// Most-likely state path decoding for an observation sequence.
        /// NOT IMPLEMENTED — always throws. A draft Viterbi implementation (log-space dynamic
        /// programming with traceback) is retained in the comments below for reference.
        /// NOTE(review): the draft is the plain-HMM recursion; it does not use
        /// LengthDistribution, so it would need extension for the generalized model.
        /// </summary>
        /// <param name="observables">Observation sequence to decode.</param>
        /// <exception cref="NotImplementedException">Always thrown.</exception>
        public new IEnumerable<TState> Viterbi(IEnumerable<TObservable> observables)
        {
            throw new NotImplementedException();
            
            //if (observables == null)
            //    throw new ArgumentNullException("observables");

            //var seq = observables.ToArray();
            //var seqLength = seq.Length;
            //var states = InitialDistribution.Keys.ToArray();
            //var numberOfStates = states.Length;
            //var tempMatrix = new double[seqLength + 1, numberOfStates];
            //var tempVector = new double[numberOfStates];
            //var mostLikelyStates = new TState[seqLength];
            //int i, k, m;

            //// Initialization
            //for (k = 0; k < numberOfStates; k++)
            //    tempMatrix[0, k] = Math.Log(InitialDistribution[states[k]]);

            //// Recursion
            //for (i = 1; i <= seqLength; i++)
            //    for (m = 0; m < numberOfStates; m++)
            //    {
            //        for (k = 0; k < numberOfStates; k++)
            //            tempVector[k] = tempMatrix[i - 1, k] + Math.Log(TransitionDistribution[states[k]][states[m]]);
            //        tempMatrix[i, m] = tempVector.Max() + Math.Log(EmissionDistribution[states[m]].Pdf(seq[i - 1]));
            //    }

            //// Traceback
            //for (k = 0; k < numberOfStates; k++)
            //    tempVector[k] = tempMatrix[seqLength, k];
            //m = tempVector.ArgMax();

            //for (i = seqLength; i > 0; i--)
            //{
            //    for (k = 0; k < numberOfStates; k++)
            //        tempVector[k] = tempMatrix[i - 1, k] + Math.Log(TransitionDistribution[states[k]][states[m]]);

            //    k = tempVector.ArgMax();
            //    mostLikelyStates[i - 1] = states[k];

            //    m = k;
            //}

            //return mostLikelyStates;
        }

        /// <summary>
        /// Posterior (forward-backward) decoding: per-state posterior probability at each
        /// position of the observation sequence.
        /// NOT IMPLEMENTED — always throws. A draft implementation (log-space forward/backward
        /// with per-position normalization) is retained in the comments below for reference.
        /// </summary>
        /// <param name="observables">Observation sequence to decode.</param>
        /// <exception cref="NotImplementedException">Always thrown.</exception>
        public new IDictionary<TState, double[]> DecodePosterior(IEnumerable<TObservable> observables)
        {
            throw new NotImplementedException();

            //if (observables == null)
            //    throw new ArgumentNullException("observables");

            //var seq = observables.ToArray();
            //var seqLength = seq.Length;
            //var states = InitialDistribution.Keys.ToArray();
            //var numberOfStates = states.Length;
            //var forward = new double[seqLength + 1, numberOfStates];
            //var backward = new double[seqLength + 1, numberOfStates];
            //var temp = new double[numberOfStates];
            //var posterior = new double[numberOfStates][];
            //int i, k, m;

            //for (k = 0; k < numberOfStates; k++)
            //{
            //    forward[0, k] = Math.Log(InitialDistribution[states[k]]);
            //    backward[0, k] = 0;
            //    posterior[k] = new double[seqLength];
            //}

            //// fills forward matrix
            //for (i = 1; i <= seqLength; i++)
            //    for (m = 0; m < numberOfStates; m++)
            //    {
            //        for (k = 0; k < numberOfStates; k++)
            //            temp[k] = forward[i - 1, k] + Math.Log(TransitionDistribution[states[k]][states[m]]);
            //        forward[i, m] = temp.LogSumOfExponentials() + Math.Log(EmissionDistribution[states[m]].Pdf(seq[i - 1]));
            //    }

            //// fills backward matrix
            //for (i = seqLength - 1; i >= 1; i--)
            //    for (k = 0; k < numberOfStates; k++)
            //    {
            //        for (m = 0; m < numberOfStates; m++)
            //            temp[m] = backward[i + 1, m] + Math.Log(TransitionDistribution[states[k]][states[m]]) + Math.Log(EmissionDistribution[states[m]].Pdf(seq[i]));
            //        backward[i, k] = temp.LogSumOfExponentials();
            //    }

            //// recursion
            //for (i = 1; i <= seqLength; i++)
            //{
            //    for (k = 0; k < numberOfStates; k++)
            //        temp[k] = forward[i, k] + backward[i, k];

            //    var norm = temp.LogSumOfExponentials();
            //    for (k = 0; k < numberOfStates; k++)
            //        posterior[k][i - 1] = Math.Exp(temp[k] - norm);
            //}

            //return states.ToDictionary(x => x, x => posterior[Array.IndexOf(states, x)]);
        }

        #endregion

        #region IEquatable<IGeneralizedHiddenMarkovModel<TState, TObservable>> implementation

        /// <summary>
        /// Value equality: base-class equality plus element-wise equality of the length
        /// distributions. NOTE(review): the emission-distribution comparison looks redundant
        /// if base.Equals already covers it — kept to preserve existing behaviour.
        /// </summary>
        /// <param name="other">Model to compare against; null compares unequal.</param>
        /// <returns>True when the two models have equal component distributions.</returns>
        public bool Equals(IGeneralizedHiddenMarkovModel<TState, TObservable> other)
        {
            // Per IEquatable<T> convention, Equals must return false for null rather than throw.
            if (other == null)
                return false;

            return base.Equals(other) && LengthDistribution.DictionaryEqual(other.LengthDistribution) && EmissionDistribution.DictionaryEqual(other.EmissionDistribution);
        }

        #endregion

        #region Object overriden

        /// <summary>
        /// Combines the base hash code with the hash code of every emission distribution and
        /// every length distribution via XOR (order-independent, same result as before).
        /// </summary>
        public override int GetHashCode()
        {
            var hashCode = base.GetHashCode();
            foreach (var pair in EmissionDistribution)
                hashCode ^= pair.Value.GetHashCode();
            foreach (var pair in LengthDistribution)
                hashCode ^= pair.Value.GetHashCode();
            return hashCode;
        }

        /// <summary>
        /// Object equality override. Returns false for null or for objects that are not a
        /// <see cref="IGeneralizedHiddenMarkovModel{TState, TObservable}"/>; otherwise defers
        /// to the strongly-typed Equals.
        /// </summary>
        /// <param name="other">Object to compare against.</param>
        public override bool Equals(object other)
        {
            // Object.Equals overrides must not throw: previously this threw
            // ArgumentNullException on null and InvalidCastException on a foreign type, which
            // breaks any framework code (collections, LINQ) that probes equality.
            var model = other as IGeneralizedHiddenMarkovModel<TState, TObservable>;
            return model != null && Equals(model);
        }

        /// <summary>
        /// Human-readable summary: the type name, each state with its initial probability,
        /// length distribution and emission distribution, and the transition map.
        /// </summary>
        public override string ToString()
        {
            var stateSummaries = InitialDistribution
                .Select(x => String.Format("{0} ({1:0.0000}: {2}, {3})", x.Key, x.Value, LengthDistribution[x.Key], EmissionDistribution[x.Key]));
            var transitionSummaries = TransitionDistribution
                .Select(x => String.Format("{0}->{1}", x.Key, x.Value));

            return String.Format("{0} {{States = {{{1}}}, Transitions = {{{2}}}}}",
                GetType().Name,
                String.Join(", ", stateSummaries),
                String.Join(", ", transitionSummaries));
        }

        #endregion
    }
}
