/** @file discrete_hmm.h
 *
 *  @author Dongryeol Lee (drselee@gmail.com)
 */

#ifndef MLPACK_HMM_DISCRETE_HMM_H
#define MLPACK_HMM_DISCRETE_HMM_H

#include <algorithm>
#include <cmath>
#include <iostream>
#include <limits>
#include <vector>

#include <armadillo>

#include "mlpack/hmm/hmm_arguments.h"
#include "mlpack/hmm/hmm_model.h"
#include "mlpack/hmm/hmm_result.h"

namespace mlpack {
namespace hmm {

class HMM {

  private:

    /** @brief Returns true when Baum-Welch training should stop:
     *         either the absolute change in likelihood fell below the
     *         user-supplied tolerance, or the iteration cap was hit.
     *
     *  @param prev_likelihood The likelihood from the previous iteration.
     *  @param new_likelihood The likelihood from the current iteration.
     *  @param current_num_iter Number of iterations completed so far.
     *  @param arguments_in Supplies the tolerance and the iteration cap.
     */
    static bool BaumWelchConvergence_(
      double prev_likelihood, double new_likelihood, int current_num_iter,
      const core::abstract::AbstractArguments *arguments_in) {

      return
        std::fabs(prev_likelihood - new_likelihood) <=
        arguments_in->absolute_tolerance_ ||
        current_num_iter >= arguments_in->max_num_iterations_;
    }

    /** @brief Divides the first num_states entries of column_out by
     *         normalization_constant in place. A non-positive constant
     *         (an all-zero column) leaves the column untouched to avoid
     *         dividing by zero.
     *
     *  (Replaces four copies of std::transform with
     *  std::bind2nd(std::divides<double>, c), which was missing the
     *  parentheses on the functor and relied on std::bind2nd, removed
     *  in C++17.)
     */
    static void NormalizeColumn_(
      double *column_out, int num_states, double normalization_constant) {

      if(normalization_constant > 0.0) {
        for(int j = 0; j < num_states; j++) {
          column_out[j] /= normalization_constant;
        }
      }
    }

    /** @brief Combines the filtered (forward) probabilities with the
     *         backward messages into the per-time-slice smoothed state
     *         marginals, each column normalized to sum to one.
     *
     *  @param filtered_probabilities_in num_states x num_observations
     *         output of the forward pass.
     *  @param future_conditional_likelihood_in Matching matrix of
     *         backward messages.
     *  @param smoothed_probabilities_out Resized and filled here.
     */
    static void SmoothedProbabilities_(
      const arma::mat &filtered_probabilities_in,
      const arma::mat &future_conditional_likelihood_in,
      arma::mat *smoothed_probabilities_out) {

      int num_observations = filtered_probabilities_in.n_cols;
      int num_states = filtered_probabilities_in.n_rows;
      smoothed_probabilities_out->zeros(num_states, num_observations);

      for(int t = 0; t < num_observations; t++) {
        double normalization_constant = 0.0;

        // colptr() on a const matrix yields a const pointer; the
        // original bound these to non-const double *.
        const double *current_filtered_probabilities =
          filtered_probabilities_in.colptr(t);
        const double *current_future_conditional_likelihood =
          future_conditional_likelihood_in.colptr(t);
        double *current_smoothed_probabilities =
          smoothed_probabilities_out->colptr(t);

        // Elementwise product of the forward and backward messages.
        for(int j = 0; j < num_states; j++) {
          current_smoothed_probabilities[j] =
            current_filtered_probabilities[j] *
            current_future_conditional_likelihood[j];
          normalization_constant += current_smoothed_probabilities[j];
        }
        NormalizeColumn_(
          current_smoothed_probabilities, num_states,
          normalization_constant);
      }
    }

    /** @brief Backward pass of Baum-Welch: for each time slice t and
     *         state j, computes a column-normalized message
     *         proportional to the likelihood of the observations after
     *         time t given state j at time t.
     *
     *  @param observations_in The observation sequence.
     *  @param hmm_model_in The current model parameters.
     *  @param future_conditional_likelihood_out Resized and filled here.
     */
    static void BackwardBaumWelch_(
      const arma::SpCol<int> &observations_in,
      const mlpack::hmm::HMMModel &hmm_model_in,
      arma::mat *future_conditional_likelihood_out) {

      int num_states = hmm_model_in.transition_probabilities_.n_rows;
      int num_observations = observations_in.n_nonzero;

      // Initialize the output.
      future_conditional_likelihood_out->zeros(num_states, num_observations);

      // Guard: colptr(-1) below would be out of bounds.
      if(num_observations == 0) {
        return;
      }

      // Base case: the message at the last time slice is all ones.
      std::fill(
        future_conditional_likelihood_out->colptr(
          num_observations - 1),
        future_conditional_likelihood_out->colptr(
          num_observations - 1) + num_states, 1.0);

      // Backward-recurse: beta_t(j) =
      //   sum_i A(j, i) * b_i(o_{t+1}) * beta_{t+1}(i).
      for(int t = num_observations - 2; t >= 0; t--) {
        const double *next_future_conditional_likelihood =
          future_conditional_likelihood_out->colptr(t + 1);
        double *current_future_conditional_likelihood =
          future_conditional_likelihood_out->colptr(t);
        double normalization_constant = 0.0;
        for(int j = 0; j < num_states; j++) {
          double sum = 0.0;
          for(int i = 0; i < num_states; i++) {
            // Fixed: the original applied function-call syntax to the
            // raw pointer (next_...(i)) and indexed the local evidence
            // at observations_in[t]; the standard backward recursion
            // uses the observation at t + 1 (the forward pass here
            // follows the matching standard alpha recursion).
            sum +=
              next_future_conditional_likelihood[i] *
              hmm_model_in.local_evidence_matrix_.at(
                i, observations_in[t + 1]) *
              hmm_model_in.transition_probabilities_.at(j, i);
          }
          current_future_conditional_likelihood[j] = sum;
          normalization_constant += sum;
        }
        NormalizeColumn_(
          current_future_conditional_likelihood, num_states,
          normalization_constant);
      }
    }

    /** @brief Forward pass of Baum-Welch: computes the filtered state
     *         probabilities P(state_t | o_1..o_t) column-by-column,
     *         recording the per-slice normalization constants (which
     *         multiply to the sequence likelihood).
     *
     *  @param observations_in The observation sequence.
     *  @param hmm_model_in The current model parameters.
     *  @param filtered_probabilities_out num_states x num_observations
     *         matrix, resized and filled here.
     *  @param normalization_constants_out One constant per time slice,
     *         resized and filled here.
     */
    static void ForwardBaumWelch_(
      const arma::SpCol<int> &observations_in,
      const mlpack::hmm::HMMModel &hmm_model_in,
      arma::mat *filtered_probabilities_out,
      arma::vec *normalization_constants_out) {

      int num_states = hmm_model_in.transition_probabilities_.n_rows;

      // Fixed for consistency: the original used n_elem here while
      // BackwardBaumWelch_ and ViterbiDecode both use n_nonzero as the
      // sequence length; the two passes must agree for smoothing.
      int num_observations = observations_in.n_nonzero;
      normalization_constants_out->zeros(num_observations);

      // Fill out each column sequentially. Each column is for a time
      // step.
      filtered_probabilities_out->zeros(num_states, num_observations);

      // Guard against an empty sequence.
      if(num_observations == 0) {
        return;
      }

      // Base case: prior times the local evidence of the first symbol.
      for(int j = 0; j < num_states; j++) {
        filtered_probabilities_out->at(j, 0) =
          hmm_model_in.prior_probabilities_[j] *
          hmm_model_in.local_evidence_matrix_.at(j, observations_in[0]);
        (*normalization_constants_out)[0] +=
          filtered_probabilities_out->at(j, 0);
      }
      NormalizeColumn_(
        filtered_probabilities_out->colptr(0), num_states,
        (*normalization_constants_out)[0]);

      // Induct: alpha_t(j) =
      //   b_j(o_t) * sum_i alpha_{t-1}(i) * A(i, j).
      for(int t = 1; t < num_observations; t++) {
        const double *previous_filtered_probabilities =
          filtered_probabilities_out->colptr(t - 1);
        double *current_filtered_probabilities =
          filtered_probabilities_out->colptr(t);
        for(int j = 0; j < num_states; j++) {
          double local_evidence =
            hmm_model_in.local_evidence_matrix_.at(j, observations_in[t]);
          double sum = 0.0;
          for(int i = 0; i < num_states; i++) {
            sum += previous_filtered_probabilities[i] *
                   hmm_model_in.transition_probabilities_.at(i, j);
          }
          current_filtered_probabilities[j] = sum * local_evidence;
          (*normalization_constants_out)[t] +=
            current_filtered_probabilities[j];
        } // end of the matrix-vector multiplication-like operation.

        NormalizeColumn_(
          current_filtered_probabilities, num_states,
          (*normalization_constants_out)[t]);
      } // end of looping over each observation slice.
    }

  public:

    /** @brief Computes the most probable hidden-state sequence for the
     *         given observations (the Viterbi algorithm), working in
     *         negative log-space for numerical stability.
     *
     *  @param observations_in The observation sequence.
     *  @param hmm_model_in The trained model.
     *  @param decoded_states_out The most probable state per time slice.
     *  @param probability_out The probability of the decoded path.
     */
    static void ViterbiDecode(
      const arma::SpCol<int> &observations_in,
      const mlpack::hmm::HMMModel &hmm_model_in,
      std::vector<int> *decoded_states_out,
      double *probability_out) {

      int num_observations = observations_in.n_nonzero;
      int num_states = hmm_model_in.transition_probabilities_.n_rows;
      decoded_states_out->resize(num_observations);

      // Guard: back() on an empty vector below is undefined behavior.
      // An empty path has the empty-product probability of one.
      if(num_observations == 0) {
        *probability_out = 1.0;
        return;
      }

      // Column t holds, per state, the negative log-likelihood of the
      // best path ending in that state, and the predecessor state that
      // achieved it.
      arma::mat most_probable_neg_loglikelihoods(
        num_states, num_observations);
      arma::Mat<int> most_probable_preds(num_states, num_observations);

      // Base case. (Fixed: local_evidence_matrix_ is indexed
      // (state, observation) everywhere else in this file; the original
      // passed the arguments in the opposite order here.)
      for(int i = 0; i < num_states; i++) {
        most_probable_neg_loglikelihoods.at(i, 0) =
          - std::log(hmm_model_in.prior_probabilities_[i]) -
          std::log(
            hmm_model_in.local_evidence_matrix_.at(i, observations_in[0]));
        most_probable_preds.at(i, 0) = 0;
      }

      // Induct over time slices.
      for(int t = 1; t < num_observations; t++) {
        const double *prev_most_probable_neg_loglikelihoods =
          most_probable_neg_loglikelihoods.colptr(t - 1);
        double *current_most_probable_neg_loglikelihoods =
          most_probable_neg_loglikelihoods.colptr(t);
        int *current_most_probable_preds = most_probable_preds.colptr(t);
        for(int j = 0; j < num_states; j++) {

          // Find the predecessor minimizing the accumulated negative
          // log-likelihood of reaching state j at time t.
          int min_state = 0;
          double min_weight =
            prev_most_probable_neg_loglikelihoods[0] -
            std::log(hmm_model_in.transition_probabilities_.at(0, j));
          for(int i = 1; i < num_states; i++) {
            double weight =
              prev_most_probable_neg_loglikelihoods[i] -
              std::log(hmm_model_in.transition_probabilities_.at(i, j));
            if(weight < min_weight) {
              min_state = i;
              min_weight = weight;
            }
          }
          current_most_probable_neg_loglikelihoods[j] =
            min_weight -
            std::log(
              hmm_model_in.local_evidence_matrix_.at(j, observations_in[t]));
          current_most_probable_preds[j] = min_state;
        }
      }

      // Pick the best terminal state.
      int min_state = 0;
      double min_weight =
        most_probable_neg_loglikelihoods.at(0, num_observations - 1);
      for(int j = 1; j < num_states; j++) {
        if(most_probable_neg_loglikelihoods.at(
              j, num_observations - 1) < min_weight) {
          min_state = j;
          min_weight =
            most_probable_neg_loglikelihoods.at(j, num_observations - 1);
        }
      }

      // Now traceback to find the most probable path.
      decoded_states_out->back() = min_state;
      for(int t = num_observations - 2; t >= 0; t--) {
        const int *next_most_probable_preds =
          most_probable_preds.colptr(t + 1);
        (*decoded_states_out)[t] =
          next_most_probable_preds[(*decoded_states_out)[t + 1]];
      }

      *probability_out = std::exp(- min_weight);
    }

    /** @brief Trains the HMM on the sequences in the reference table
     *         via Baum-Welch (EM). The E/M step bodies were left
     *         unimplemented in the original; the TODOs below mark them.
     *
     *  @param arguments_in Must actually be an HMMArguments instance.
     *  @param result_out Must actually be an HMMResult instance.
     *  @param model_out Must actually be an HMMModel instance.
     *  @return true on success, false when any argument has the wrong
     *          dynamic type. (The original declared void yet contained
     *          `return false;` statements, which does not compile;
     *          callers that ignore the return value are unaffected.)
     */
    static bool Train(
      const core::abstract::AbstractArguments *arguments_in,
      core::abstract::AbstractResult *result_out,
      core::abstract::AbstractModel *model_out) {

      // Guaranteed to be an HMMArguments, HMMResult, HMMModel here.
      const mlpack::hmm::HMMArguments *hmm_arguments =
        dynamic_cast< const mlpack::hmm::HMMArguments *>(arguments_in);
      if(! hmm_arguments) {
        std::cerr <<
                  "You need to call this function with an " <<
                  "HMMArgument object.\n";
        return false;
      }
      mlpack::hmm::HMMResult *hmm_result =
        dynamic_cast< mlpack::hmm::HMMResult *>(result_out);
      if(! hmm_result) {
        std::cerr <<
                  "You need to call this function with a " <<
                  "HMMResult object.\n";
        return false;
      }
      mlpack::hmm::HMMModel *hmm_model =
        dynamic_cast< mlpack::hmm::HMMModel *>(model_out);
      if(! hmm_model) {
        std::cerr <<
                  "You need to call this function with a " <<
                  "HMMModel object.\n";
        return false;
      }

      const core::data::SparseDataset *reference_table =
        dynamic_cast <
        const core::data::SparseDataset * >(hmm_arguments->reference_table_);
      if(! reference_table) {
        std::cerr <<
                  "You need to call this function with a SparseDataset object...\n";
        return false;
      }
      int num_points = reference_table->num_points();
      int num_states = hmm_arguments->num_states_;

      // Initialization.
      bool done = false;
      int current_num_iter = 0;  // Was read below but never declared.
      double prev_likelihood = - std::numeric_limits<double>::max();
      double new_likelihood = 0.0;

      // Per-sequence smoothed node and edge marginals, held by value
      // so they are released automatically. (The original new'ed raw
      // arma::mat / arma::cube pointers and never deleted them.)
      std::vector< arma::mat > smoothed_node_marginals(num_points);
      std::vector< arma::cube > smoothed_edge_marginals(num_points);
      for(int i = 0; i < num_points; i++) {
        const arma::SpCol<double> &curr_sequence =
          reference_table->get_instance(i);
        int num_observations_for_curr_sequence = curr_sequence.n_nonzero;
        smoothed_node_marginals[i].zeros(
          num_states, num_observations_for_curr_sequence);
        smoothed_edge_marginals[i].zeros(
          num_states, num_states, num_observations_for_curr_sequence);
      }

      // The main Baum-Welch loop.
      do {

        // E-step: for each sequence,
        for(int i = 0; i < num_points; i++) {
          const arma::SpCol<double> &curr_sequence =
            reference_table->get_instance(i);
          int num_observations_for_curr_sequence = curr_sequence.n_nonzero;

          // TODO: run ForwardBaumWelch_ / BackwardBaumWelch_ /
          // SmoothedProbabilities_ here and accumulate new_likelihood;
          // the original loop body was empty. NOTE(review): those
          // helpers take arma::SpCol<int> while get_instance() returns
          // arma::SpCol<double> -- to be reconciled when implementing.
          (void) curr_sequence;
          (void) num_observations_for_curr_sequence;
        }

        // The iteration counter was never maintained in the original,
        // so the convergence test could not terminate on the cap.
        current_num_iter++;
        if(BaumWelchConvergence_(
              prev_likelihood, new_likelihood,
              current_num_iter, hmm_arguments)) {
          done = true;
        }
        else {
          // TODO: M-step parameter updates on hmm_model.
          // Carry the likelihood forward for the next convergence test.
          prev_likelihood = new_likelihood;
        }

      }
      while(! done);

      // Quiet unused warnings until the E/M steps are implemented.
      (void) hmm_result;
      (void) hmm_model;
      return true;
    }
};
}
}

#endif
