#ifndef LDA_VB_CC
#define LDA_VB_CC
#include<string>
#include<fstream>
#include<sstream>
#include<iostream>
#include<algorithm>
#include<iterator>
#include<vector>
#include<map>
#include<cstdlib>
#include<ctime>
#include<cmath>
#include "def.hpp"
#include "utils.hpp"
#include "log.hpp"
#include "topicmodel_lda_abstract.hpp"

class LDA_VB:public LatentDirichletAllocation{
private:
  int var_iter; // the number of iterations for variational inference
  int Single_E_Step(int);
  int Single_M_Step();
  double loggamma_alpha_sum;
  double sum_loggamma_alpha;
  double loggamma_beta_sum;
  double sum_loggamma_beta;
  double lowerbound;

  double alpha_sum;
  double* var_lambda_k; // the sum of variational paramter var_lambda[k]
  double** ss_lambda_k; // the sufficient statistics for var lambda
  double* var_rho_old; // the cache of each var rho k
  double* var_old_gamma; // the cache of old var gamma values
  double* cache_digamma_var_gamma; // the cache of digamma values of var gamma
  double cache_digamma_var_gamma_sum; // the cache of digamma values of sum of var gamma
  double cache_loggamma_sum_var_gamma; // the cache of log-gamma values of var gamma
  double cache_sum_loggamma_var_gamma; // the cache of sum of log-gamma values of var gamma
  double** cache_digamma_var_lambda; // the cache of digamma values of individual var lambda
  double* cache_digamma_var_lambda_k; // the cache of digamma values of var lambda k
  double* cache_loggamma_var_lambda_k;
  double* cache_sum_loggamma_var_lambda_k;
  void Save_Expectation_Theta_VarGamma(int);
  void Save_Expectation_Phi_VarLambda();
  ofstream file_theta;
  ofstream file_phi;
  ofstream file_log;
protected:
  double* var_gamma; // variational parameters; documents X topics; Dirichlet
  double** var_rho; // variational parameters; terms in a document X topics; Multinomial
  double** var_lambda; // variational parameters; topics X terms; Dirichlet
public:
  virtual void Inference(); // in this function, we do not save var gamma per document
  virtual void Inference(bool); // in this function, we can have the choice 
  virtual void Prediction();
  virtual double Likelihood();
  virtual void SaveModel();
  virtual int GetDocuments();
  //  virtual void LoadModel();
  LDA_VB();
};

LDA_VB::LDA_VB(){
  // Allocate every variational parameter and cache, then randomly
  // initialize the topic-word Dirichlet parameters var_lambda and the
  // caches derived from them.

  // var_rho is per-document scratch allocated inside Inference();
  // keep the pointer null until then. (BUGFIX: it was previously
  // left uninitialized.)
  var_rho = 0;

  allocate_memory<double>(var_lambda,TOPIC_NUM,TERM_NUM);
  allocate_memory<double>(cache_digamma_var_lambda,TOPIC_NUM,TERM_NUM);
  allocate_memory<double>(ss_lambda_k,TOPIC_NUM,TERM_NUM);
  var_lambda_k = new double[TOPIC_NUM];
  var_rho_old = new double[TOPIC_NUM];
  var_gamma = new double[TOPIC_NUM];
  var_old_gamma = new double[TOPIC_NUM];
  cache_digamma_var_gamma = new double[TOPIC_NUM];
  cache_digamma_var_lambda_k = new double[TOPIC_NUM];
  cache_loggamma_var_lambda_k = new double[TOPIC_NUM];
  cache_sum_loggamma_var_lambda_k = new double[TOPIC_NUM];
  alpha_sum = get_sum(alpha,TOPIC_NUM);
  cache_digamma_var_gamma_sum = 0.0;
  cache_loggamma_sum_var_gamma = 0.0;
  cache_sum_loggamma_var_gamma = 0.0;

  // Constant pieces of the lower bound that depend only on the
  // Dirichlet priors alpha (per topic) and beta (per term).
  loggamma_alpha_sum = log_gamma(alpha_sum);
  loggamma_beta_sum = log_gamma(get_sum(beta,TERM_NUM));

  sum_loggamma_alpha = sum_log_gamma(alpha, TOPIC_NUM);
  sum_loggamma_beta = sum_log_gamma(beta, TERM_NUM);

  // initialize var lambda and its derived caches
  for(int k=0; k < TOPIC_NUM; k++){
    var_lambda_k[k] = 0;
    cache_sum_loggamma_var_lambda_k[k] = 0.0;

    for(int t=0; t < TERM_NUM; t++){
      // BUGFIX: the E-step accumulates into ss_lambda_k (Single_E_Step)
      // before the first M-step ever resets it, so it must start at
      // zero; do not rely on allocate_memory zero-filling the buffer.
      ss_lambda_k[k][t] = 0.0;
      var_lambda[k][t] = random_gamma_distribution(100,0.01); // random initialize each component of lambda
      var_lambda_k[k] = var_lambda_k[k] + var_lambda[k][t];
      cache_digamma_var_lambda[k][t] = digamma(var_lambda[k][t]);
      cache_sum_loggamma_var_lambda_k[k] += log_gamma(var_lambda[k][t]);
    }
    cache_digamma_var_lambda_k[k] = digamma(var_lambda_k[k]);
    cache_loggamma_var_lambda_k[k] = log_gamma(var_lambda_k[k]);
  }

  lowerbound = 0.0;
  LEARNING_MODE = 0; // batch mode;
  READ_MODE = 0; // this is already set in the base class
  PuzaLogger::Instance()->PutString("Variational Inference for LDA initialized.");
}

void LDA_VB::SaveModel(){
  // Intentionally empty: model snapshots are currently written from
  // Inference(bool) via Save_Expectation_Phi_VarLambda() instead.
  // TODO(review): either implement this override or document at the
  // base class that it is optional.
}

int LDA_VB::GetDocuments(){
  // batch mode
  for(int i=0; i < DOC_NUM; i++){
    TermList temp;
    TM_DOCS.push_back(temp);
  }
  return (*ReadFunc)(TM_DOCS,MAX_INT,READ_MODE,LEARNING_MODE);
}


int LDA_VB::Single_E_Step(int d){
  // Variational E-step for document d.
  // Alternates closed-form updates of the token-level topic
  // responsibilities var_rho and the document-level Dirichlet
  // parameter var_gamma until gamma converges (mean absolute change
  // < 1e-5) or 500 inner iterations are reached. Afterwards it adds
  // this document's contribution to the variational lower bound and
  // accumulates the topic-term sufficient statistics ss_lambda_k
  // consumed by Single_M_Step(). Returns the number of inner
  // iterations performed.
  int small_iter = 0;
  TermList current_list = TM_DOCS[d];
  int term_count = current_list.size();
  // Initialize rho uniformly over topics and count the total number
  // of tokens N (term_stat is this term's count in the document).
  int N = 0;
  for(int i=0; i < term_count; i++){
    int count = current_list[i].term_stat;
    N += count;
    for(int k=0; k< TOPIC_NUM; k++){
      var_rho[i][k] = 1.0 / TOPIC_NUM;
    }
  }

  // gamma_k = alpha_k + N/K is the fixed point implied by the
  // uniform rho initialization above; cache digamma(gamma_k).
  for(int k=0; k< TOPIC_NUM; k++){
    var_gamma[k] = alpha[k] + float(N) / TOPIC_NUM;
    cache_digamma_var_gamma[k] = digamma(var_gamma[k]);
  }

  // E-step fixed-point loop for this single document.
  double var_rho_sum =0;
  while(1){
    // Remember gamma so convergence can be measured afterwards.
    for(int k=0; k < TOPIC_NUM; k++){
      var_old_gamma[k] = var_gamma[k];
    }

    for(int i=0; i < term_count; i++){
      int term_id = current_list[i].term_id;
      int count = current_list[i].term_stat;
      var_rho_sum = 0;
      // Unnormalized log rho_ik = E[log phi_{k,term}] + E[log theta_k];
      // var_rho_sum accumulates the log normalizer via log_sum.
      for(int k=0; k < TOPIC_NUM; k++){
	var_rho_old[k] = var_rho[i][k];
	var_rho[i][k] = cache_digamma_var_lambda[k][term_id] - cache_digamma_var_lambda_k[k] + cache_digamma_var_gamma[k]; // in log space
	if(k == 0){
	  var_rho_sum = var_rho[i][k];
	}
	else{
	  var_rho_sum = log_sum(var_rho_sum,var_rho[i][k]);
	}
      }
      // Normalize rho back to probability space and fold the change
      // into gamma incrementally (gamma_k += count * delta rho_ik),
      // refreshing the digamma cache used by the following tokens.
      for(int k=0; k < TOPIC_NUM; k++){
	var_rho[i][k] = exp(var_rho[i][k] - var_rho_sum);
	var_gamma[k] = var_gamma[k] + count * (var_rho[i][k] - var_rho_old[k]);
	cache_digamma_var_gamma[k] = digamma(var_gamma[k]);
      }
    }

    // Converged when the mean absolute change of gamma is small.
    double error = 0.0;
    for(int k=0; k < TOPIC_NUM; k++){
      error = error + fabs(var_gamma[k] - var_old_gamma[k]);
    }

    if(error/TOPIC_NUM < 0.00001)
      break;
    if(small_iter >= 500)
      break;
    small_iter ++;
  }
  //  exit(0);
  // Refresh the gamma-derived caches used in the bound below:
  // digamma(sum_k gamma_k), log_gamma(sum_k gamma_k) and
  // sum_k log_gamma(gamma_k). The first two reuse the accumulator
  // as the running sum before applying digamma/log_gamma.
  cache_digamma_var_gamma_sum = 0.0;
  cache_sum_loggamma_var_gamma = 0.0;
  cache_loggamma_sum_var_gamma = 0.0;
  for(int k=0; k < TOPIC_NUM; k++){
    cache_digamma_var_gamma_sum += var_gamma[k];
    cache_loggamma_sum_var_gamma += var_gamma[k];
    cache_sum_loggamma_var_gamma += log_gamma(var_gamma[k]);
  }
  cache_digamma_var_gamma_sum = digamma(cache_digamma_var_gamma_sum);
  cache_loggamma_sum_var_gamma = log_gamma(cache_loggamma_sum_var_gamma);

  // Document-level terms of the variational lower bound:
  // E[log p(theta|alpha)] - E[log q(theta|gamma)]
  // + E[log p(z|theta)] + E[log p(w|z,phi)] - E[log q(z|rho)].
  double result = 0.0;
  result = result + loggamma_alpha_sum - sum_loggamma_alpha;
  result = result - cache_loggamma_sum_var_gamma + cache_sum_loggamma_var_gamma;
  for(int k=0; k < TOPIC_NUM; k++){
    result = result + (alpha[k] - 1) * (cache_digamma_var_gamma[k] - cache_digamma_var_gamma_sum);
    result = result - (var_gamma[k] - 1) * (cache_digamma_var_gamma[k] - cache_digamma_var_gamma_sum);
    for(int i=0; i < term_count; i++){
      int term_id = current_list[i].term_id;
      int count = current_list[i].term_stat;
      result = result + count * var_rho[i][k] * (cache_digamma_var_gamma[k] - cache_digamma_var_gamma_sum);
      result = result + count * var_rho[i][k] * (cache_digamma_var_lambda[k][term_id] - cache_digamma_var_lambda_k[k]);
      result = result - count * var_rho[i][k] * log(var_rho[i][k]);
      // Sufficient statistics for the M-step update of lambda.
      ss_lambda_k[k][term_id] = ss_lambda_k[k][term_id] + count * var_rho[i][k];
    }
  }

  lowerbound = lowerbound + result;
  return small_iter;
}

int LDA_VB::Single_M_Step(){
  // Variational M-step: rebuild the topic-word Dirichlet parameters
  // lambda_kw = beta_w + ss_kw from the sufficient statistics gathered
  // by the E-step, refresh every lambda-derived cache, then add the
  // topic-level terms of the variational lower bound. Always returns 0.

  for(int k=0; k < TOPIC_NUM; k++){
    var_lambda_k[k] = 0.0;
    cache_sum_loggamma_var_lambda_k[k] = 0.0;
    for(int w = 0; w < TERM_NUM; w++){
      var_lambda[k][w] = beta[w] + ss_lambda_k[k][w];
      ss_lambda_k[k][w] = 0.0; // reset statistics for the next E-step pass
      // fresh cache
      var_lambda_k[k] = var_lambda_k[k] + var_lambda[k][w];
      cache_digamma_var_lambda[k][w] = digamma(var_lambda[k][w]);
      cache_sum_loggamma_var_lambda_k[k] += log_gamma(var_lambda[k][w]);
    }
    cache_digamma_var_lambda_k[k] = digamma(var_lambda_k[k]);
    cache_loggamma_var_lambda_k[k] = log_gamma(var_lambda_k[k]);
  }
  // calculate the contribution to the lowerbound:
  // E[log p(phi|beta)] - E[log q(phi|lambda)] summed over topics.
  double result = TOPIC_NUM*(loggamma_beta_sum - sum_loggamma_beta);
  for(int k=0; k< TOPIC_NUM; k++){
    for(int v = 0; v < TERM_NUM; v ++){
      result = result + (beta[v] - 1) * (cache_digamma_var_lambda[k][v] - cache_digamma_var_lambda_k[k]);
      result = result - (var_lambda[k][v] - 1) * (cache_digamma_var_lambda[k][v] - cache_digamma_var_lambda_k[k]);
    }
    result = result - cache_loggamma_var_lambda_k[k] + cache_sum_loggamma_var_lambda_k[k];
  }

  lowerbound = lowerbound + result;
  return 0;
}

void LDA_VB::Save_Expectation_Theta_VarGamma(int d){
  // obtain the original id
  string original_id = data_ids[d];
  file_theta << original_id << "\t";
  double var_gamma_sum = get_sum(var_gamma,TOPIC_NUM);
  for(int k=0; k < TOPIC_NUM; k++){
    file_theta << k << ":" << (var_gamma[k] / var_gamma_sum) << " ";
  }
  file_theta << endl;
}


void LDA_VB::Save_Expectation_Phi_VarLambda(){
  // Write the expected topic-word probabilities, i.e.
  // E[phi_kt] = lambda_kt / sum_t(lambda_kt), one line per term.
  for(int term = 0; term < TERM_NUM; ++term){
    file_phi << term << "\t";
    for(int topic = 0; topic < TOPIC_NUM; ++topic){
      file_phi << topic << ":" << (var_lambda[topic][term] / var_lambda_k[topic]) << " ";
    }
    file_phi << endl;
  }
}

void LDA_VB::Inference(){
  Inference(true);
}

void LDA_VB::Inference(bool saved){
  // Batch variational EM for LDA.
  // Each outer iteration runs the E-step over every document
  // (rebuilding the lower bound and sufficient statistics from
  // scratch), then the M-step. Stops when the lower bound improves by
  // <= 1e-5 (checked only after 100 iterations) or after 500
  // iterations. When `saved` is true, the expected topic-word
  // distributions are (re)written to <output_file_name>.phi after
  // every M-step.
  allocate_memory<double>(var_rho,MAX_INT,TOPIC_NUM);
  PuzaLogger::Instance()->PutString("Start Batch Variational Inference for LDA");
  // variational EM
  var_iter = 0;
  double old_lowerbound = 0;
  while(1){
    // The bound is re-accumulated by the E- and M-steps below.
    lowerbound = 0.0;
    // E-step; track the total number of inner fixed-point iterations
    // so the average can be logged.
    int total_inner_iter = 0;
    for(int d=0; d < DOC_NUM; d++){
      total_inner_iter += Single_E_Step(d);
    }
    // BUGFIX: the average was previously stored in an int, so the
    // division was truncated before logging; keep it as a double.
    double average_iter = total_inner_iter / (double)DOC_NUM;

    // M-step
    Single_M_Step();
    if(saved == true){
      string phi_file_name = output_file_name + ".phi";
      file_phi.open(phi_file_name.c_str());
      Save_Expectation_Phi_VarLambda();
      file_phi.close();
    }

    PuzaLogger::Instance()->PutString("iter :" + to_string<int>(var_iter,std::dec) + " average iter :" + to_string<double>(average_iter,std::dec) \
				      +  " lowerbound:" + to_string<double>(lowerbound,std::dec));

    // Convergence: only trust the bound's improvement after a warm-up
    // of 100 iterations; hard cap at 500.
    if(var_iter > 100){
      if((lowerbound - old_lowerbound)<=0.00001)
	break;
    }
    if(var_iter >= 500)
      break;

    old_lowerbound = lowerbound;
    var_iter ++;
  }
  dellocate_memory<double>(var_rho,MAX_INT,TOPIC_NUM);
  PuzaLogger::Instance()->PutString("Batch Learning Finished.");
  // NOTE(review): file_log is never opened in this class; closing an
  // unopened stream is harmless, kept for safety.
  file_log.close();
}

void LDA_VB::Prediction(){
  // Not implemented: held-out prediction is not supported by this
  // batch VB implementation. TODO(review): implement or document at
  // the base class that this override is optional.
}

double LDA_VB::Likelihood(){
  return lowerbound;
}


#endif
