#ifndef LDA_GIBBS_CC
#define LDA_GIBBS_CC
#include<string>
#include<fstream>
#include<sstream>
#include<iostream>
#include<algorithm>
#include<iterator>
#include<vector>
#include<map>
#include<cstdlib>
#include<ctime>
#include<cmath>
#include "def.hpp"
#include "topicmodel_lda_abstract.hpp"
#include "utils.hpp"

namespace Puza{
  // Batch collapsed Gibbs sampler for Latent Dirichlet Allocation.
  // Maintains the four standard LDA count tables and resamples every token's
  // topic assignment on each sweep (see Single_E_Step).
  class LDA_Gibbs:public LatentDirichletAllocation{
  private:
    int** n_m_k; // the number of times a term in document m assigned to topic k
    int* n_m;    // total number of tokens in document m
    int** n_t_k; // the number of times term t assigned to topic k
    int* n_k;    // total number of tokens assigned to topic k
    int BURN_IN;      // sweeps to discard before any model snapshot is written
    int SAMPLE_LAG;   // snapshot interval (in sweeps) after burn-in
    int  SAVE_TIME;   // number of snapshots written so far
    int OPT_INTERVAL; // the interval of gibbs iterations for hyper-parameters optimization (currently unused)
    int  LIKELIHOOD;  // interval (in sweeps) for reporting training likelihood
    
    double alpha_sum; // sum over the alpha hyper-parameter vector
    double beta_sum;  // sum over the beta hyper-parameter vector
    int switch_label; // tokens whose topic CHANGED in the last sweep (not the total token count)
    void Single_E_Step(); // a single swipe of data using Monte Carlo E step
    void Single_M_Step(); // declared but not defined in this file
    void Initial_Sample(); // random initial topic assignment + count-table setup
    double  compute_training_likelihood(); // collapsed log p(w,z) at current counts
    void TopicOut(); // write theta/phi point estimates
    void ModelOut(); // write raw count tables
    // NOTE(review): no destructor — n_m, n_k, n_m_k and n_t_k are never freed.
    // Acceptable only if a single instance lives for the whole process; confirm.
  public:
    virtual void Inference();
    virtual void Inference(bool); // bool: whether to save model snapshots
    virtual void Prediction();    // stub — not implemented
  virtual double Likelihood();    // stub — always returns 0.0
    virtual int ReadDOCS();
    virtual int GetDocuments(int);
    LDA_Gibbs();
  };

  // Allocates the count tables, zeroes them, caches the hyper-parameter sums,
  // and sets the sampler's schedule constants.
  LDA_Gibbs::LDA_Gibbs(){
    n_m = new int[PuzaDEF::Instance()->DOC_NUM];
    n_k = new int[PuzaDEF::Instance()->TOPIC_NUM];
    allocate_memory<int>(n_m_k, PuzaDEF::Instance()->DOC_NUM,PuzaDEF::Instance()->TOPIC_NUM);
    allocate_memory<int>(n_t_k,PuzaDEF::Instance()->TOPIC_NUM,PuzaDEF::Instance()->TERM_NUM);
    // alpha/beta are inherited from the base class and must be set before this runs
    alpha_sum = PuzaUtils::Instance()->get_sum(alpha,PuzaDEF::Instance()->TOPIC_NUM);
    beta_sum = PuzaUtils::Instance()->get_sum(beta,PuzaDEF::Instance()->TERM_NUM);

    // zero the per-document and per-topic token totals
    for (int i=0; i < PuzaDEF::Instance()->DOC_NUM; i++) { n_m[i] = 0; }
    for (int i=0; i < PuzaDEF::Instance()->TOPIC_NUM; i++) { n_k[i] = 0; }
    // BUGFIX: the 2-D count tables were never explicitly zeroed.  Unless
    // allocate_memory<> value-initializes (not visible from this file), the
    // Gibbs counts started from garbage.  Explicit zeroing is cheap and safe
    // either way.
    for (int i=0; i < PuzaDEF::Instance()->DOC_NUM; i++)
      for (int k=0; k < PuzaDEF::Instance()->TOPIC_NUM; k++)
	n_m_k[i][k] = 0;
    for (int k=0; k < PuzaDEF::Instance()->TOPIC_NUM; k++)
      for (int t=0; t < PuzaDEF::Instance()->TERM_NUM; t++)
	n_t_k[k][t] = 0;

    LEARNING_MODE = 0; // batch mode
    READ_MODE = 0;     // this is already set in the base class; restated for clarity
    BURN_IN = 100;     // sweeps before snapshots begin
    SAMPLE_LAG = 25;   // snapshot interval after burn-in
    SAVE_TIME = 0;     // no snapshots written yet
    OPT_INTERVAL = 25; // the interval of gibbs iterations for hyper-parameters optimization
    LIKELIHOOD = 20;   // report training likelihood every 20 sweeps
    PuzaLogger::Instance()->PutString("Gibbs Sampling for LDA initialized.");
    PuzaLogger::Instance()->PutString("\tInitial alpha value:"+to_string<double>(PuzaDEF::Instance()->LDA_initial_alpha,std::dec));
    PuzaLogger::Instance()->PutString("\tInitial beta value:"+to_string<double>(PuzaDEF::Instance()->LDA_initial_beta,std::dec));
  }
  
  // Gives every token a uniformly random topic and builds the four count
  // tables (n_m_k, n_t_k, n_k, n_m) from those initial assignments.
  void LDA_Gibbs::Initial_Sample(){
    for (int i=0; i< PuzaDEF::Instance()->DOC_NUM; i++){
      // BUGFIX(perf): bind by reference — the original copied every document's
      // whole TermList just to read/write it.
      TermList& current_list = TM_DOCS[i];
      int term_count = current_list.size();
      for (int j=0;j<term_count;j++){
	int term_id = current_list[j].term_id;
	// NOTE(review): assumes uniform_int_sample(0, TOPIC_NUM) draws from
	// [0, TOPIC_NUM) — confirm the bound convention in PuzaUtils.
	int current_t = PuzaUtils::Instance()->uniform_int_sample(0,PuzaDEF::Instance()->TOPIC_NUM);
	current_list[j].term_stat = current_t;
	n_m_k[i][current_t]++;
	n_t_k[current_t][term_id]++;
	n_k[current_t]++;
	n_m[i]++;
      }
    }
  }
  
  int LDA_Gibbs::ReadDOCS(){
    string temp;
    if(PuzaDEF::Instance()->input_file_name.empty()){
      cout << "Please give input file name." << endl;
      exit(0);
    }
    PuzaDEF::Instance()->InputFile.open(PuzaDEF::Instance()->input_file_name.c_str());
    if(!PuzaDEF::Instance()->InputFile){
      cout << "Can't open the file." << endl;
      exit(0);
    }
    PuzaDEF::Instance()->data_ids.clear();
    TM_DOCS.clear();
    string original_id;
    vector<string> buffer;
    while(!PuzaDEF::Instance()->InputFile.eof()){
      getline(PuzaDEF::Instance()->InputFile,temp,'\n');
      std::istringstream iss(temp);
      // get the doc id
      getline(iss,temp,'\t');
      original_id = temp;
      getline(iss,temp,'\n');
      std::istringstream tempiss(temp);
      vector<string> tokens;
      copy(istream_iterator<string>(tempiss), istream_iterator<string>(), back_inserter<vector<string> >(tokens));
      if(tokens.size()<1){
	continue;
      }
      int now_id = PuzaDEF::Instance()->data_ids.size();
      PuzaDEF::Instance()->data_ids[now_id] = original_id;
      TermList assignList;
      for (vector<string>::iterator iter = tokens.begin(); iter < tokens.end(); iter++){
	int word_id;
	int word_count;
	std::istringstream valueiss((*iter));
	getline(valueiss,temp,':');
	from_string<int>(word_id,temp,std::dec);
	getline(valueiss,temp);  // get count
	from_string<int>(word_count,temp,std::dec);
	for(int i=0; i < word_count; i ++){
	  TermItem newAssign;
	  newAssign.term_id = word_id;
	  newAssign.term_stat = 1; // temporary assignment
	  assignList.push_back(newAssign);
	}
      }
      TM_DOCS.push_back(assignList);
    }
    PuzaDEF::Instance()->InputFile.close();
    return 0;
  }
  
  // Batch mode: (re)loads the entire corpus via ReadDOCS() and returns its
  // status (0).  The `choice` argument is currently ignored.
  int LDA_Gibbs::GetDocuments(int choice){
    // batch mode
    return ReadDOCS();
  }
  
  // One collapsed-Gibbs sweep: for every token, remove its current assignment
  // from the counts, resample a topic from the full conditional, and put the
  // token back.  switch_label records how many tokens changed topic.
  void LDA_Gibbs::Single_E_Step(){
    const int K = PuzaDEF::Instance()->TOPIC_NUM;
    // scratch buffer for the (cumulative) unnormalized topic probabilities;
    // std::vector replaces the original new[]/delete[] pair (RAII)
    std::vector<double> p(K);
    switch_label = 0;

    for (int i=0;i<PuzaDEF::Instance()->DOC_NUM;i++){
      // BUGFIX(perf): bind by reference — the original copied the whole
      // document's TermList on every sweep.
      TermList& current_list = TM_DOCS[i];
      int term_count = current_list.size();
      for (int j=0;j < term_count; j++){
	int term_id = current_list[j].term_id;
	int current_assignment = current_list[j].term_stat;

	// remove the token's current assignment from the counts
	n_m_k[i][current_assignment]--;
	n_t_k[current_assignment][term_id]--;
	n_k[current_assignment]--;

	// full conditional p(z=k | rest), up to a constant; the document-side
	// denominator (term_count - 1 + alpha_sum) is constant in k and kept
	// only for clarity (n_m[i] == term_count for this document)
	for (int k=0; k<K; k++){
	  p[k] = ((n_t_k[k][term_id] + beta[term_id]) / (n_k[k] + beta_sum))
	       * ((n_m_k[i][k] + alpha[k]) / (term_count - 1 + alpha_sum));
	}
	// convert to a cumulative sum for inverse-CDF sampling
	for (int k=1; k<K; k++){
	  p[k] += p[k-1];
	}
	double u = PuzaUtils::Instance()->uniform_double_sample(0.0, p[K-1]);

	int new_topic = 0;
	for(; new_topic < K; new_topic++){
	  if (u < p[new_topic])
	    break;
	}
	// BUGFIX: if u == p[K-1] (possible when the uniform sampler's upper
	// bound is inclusive, or under floating-point edge cases) the loop
	// falls through with new_topic == K, and the increments below would
	// index out of bounds.  Clamp to the last topic.
	if (new_topic >= K) new_topic = K - 1;

	// add the token back under its (possibly new) topic
	n_m_k[i][new_topic]++;
	n_t_k[new_topic][term_id]++;
	n_k[new_topic]++;

	current_list[j].term_stat = new_topic;
	if(new_topic != current_assignment)
	  switch_label++;
      }
    }
  }
  
  // Default inference entry point: batch Gibbs sampling with model saving
  // requested.
  void LDA_Gibbs::Inference(){
    Inference(true);
  }
  
  void LDA_Gibbs::TopicOut(){
    ofstream outFile;
    string model_file = PuzaDEF::Instance()->output_file_name + ".theta";
    outFile.open(model_file.c_str());
    for (int i=0; i<PuzaDEF::Instance()->DOC_NUM; i++){
      outFile << PuzaDEF::Instance()->data_ids[i] << "\t";
      for(int k=0; k<PuzaDEF::Instance()->TOPIC_NUM; k++){
	double value = (n_m_k[i][k] + alpha[k]) / (n_m[i] + alpha_sum);
	outFile << k << ":" << value << " ";
      }
      outFile << endl;
    }
    outFile.flush();
    outFile.close();
    
    model_file = PuzaDEF::Instance()->output_file_name + ".phi";
    outFile.open(model_file.c_str());
    for (int i=0; i<PuzaDEF::Instance()->TERM_NUM; i++){
      outFile << i << "\t";
      for (int k=0; k<PuzaDEF::Instance()->TOPIC_NUM; k++){
	double value = (n_t_k[k][i] + beta[i]) / (n_k[k] + beta_sum);
	outFile << k << ":" << value << " ";
      }
      outFile << endl;
    }
    outFile.flush();
    outFile.close();
  }

  void LDA_Gibbs::ModelOut(){
    ofstream outFile;
    string model_file = PuzaDEF::Instance()->output_file_name + ".doc";
    outFile.open(model_file.c_str());
    for (int i=0; i<PuzaDEF::Instance()->DOC_NUM; i++){
      outFile << PuzaDEF::Instance()->data_ids[i] << "\t" << n_m[i] << "\t";
      for(int k=0; k<PuzaDEF::Instance()->TOPIC_NUM; k++){
	outFile << k << ":" << n_m_k[i][k] << " ";
      }
      outFile << endl;
    }
    outFile.flush();
    outFile.close();
    
    model_file = PuzaDEF::Instance()->output_file_name + ".word";
    outFile.open(model_file.c_str());
    for (int i=0; i<PuzaDEF::Instance()->TERM_NUM; i++){
      outFile << i << "\t";
      for (int k=0; k<PuzaDEF::Instance()->TOPIC_NUM; k++){
	outFile << k << ":" << n_t_k[k][i] << " ";
      }
      outFile << endl;
    }
    outFile.flush();
    outFile.close();
    
    /*  model_file = output_file_name + ".topic";
	outFile.open(model_file.c_str());
	for (int k=0; k<PuzaDEF::Instance()->TOPIC_NUM; k++){
	outFile << k << "\t" << n_k[k] << endl;
	}
	outFile.flush();
	outFile.close(); */
  }
  
  // Collapsed training log-likelihood at the current assignments:
  // log p(w|z) (a Dirichlet-multinomial normalizer ratio per topic) plus
  // log p(z) (the analogous ratio per document).  Summation order matches the
  // count-table layout so results are reproducible.
  double LDA_Gibbs::compute_training_likelihood(){
    double log_lik = 0.0;

    // word side: one term per topic over the vocabulary
    log_lik += PuzaDEF::Instance()->TOPIC_NUM * PuzaUtils::Instance()->log_gamma(beta_sum);
    for (int topic = 0; topic < PuzaDEF::Instance()->TOPIC_NUM; topic++){
      for (int term = 0; term < PuzaDEF::Instance()->TERM_NUM; term++){
	log_lik += PuzaUtils::Instance()->log_gamma(n_t_k[topic][term] + beta[term]);
	log_lik -= PuzaUtils::Instance()->log_gamma(beta[term]);
      }
      log_lik -= PuzaUtils::Instance()->log_gamma(n_k[topic] + beta_sum);
    }

    // topic side: one term per document over the topics
    log_lik += PuzaDEF::Instance()->DOC_NUM * PuzaUtils::Instance()->log_gamma(alpha_sum);
    for (int doc = 0; doc < PuzaDEF::Instance()->DOC_NUM; doc++){
      for (int topic = 0; topic < PuzaDEF::Instance()->TOPIC_NUM; topic++){
	log_lik += PuzaUtils::Instance()->log_gamma(n_m_k[doc][topic] + alpha[topic]);
	log_lik -= PuzaUtils::Instance()->log_gamma(alpha[topic]);
      }
      log_lik -= PuzaUtils::Instance()->log_gamma(n_m[doc] + alpha_sum);
    }
    return log_lik;
  }
  
  // Runs batch collapsed Gibbs sampling for a fixed budget of sweeps.
  // After BURN_IN sweeps, every SAMPLE_LAG-th sweep writes a model snapshot —
  // but only when `saved` is true.
  // BUGFIX: `saved` was accepted and then ignored, so Inference(false) still
  // wrote model files on every sampling lag.  The visible caller Inference()
  // passes true, so its behavior is unchanged.
  void LDA_Gibbs::Inference(bool saved){
    PuzaLogger::Instance()->PutString("Start Batch Gibbs Sampling for LDA");
    Initial_Sample();
    const int MAX_ITER = 500; // sweep budget (was an inline literal)
    int ITER = 0;
    while(1){
      Single_E_Step();

      if(saved && (ITER > BURN_IN) && (ITER % SAMPLE_LAG == 0)){
	TopicOut();
	ModelOut();
	SAVE_TIME++;
      }

      if (ITER >= MAX_ITER){
	break;
      }

      if (ITER % LIKELIHOOD == 0){
	// for every LIKELIHOOD (20) iterations, report the training likelihood
	double training_likelihood = compute_training_likelihood();
	PuzaLogger::Instance()->PutString(" Likelihood:" + to_string<double>(training_likelihood,std::dec));
      }
      PuzaLogger::Instance()->PutString("Iteration "+to_string<int>(ITER,std::dec)+ " Label Switch:" + to_string<int>(switch_label,std::dec));
      ITER ++;
    }
    PuzaLogger::Instance()->PutString("Batch Gibbs Sampling Finished.");
  }
  
  // Not implemented: held-out prediction is a no-op in this sampler.
  void LDA_Gibbs::Prediction(){
    
  }
  
  // Stub required by the abstract base class; always returns 0.0.
  // See compute_training_likelihood() for the actual training likelihood.
  double LDA_Gibbs::Likelihood(){
    return 0.0;
  }
};


#endif
