#ifndef GEO_MF_BATCH_CC
#define GEO_MF_BATCH_CC
#include<string>
#include<fstream>
#include<sstream>
#include<iostream>
#include<algorithm>
#include<iterator>
#include<vector>
#include<map>
#include<cstdlib>
#include<ctime>
#include<cmath>
#include "def.hpp"
#include "utils.hpp"
#include "topicmodel_geo_abstract.hpp"
#include "tbb/tbb.h" // used for Intel parallel threading
#include "boost/unordered_map.hpp" // for hashtable

typedef map<int,double> GradientTerm;
typedef vector<GradientTerm> Gradients;
typedef map<int,double> PiTerm;
typedef vector<PiTerm> PiMatrix;
typedef map<int,int> SSTermCount;
typedef map<int,int> SSDocCount;
typedef vector<SSDocCount> SSTheta;
typedef map<int,SSTermCount> SSTermList;
typedef map<int,SSTermList> SSGeoTopic;
typedef vector<SSTermCount> SSGeoTerm;
typedef vector<SSTermCount> SSTopicsTerms;

using namespace tbb;

/* class ApplyGeoTopic{
  double** phi_norm;
  double* phi_0;
  double norm_global_correction;
  PiMatrix pi;
  GeoTerms phi_geo;
public:
  ApplyGeoTopic(double*& p0, double**& k, PiMatrix& p, GeoTerms& pg, double ng){
    phi_0 = p0;
    pi = p;
    phi_geo = pg;
    norm_global_correction = ng;
    phi_norm = k;
  }
  
  void operator()( const blocked_range<size_t>& r ) const {
    for( size_t i=r.begin(); i!=r.end(); ++i ){
	// calculate the regional correction                                                                                                                
	vector<double> buffer1;
	vector<double> buffer2;
	buffer1.clear();
	buffer2.clear();
	for(GeoTermItem::const_iterator iter = phi_geo[i].begin(); iter != phi_geo[i].end(); ++iter){
	  int term_id = (*iter).first;
	  double value = (*iter).second;
	  // compute exp(phi_0[w]) * [ exp(phi_geo[r][w]) - 1]                                                                
	  buffer1.push_back(value + phi_0[term_id]);
	  buffer2.push_back(phi_0[term_id]);
	}
	double norm_region_correction_part1 = log_sum_vector(buffer1);     
	double norm_region_correction_part2 = log_sum_vector(buffer2);
	// calculate the regional topic correction                                                                                                    
	for(int k=0; k < PuzaDEF::Instance()->TOPIC_NUM; k++){
	  buffer1.clear();
	  buffer2.clear();
	  for(PiTerm::const_iterator iter = pi[k].begin();  iter != pi[k].end(); ++iter){
	    int term_id = (*iter).first;
	    double value = (*iter).second;
	    if(phi_geo[i].find(term_id) != phi_geo[i].end()){
	      value = value + phi_0[term_id] + phi_geo[i].find(term_id)->second;
	      buffer1.push_back(value);
	      buffer2.push_back(phi_0[term_id] + phi_geo[i].find(term_id)->second);
	    }
	    else{
	      value = value + phi_0[term_id];
	      buffer1.push_back(value);
	      buffer2.push_back(phi_0[term_id]);
	    }
	  }
	  double norm_region_topic_correction_part1 = buffer1.size()?log_sum_vector(buffer1):0;
	  double norm_region_topic_correction_part2 = buffer2.size()?log_sum_vector(buffer2):0;
	  phi_norm[i][k] = log_sum(norm_global_correction, norm_region_correction_part1);
	  phi_norm[i][k] = log_sum(phi_norm[i][k], norm_region_topic_correction_part1);
	  double minus  = log_sum(norm_region_correction_part2, norm_region_topic_correction_part2);
	  phi_norm[i][k] = log_sub(phi_norm[i][k], minus);
	}
    }
  }
};
*/


class GeoMFBatch:public GeoTopicModel{
  // Batch-trained geographical topic model: a corpus-wide background
  // language model (phi_0) plus sparse per-region corrections (phi_geo)
  // and per-topic corrections (pi), with one sampled topic per document.
  // NOTE(review): no destructor is declared, so the new[]-allocated arrays
  // below leak; acceptable only if a single instance lives for the whole
  // process lifetime — confirm.
protected:
  double* phi_0; // background language model for the whole corpus (log scale)
  GeoTerms phi_geo; // per-region additive term corrections (sparse maps)
  vector<int> doc_n; // total token count of each document
  PiMatrix pi; // per-topic additive term corrections
  //  PiMatrix pi2;
  double** phi_norm; // [region][topic] log-normalizer of the word distribution
  double* phi_norm_r; // [region] log-normalizer of phi_0 + phi_geo only
  double* theta_0; // background topic distribution
  double** theta_geo; // background topic distribution for each geo location
  double** theta_user; // background topic distribution for each user
  double* d_k_gradient; // expected doc-topic counts (gradient accumulator)
  double** d_u_gradient; // expected (user, topic) counts
  double** d_r_gradient; // expected (region, topic) counts
  int* d_k; // observed documents per topic
  SSTheta d_u_k; // observed documents per (user, topic)
  SSTheta d_r_k; // observed documents per (region, topic)

  vector<map<int,int> >  n_r_k; // token counts per (region, topic)
  vector<map<int,int> >  n_r_v; // token counts per (region, term)
  vector<map<int,int> >  n_k_v; // token counts per (topic, term)
  int* n_v; // corpus-wide token count of each term
  int* n_k; // token count per topic
  int* n_r; // token count per region
  int* d_u; // document count per user
  int* d_r; // document count per region

  double global_phi0_gradient; // shared part of the phi_0 gradient (scaled by exp(phi_0[v]))
  double** gradient_k_v; // term-specific part of the pi gradient
  double* global_pi_gradients; // topic-level shared part of the pi gradient
  double* gradient_phi0_v; // term-specific part of the phi_0 gradient
  GradientTerm gradient_r_v; // term-specific part of the phi_geo gradient (reused per region)

  double norm_global_correction; // log sum_v exp(phi_0[v])
  double norm_region_correction_part1; // exp(phi_0) exp(phi_geo)
  double norm_region_correction_part2; // exp(phi_0)
  double norm_region_topic_correction_part1;
  double norm_region_topic_correction_part2;

  //  int* doc_topic_z; // topic assignment for each document
  vector<int> doc_topic_z; // topic assignment for each document

  int N; // total number of word tokens in the corpus
  int switch_label; // documents whose topic changed during the last E-step
  double* theta_norm_buffer; // used for calculating theta norm
  double lowerbound; // most recently computed log-likelihood
  double learning_rate;
  double reg; // regularization weight (GEO_REG)
  double avg_term; // running sum of tokens over sampled documents
  double avg_change_term; // running sum of tokens over topic-switching documents
  int K; // parameter for L1 regularization by using Truncated Gradient method
  double g; // parameter for controling the sparsity
  ofstream file_output;
  int ITER; // current training iteration
public:
  GeoMFBatch();
  virtual void Inference(); // main training loop
  virtual void Prediction(); // region prediction / evaluation on the loaded corpus
  virtual double Likelihood(); // corpus log-likelihood under phi_0 + phi_geo
  virtual int ReadDOCS(); // parse the input corpus file
  virtual int GetDocuments(int); // this indicate how documents are read through into the model
  virtual int SaveModel(); // dump all parameters to output files
private:
  void PhiNormTBB(); // parallel (TBB) normalizer computation — currently unused
  void PhiNorm(); // recompute per-region log-normalizers phi_norm_r
  void OptimizeThetaZero(int);
  void OptimizeThetaUser(int);
  void OptimizeThetaRegion(int);
  void OptimizePi(int);
  void OptimizePhiZero(int);
  int Single_E_Step(int); // Gibbs-sample the topic of one document
  int Single_M_Step(); // batch gradient update of all parameters
};

int GeoMFBatch::SaveModel(){
  ofstream outFile;
  string model_file = PuzaDEF::Instance()->output_file_name + ".pi";
  outFile.open(model_file.c_str());
  for (int i=0; i<PuzaDEF::Instance()->TERM_NUM; i++){
    outFile << i << "\t";
    for(int k=0; k < PuzaDEF::Instance()->TOPIC_NUM; k++){
      if(pi[k].find(i)!=pi[k].end()){
	outFile << k << ":" << pi[k][i] << " ";
      }
      else{
	outFile << k << ":0 ";
      }
    }
    outFile << endl;
  }
  outFile.flush();
  outFile.close();

  model_file = PuzaDEF::Instance()->output_file_name + ".phi0";
  outFile.open(model_file.c_str());
  for (int i=0; i<PuzaDEF::Instance()->TERM_NUM; i++){
    outFile << i << ":" << phi_0[i] << " " << endl;;
  }
  outFile.flush();
  outFile.close();

  model_file = PuzaDEF::Instance()->output_file_name + ".theta0";
  outFile.open(model_file.c_str());
  for (int k=0; k<PuzaDEF::Instance()->TOPIC_NUM; k++){
    outFile << k << ":" << theta_0[k] << " " << endl;;
  }
  outFile.flush();
  outFile.close();

  model_file = PuzaDEF::Instance()->output_file_name + ".theta_geo";
  outFile.open(model_file.c_str());
  for(int i=0; i < PuzaDEF::Instance()->GEO_REGION_NUM; i++){
    outFile << i << "\t";
    for (int k=0; k<PuzaDEF::Instance()->TOPIC_NUM; k++){
      outFile << k << ":" << theta_geo[i][k] << " " ;
    }
    outFile << endl;
  }
  outFile.flush();
  outFile.close();

  model_file = PuzaDEF::Instance()->output_file_name + ".theta_user";
  outFile.open(model_file.c_str());
  for(int i=0; i < PuzaDEF::Instance()->GEO_USER_NUM; i++){
    outFile << i << "\t";
    for (int k=0; k<PuzaDEF::Instance()->TOPIC_NUM; k++){
      outFile << k << ":" << theta_user[i][k] << " " ;
    }
    outFile << endl;
  }
  outFile.flush();
  outFile.close();

  model_file = PuzaDEF::Instance()->output_file_name + ".phi_geo";
  outFile.open(model_file.c_str());
  for(int r=0; r<PuzaDEF::Instance()->GEO_REGION_NUM; r++){
    outFile << geo_mapping[r] << "\t";
    for(map<int,double>::iterator iter = phi_geo[r].begin(); iter != phi_geo[r].end(); iter++){
      outFile << (*iter).first << ":" << (*iter).second << " ";
    }
    outFile << endl;
  }
  outFile.flush();
  outFile.close();

  model_file = PuzaDEF::Instance()->output_file_name + ".z";
  outFile.open(model_file.c_str());
  for(unsigned int d = 0; d < TM_DOCS.size(); d++){
    int k = doc_topic_z[d];
    int region = doc_geo_stamp[d];
    int user = doc_user_stamp[d];
    outFile << d << "\t" << region << "\t" << user << "\t" << k << endl;
  }
  outFile.flush();
  outFile.close();
  return 0;
}

GeoMFBatch::GeoMFBatch(){
  // Allocate every parameter matrix / count array (dimensions come from
  // the PuzaDEF singleton) and zero-initialize all of them.
  allocate_memory<double>(gradient_k_v, PuzaDEF::Instance()->TOPIC_NUM,PuzaDEF::Instance()->TERM_NUM);
  allocate_memory<double>(theta_geo, PuzaDEF::Instance()->GEO_REGION_NUM,PuzaDEF::Instance()->TOPIC_NUM);
  allocate_memory<double>(theta_user, PuzaDEF::Instance()->GEO_USER_NUM,PuzaDEF::Instance()->TOPIC_NUM);
  allocate_memory<double>(phi_norm, PuzaDEF::Instance()->GEO_REGION_NUM,PuzaDEF::Instance()->TOPIC_NUM);
  allocate_memory<double>(d_u_gradient, PuzaDEF::Instance()->GEO_USER_NUM,PuzaDEF::Instance()->TOPIC_NUM);
  allocate_memory<double>(d_r_gradient, PuzaDEF::Instance()->GEO_REGION_NUM,PuzaDEF::Instance()->TOPIC_NUM);

  phi_0= new double[PuzaDEF::Instance()->TERM_NUM];
  theta_0 = new double[PuzaDEF::Instance()->TOPIC_NUM];
  theta_norm_buffer = new double[PuzaDEF::Instance()->TOPIC_NUM];

  phi_geo.resize(PuzaDEF::Instance()->GEO_REGION_NUM);
  pi.resize(PuzaDEF::Instance()->TOPIC_NUM);

  n_r_v.resize(PuzaDEF::Instance()->GEO_REGION_NUM);
  n_r_k.resize(PuzaDEF::Instance()->GEO_REGION_NUM);
  n_k_v.resize(PuzaDEF::Instance()->TOPIC_NUM);
  n_v = new int[PuzaDEF::Instance()->TERM_NUM];
  n_k = new int[PuzaDEF::Instance()->TOPIC_NUM];

  d_r_k.resize(PuzaDEF::Instance()->GEO_REGION_NUM);
  d_u_k.resize(PuzaDEF::Instance()->GEO_USER_NUM);
  d_k = new int[PuzaDEF::Instance()->TOPIC_NUM];
  d_u = new int[PuzaDEF::Instance()->GEO_USER_NUM];
  d_r = new int[PuzaDEF::Instance()->GEO_REGION_NUM];
  n_r = new int[PuzaDEF::Instance()->GEO_REGION_NUM];
  phi_norm_r = new double[PuzaDEF::Instance()->GEO_REGION_NUM];

  d_k_gradient = new double[PuzaDEF::Instance()->TOPIC_NUM];
  global_pi_gradients = new double[PuzaDEF::Instance()->TOPIC_NUM];
  gradient_phi0_v = new double[PuzaDEF::Instance()->TERM_NUM];

  // zero all topic-level counters and accumulators
  for(int k=0; k < PuzaDEF::Instance()->TOPIC_NUM; k ++){
    theta_0[k] = 0.0;
    d_k[k] = 0;
    n_k[k] = 0;
    d_k_gradient[k] = 0.0;
    global_pi_gradients[k] = 0.0;
  }

  // zero all region-level counters and accumulators
  for(int r=0; r < PuzaDEF::Instance()->GEO_REGION_NUM; r ++){
    for(int k=0; k < PuzaDEF::Instance()->TOPIC_NUM; k++){
      theta_geo[r][k] = 0.0;
      d_r_gradient[r][k] = 0.0;
      n_r_k[r][k] = 0;
    }
    d_r[r] = 0;
    n_r[r] = 0;
    phi_norm_r[r] = 0.0;
  }

  // zero all user-level counters and accumulators
  for(int u=0; u < PuzaDEF::Instance()->GEO_USER_NUM; u ++){
    for(int k=0; k < PuzaDEF::Instance()->TOPIC_NUM; k++){
      theta_user[u][k] = 0.0;
      d_u_gradient[u][k] = 0.0;
    }
    d_u[u] = 0;
  }

  // zero all term-level arrays
  for(int i=0; i < PuzaDEF::Instance()->TERM_NUM; i++){
    phi_0[i] = 0.0;
    gradient_phi0_v[i] = 0.0;
    n_v[i] = 0; // BUG FIX: n_v was never initialized but ReadDOCS increments it
  }

  learning_rate = PuzaDEF::Instance()->GEO_LEARNING_RATE;
  reg = PuzaDEF::Instance()->GEO_REG;
  K = PuzaDEF::Instance()->GEO_REG_L1_K;
  g = PuzaDEF::Instance()->GEO_REG_L1_G;
  avg_term = 0.0;
  avg_change_term = 0.0;
}

int GeoMFBatch::Single_E_Step(int d){
  // perform gibbs sampling for each document
  double* p = new double[PuzaDEF::Instance()->TOPIC_NUM];  
  int new_topic = 0;
  int old_k = doc_topic_z[d];
  int user = doc_user_stamp[d];
  int region = doc_geo_stamp[d];

  double u = 0.0;
  double theta_norm = 0.0;
  TermList current_list = TM_DOCS[d];
  int term_count = current_list.size();

  for(int k =0; k < PuzaDEF::Instance()->TOPIC_NUM;k++){
    //    theta_norm_buffer[k] = theta_user[user][k] + theta_0[k] + theta_geo[region][k];
    theta_norm_buffer[k] = theta_user[user][k];
  }
  theta_norm = log_sum_array(theta_norm_buffer,PuzaDEF::Instance()->TOPIC_NUM); // calculate theta normalization part
  
  for(int k =0; k < PuzaDEF::Instance()->TOPIC_NUM; k++){
    //    p[k] = theta_user[user][k] + theta_0[k] + theta_geo[region][k] - theta_norm;
    p[k] = theta_user[user][k] - theta_norm;
    for (int j=0;j < term_count; j++){
      int term_id = current_list[j].term_id;
      int count = current_list[j].term_stat;
      double value = phi_0[term_id];
      if(phi_geo[region].find(term_id) != phi_geo[region].end())
	value += phi_geo[region][term_id];
      if(pi[k].find(term_id) != pi[k].end())
	value += pi[k][term_id];
      p[k] = p[k] + count * (value - phi_norm[region][k]); // term level
    }
  }

  for(int j=0; j < term_count; j++){
    int term_id = current_list[j].term_id;
    int count = current_list[j].term_stat;
    n_k_v[old_k][term_id] = n_k_v[old_k][term_id] - count;
    //    if(n_k_v[old_k][term_id] == 0)
    //      n_k_v[old_k].erase(term_id);
  }
   
  // remove sufficient statistics
  n_r_k[region][old_k] = n_r_k[region][old_k] - doc_n[d];
  if(n_r_k[region][old_k] == 0)
    n_r_k[region].erase(old_k);  

  d_k[old_k] --;
  d_u_k[user][old_k] --;
  if(d_u_k[user][old_k] == 0)
    d_u_k[user].erase(old_k);
  d_r_k[region][old_k] --;
  if(d_r_k[region][old_k] == 0)
    d_r_k[region].erase(old_k);
  n_k[old_k] -= doc_n[d];
  
  // convert back to probabilities and normalize
  p[0] = exp(p[0]);
  for(int k=1; k < PuzaDEF::Instance()->TOPIC_NUM; k++){
    p[k] = exp(p[k]) + p[k-1];
  }
  
  // sampling from uniform distribution
  u = gsl_rng_uniform(PuzaDEF::Instance()->gBaseRand) * p[PuzaDEF::Instance()->TOPIC_NUM-1];
  for(new_topic = 0; new_topic < PuzaDEF::Instance()->TOPIC_NUM; new_topic++){
    if (u < p[new_topic])
      break;
  }

  // add sufficient statistics
  n_r_k[region][new_topic] += doc_n[d];
  n_k[new_topic] += doc_n[d];
  d_k[new_topic] ++;
  d_r_k[region][new_topic]++;
  d_u_k[user][new_topic]++;
  for (int j=0;j < term_count; j++){
    int term_id = current_list[j].term_id;
    int count = current_list[j].term_stat;
    n_k_v[new_topic][term_id] += count;
  }

  // update theta gradients
  for(int k=0; k < PuzaDEF::Instance()->TOPIC_NUM; k++){
    //    double temp = exp(theta_0[k] + theta_geo[region][k] + theta_user[user][k] - theta_norm);
    double temp =exp(theta_user[user][k] - theta_norm);
    d_k_gradient[k] += temp;
    d_u_gradient[user][k] += temp;
    d_r_gradient[region][k] += temp;
  }

  if(new_topic != doc_topic_z[d]){
    switch_label ++;
    avg_change_term += doc_n[d];
  }

  avg_term += doc_n[d];

  doc_topic_z[d] = new_topic;
  delete p;
  return 0;
}

void GeoMFBatch::OptimizeThetaUser(int shrink){
  // update theta user
  if(shrink == 1){
    for(int u=0; u < PuzaDEF::Instance()->GEO_USER_NUM; u++){
      for(int k=0; k < PuzaDEF::Instance()->TOPIC_NUM;k++){
	double gradient = 0.0;
	if(d_u_k[u].find(k) != d_u_k[u].end())
	  gradient += d_u_k[u][k];
	gradient = gradient - d_u_gradient[u][k];
	theta_user[u][k] = Trunc(theta_user[u][k] + learning_rate * gradient, g);
	d_u_gradient[u][k] = 0.0;
      }
    }
  }
  else{
    for(int u=0; u < PuzaDEF::Instance()->GEO_USER_NUM; u++){
      for(int k=0; k < PuzaDEF::Instance()->TOPIC_NUM;k++){
	double gradient = 0.0;
	if(d_u_k[u].find(k) != d_u_k[u].end())
	  gradient += d_u_k[u][k];
	gradient = gradient - d_u_gradient[u][k];
	theta_user[u][k] = theta_user[u][k] + learning_rate * gradient;
	d_u_gradient[u][k] = 0.0;
      }
    }
  }
}

void GeoMFBatch::OptimizeThetaZero(int shrink){
  // update theta 0
  for(int k=0; k < PuzaDEF::Instance()->TOPIC_NUM; k++){
    double gradient = d_k[k] - d_k_gradient[k];
    theta_0[k] = theta_0[k] + learning_rate * gradient;
    d_k_gradient[k] = 0.0;
  }
}

void GeoMFBatch::OptimizeThetaRegion(int shrink){
  // Gradient-ascent update for the per-region topic weights theta_geo.
  // gradient = observed doc count d_r_k[r][k] minus the expected count
  // d_r_gradient[r][k] accumulated during the E-step.
  // NOTE(review): the truncation threshold here is learning_rate * g,
  // whereas OptimizeThetaUser uses plain g — confirm the asymmetry is
  // intentional.  The `shrink` flag is ignored: truncation always runs.
  // update theta geo
  for(int r=0; r < PuzaDEF::Instance()->GEO_REGION_NUM; r++){
    for(int k=0; k < PuzaDEF::Instance()->TOPIC_NUM;k++){
      double gradient = 0.0;
      if(d_r_k[r].find(k) != d_r_k[r].end())
	gradient += d_r_k[r][k];
      gradient = gradient - d_r_gradient[r][k];
      theta_geo[r][k] = Trunc(theta_geo[r][k] + learning_rate * gradient, learning_rate * g);
      d_r_gradient[r][k] = 0.0; // reset accumulator for the next sweep
    }
  }
}

void GeoMFBatch::OptimizePi(int shrink){
  for(int k=0; k < PuzaDEF::Instance()->TOPIC_NUM; k++){
    //    double sparsity = 0.0;
    for(int v=0; v < PuzaDEF::Instance()->TERM_NUM; v++){
      double gradient = - (global_pi_gradients[k] * exp(phi_0[v]) + gradient_k_v[k][v]);    
      if(n_k_v[k].find(v)!=n_k_v[k].end()){
	gradient += n_k_v[k][v];
      }
      double temp;
      if(shrink)
	temp = Trunc(pi[k][v] + learning_rate * gradient, g);
      else
	temp = pi[k][v] + learning_rate * gradient;
      pi[k][v] = temp;
      //      cout << pi2[k][v] - pi[k][v] << endl;
      /*      if(fabs(pi2[k][v] - pi[k][v])>=1e-10){
	cout << "error " << k << " " << v << " " << endl;
	cout << pi2[k][v] << " " << pi[k][v] << endl;
	cout << pi2[k][v] - pi[k][v] << endl;
	exit(0);
      } 
      */     
      gradient_k_v[k][v]=0.0;
    }
    //    sparsity = sparsity / PuzaDEF::Instance()->TERM_NUM;
    //    PuzaLogger::Instance()->PutString("\t\tTopic "+to_string<int>(k,std::dec)+" Sparsity:"+to_string<double>(sparsity,std::dec));
    global_pi_gradients[k] = 0.0;
  }
}

void GeoMFBatch::OptimizePhiZero(int shrink){
  // Gradient-ascent update of the background language model phi_0.
  // gradient = observed corpus count n_v[v] minus the expected count
  // (the shared global part scaled by exp(phi_0[v]) plus the term-specific
  // part gradient_phi0_v[v]).  When shrink is set, Trunc() applies L1
  // truncated-gradient shrinkage; the fraction of entries driven to zero
  // is logged as the sparsity.
  const int term_num = PuzaDEF::Instance()->TERM_NUM;
  double zero_entries = 0.0;
  for(int v = 0; v < term_num; ++v){
    const double expected = global_phi0_gradient * exp(phi_0[v]) + gradient_phi0_v[v];
    double updated = phi_0[v] + learning_rate * (n_v[v] - expected);
    if(shrink)
      updated = Trunc(updated, g);
    phi_0[v] = updated;
    if(phi_0[v] == 0.0)
      zero_entries++;
    gradient_phi0_v[v] = 0.0; // reset accumulator for the next sweep
  }
  const double sparsity = zero_entries / term_num;
  PuzaLogger::Instance()->PutString("\t\tPhi 0 Sparsity:"+to_string<double>(sparsity,std::dec));
}

int GeoMFBatch::Single_M_Step(){
  // Batch M-step: for every region, turn the expected counts
  // exp(log n_r_k[r][k] - phi_norm[r][k]) into gradient contributions for
  // pi, phi_0 and phi_geo; apply a truncated-gradient update to phi_geo;
  // then run a second plain-gradient pass on phi_geo using the
  // region-level normalizer and refresh the normalizers.  Returns 0.
  global_phi0_gradient = 0.0;
  ofstream outFile;
  // NOTE(review): the open() for the region-sparsity log is commented out,
  // so outFile is never opened.  All writes are guarded with is_open() to
  // make the dead stream explicit instead of silently setting failbit.
  //  string model_file = PuzaDEF::Instance()->output_file_name + ".region_sparsity";
  //  outFile.open(model_file.c_str());
  for(int r =0; r < PuzaDEF::Instance()->GEO_REGION_NUM; r++){
    // calculate region contribution to gradients of pi
    double global_phi_geo_gradient = 0.0;
    gradient_r_v.clear();
    for(int k=0; k < PuzaDEF::Instance()->TOPIC_NUM; k++){
      if(n_r_k[r].find(k)!=n_r_k[r].end()){
	// expected count for (region r, topic k)
	double temp = exp(log(n_r_k[r][k]) - phi_norm[r][k]);
	if(temp != temp){ // NaN guard: abort with diagnostics
	  cout << n_r_k[r][k] << " " << phi_norm[r][k] << endl;
	  exit(0);
	}
	global_phi_geo_gradient += temp;
	global_pi_gradients[k] += temp;
	if(global_pi_gradients[k] != global_pi_gradients[k]){ // NaN guard
	  cout << n_r_k[r][k] << " " << phi_norm[r][k] << endl;
	  exit(0);
	}

	global_phi0_gradient += temp;

	// terms with a regional correction: exp(phi_0[w]) * (exp(phi_geo[r][w]) - 1)
	for(GeoTermItem::iterator iter = phi_geo[r].begin(); iter != phi_geo[r].end(); iter ++){
	  int term_id = (*iter).first;
	  double phi_geo_r_v = (*iter).second;
	  double temp1 = temp * exp(phi_0[term_id]) * (exp(phi_geo_r_v)-1);
	  gradient_k_v[k][term_id] += temp1;
	  gradient_r_v[term_id] += temp1;
	  gradient_phi0_v[term_id] += temp1;
	}

	// terms with a topic correction pi[k][w], with or without a
	// regional correction on the same term
	for(PiTerm::iterator iter = pi[k].begin(); iter != pi[k].end(); iter ++){
	  int term_id = (*iter).first;
	  double pi_k_v = (*iter).second;
	  double temp1;
	  if(phi_geo[r].find(term_id)!=phi_geo[r].end()){
	    temp1 = temp * (exp(phi_0[term_id] + phi_geo[r][term_id] + pi_k_v) - exp(phi_0[term_id] + phi_geo[r][term_id]));
	  }
	  else{
	    temp1 = temp * (exp(phi_0[term_id] + pi_k_v) - exp(phi_0[term_id]));
	  }
	  gradient_k_v[k][term_id] += temp1;
	  gradient_phi0_v[term_id] += temp1;
	  if(n_r_v[r].find(term_id)!=n_r_v[r].end())
	    gradient_r_v[term_id] += temp1;
	}
      }
    }
    // truncated-gradient update of phi_geo; entries driven to zero are
    // erased so the map stays sparse
    double sparsity = 0.0;
    for(map<int,int>::iterator iter = n_r_v[r].begin(); iter != n_r_v[r].end(); iter ++){
      int term_id = (*iter).first;
      int count = (*iter).second;
      double gradient = count - (global_phi_geo_gradient * exp(phi_0[term_id]) + gradient_r_v[term_id]);
      double temp = Trunc(phi_geo[r][term_id] + learning_rate * gradient, g);
      if(temp == 0.0){
	phi_geo[r].erase(term_id);
	sparsity ++;
      }
      else{
	phi_geo[r][term_id] = temp;
      }
    }
    if(outFile.is_open())
      outFile << geo_mapping[r] << "\t" << sparsity << " " << n_r_v[r].size() << " " << (sparsity / n_r_v[r].size()) << endl;
  }
  if(outFile.is_open())
    outFile.close();

  // second, untruncated gradient pass on phi_geo using the region-level
  // normalizer phi_norm_r (the original wrapped this in a while(1) that
  // always broke after one pass — unwrapped here)
  for(int r = 0; r < PuzaDEF::Instance()->GEO_REGION_NUM; r++){
    for(map<int,int>::iterator iter = n_r_v[r].begin(); iter != n_r_v[r].end(); iter ++){
      int term_id = (*iter).first;
      int count = (*iter).second;
      double gradient = count - n_r[r] * exp(phi_0[term_id] + phi_geo[r][term_id] - phi_norm_r[r]);
      phi_geo[r][term_id] = phi_geo[r][term_id] + learning_rate * gradient;
    }
  }
  PhiNorm(); // refresh per-region normalizers for the updated parameters
  return 0;
}

void GeoMFBatch::Inference(){
  // Training driver: run up to 501 batch M-steps (ITER 0..500), logging
  // the likelihood each iteration and checkpointing every 50 iterations.
  // (The unused `old_lowerbound` dead store from the original is removed.)
  PuzaLogger::Instance()->PutString("Start Inference for Geo Matrix Factorization Model");
  if(PuzaDEF::Instance()->GEO_REG_TYPE == 0){
    PuzaLogger::Instance()->PutString("L2 Regularization.");
  }
  else{
    PuzaLogger::Instance()->PutString("L1 Regularization.");
  }
  ITER = 0;
  while(1){
    switch_label = 0;
    PuzaLogger::Instance()->PutString("Iteration "+to_string<int>(ITER,std::dec));
    // NOTE: the per-document E-step is currently disabled; only the batch
    // M-step (gradient updates of phi_geo) is executed.
    Single_M_Step();
    Likelihood();
    PuzaLogger::Instance()->PutString("\tLikelihood:" + to_string<double>(lowerbound,std::dec)+" " + to_string<int>(ITER,std::dec));

    if (ITER % 50 == 0){
      SaveModel(); // periodic checkpoint
    }

    if (ITER>=500){
      break;
    }

    ITER ++;
  }
  PuzaLogger::Instance()->PutString("Batch Learning Finished.");
}

void GeoMFBatch::Prediction(){
  double error = 0.0;
  for(unsigned int d = 0; d < TM_DOCS.size(); d++){
    //    int k = doc_topic_z[d];
    TermList current_list = TM_DOCS[d];
    int term_count = current_list.size();
    int region = doc_geo_stamp[d];
    double max_like = 0;
    int max_r = -1;

    for(int r = 0; r < PuzaDEF::Instance()->GEO_REGION_NUM; r++){
      double result = 0.0;
      for (int j=0;j < term_count; j++){
	int term_id = current_list[j].term_id;
	int count = current_list[j].term_stat;
	double temp = phi_0[term_id];
	if(phi_geo[r].find(term_id)!=phi_geo[r].end())
	  temp = temp + phi_geo[r][term_id];
	result = result + count * (temp - phi_norm_r[r]);
      }
      if(result > max_like){
	max_like = result;
	max_r = r;
      }
      if(r == 0){
	max_like = result;
	max_r = r;
      }
    }
    if(max_r != region)
      error ++;
  }
  cout << error << " " << TM_DOCS.size() << " " << (TM_DOCS.size()-error)/TM_DOCS.size() << endl;
}
 
double GeoMFBatch::Likelihood(){
  double result = 0.0;

  for(unsigned int d = 0; d < TM_DOCS.size(); d++){
    //    int k = doc_topic_z[d];
    TermList current_list = TM_DOCS[d];
    int term_count = current_list.size();
    //    int user = doc_user_stamp[d];
    int region = doc_geo_stamp[d];
    
    //    for(int j =0; j < PuzaDEF::Instance()->TOPIC_NUM;j++){
      //      theta_norm_buffer[j] = theta_0[j] + theta_geo[region][j] + theta_user[user][j];
    //      theta_norm_buffer[j] = theta_user[user][j];
    //    }
    //    double theta_norm = log_sum_array(theta_norm_buffer,PuzaDEF::Instance()->TOPIC_NUM);
    //    result = result + theta_0[k] + theta_geo[region][k] + theta_user[user][k] - theta_norm;
    //    result = result + theta_user[user][k] - theta_norm;
    for (int j=0;j < term_count; j++){
      int term_id = current_list[j].term_id;
      int count = current_list[j].term_stat;
      double temp = phi_0[term_id];
      //if(pi[k].find(term_id)!=pi[k].end())
      //temp = temp + pi[k][term_id];
      if(phi_geo[region].find(term_id)!=phi_geo[region].end())
	temp = temp + phi_geo[region][term_id];
      result = result + count * (temp - phi_norm_r[region]);
    }      
  }
  lowerbound = result;
  return lowerbound;
}

int GeoMFBatch::ReadDOCS(){
  // Parse the corpus file: one document per line in the format
  //   doc_id \t geo_stamp \t user_stamp \t term:count term:count ...
  // Builds TM_DOCS plus the geo/user id mappings, assigns each document a
  // uniformly random initial topic, and accumulates all count statistics.
  // Returns the maximum number of distinct terms seen in one document.
  int MAX_INT = 0;
  int c = 0;
  string temp;
  N = 0;
  if(PuzaDEF::Instance()->input_file_name.empty()){
    cout << "Please give input file name." << endl;
    exit(0);
  }
  ifstream InputFile;
  InputFile.open(PuzaDEF::Instance()->input_file_name.c_str());
  if(!InputFile){
    cout << "Can't open the file." << endl;
    exit(0);
  }
  // reset any previously loaded corpus
  PuzaDEF::Instance()->data_ids.clear();
  TM_DOCS.clear();
  doc_n.clear();
  doc_topic_z.clear();
  doc_geo_stamp.clear();
  doc_user_stamp.clear();
  string original_id;
  string temp_geo_stamp;
  string temp_user_stamp;
  // BUG FIX: loop on the result of getline instead of testing eof(), so
  // the final failed read is never processed.
  while(getline(InputFile,temp,'\n')){
    std::istringstream iss(temp);
    // get the doc id
    getline(iss,temp,'\t');
    original_id = temp;
    // get the geo stamp
    getline(iss,temp,'\t');
    temp_geo_stamp = temp;
    // get the user stamp
    getline(iss,temp,'\t');
    temp_user_stamp = temp;
    // get the doc body and split on whitespace
    getline(iss,temp,'\n');
    std::istringstream tempiss(temp);
    vector<string> tokens;
    copy(istream_iterator<string>(tempiss), istream_iterator<string>(), back_inserter<vector<string> >(tokens));
    if(tokens.size()<1){
      continue; // skip empty documents
    }

    int now_id = PuzaDEF::Instance()->data_ids.size();
    PuzaDEF::Instance()->data_ids[now_id] = original_id;
    // map the geo stamp to a dense region id, creating one if unseen
    boost::unordered_map<string,int>::iterator geo_iter = geo_reverse_mapping.find(temp_geo_stamp);
    int geo_id = -1;
    int user_id = -1;
    if(geo_iter != geo_reverse_mapping.end()){
      geo_id = (*geo_iter).second;
      doc_geo_stamp.push_back(geo_id);
    }
    else{
      geo_id = geo_reverse_mapping.size();
      geo_reverse_mapping[temp_geo_stamp] = geo_id;
      geo_mapping[geo_id] = temp_geo_stamp;
      doc_geo_stamp.push_back(geo_id);
    }

    // map the user stamp to a dense user id, creating one if unseen
    boost::unordered_map<string,int>::iterator user_iter = user_reverse_mapping.find(temp_user_stamp);
    if(user_iter != user_reverse_mapping.end()){
      user_id = (*user_iter).second;
      doc_user_stamp.push_back(user_id);
    }
    else{
      user_id = user_reverse_mapping.size();
      user_reverse_mapping[temp_user_stamp] = user_id;
      user_mapping[user_id] = temp_user_stamp;
      doc_user_stamp.push_back(user_id);
    }

    TermList assignList;
    int temp_n = 0;
    // uniformly random initial topic assignment
    int k = (int)random_uniform_distribution(0, PuzaDEF::Instance()->TOPIC_NUM);
    doc_topic_z.push_back(k);
    // parse each "term_id:count" token and accumulate counts
    for (vector<string>::iterator iter = tokens.begin(); iter < tokens.end(); iter++){
      int word_id;
      int word_count;
      std::istringstream valueiss((*iter));
      getline(valueiss,temp,':');
      from_string<int>(word_id,temp,std::dec);
      getline(valueiss,temp);  // get count
      from_string<int>(word_count,temp,std::dec);
      TermItem newAssign;
      newAssign.term_id = word_id;
      newAssign.term_stat = word_count;
      assignList.push_back(newAssign);
      N = N + word_count;
      temp_n = temp_n + word_count;
      n_k_v[k][word_id] = n_k_v[k][word_id] + word_count;
      n_r_v[geo_id][word_id] = n_r_v[geo_id][word_id] + word_count;
      n_v[word_id] = n_v[word_id] + word_count;
    }
    if((int)assignList.size() > MAX_INT)
      MAX_INT = assignList.size();
    TM_DOCS.push_back(assignList);
    doc_n.push_back(temp_n);
    // document-level sufficient statistics
    d_k[k] ++;
    d_u[user_id] ++;
    d_r[geo_id] ++;
    d_u_k[user_id][k] ++;
    d_r_k[geo_id][k] ++;
    n_k[k] += temp_n;
    n_r_k[geo_id][k] += temp_n;
    n_r[geo_id] += temp_n;
    c++;
  }
  InputFile.close();
  return MAX_INT;
}

void GeoMFBatch::PhiNormTBB(){  
  // Parallel (TBB) computation of the per-(region, topic) normalizers.
  // NOTE(review): ApplyGeoTopic is commented out at the top of this file;
  // unless it is defined in an included header, this function will not
  // compile.  All call sites to PhiNormTBB are currently commented out.
  // calculate the global correction
  norm_global_correction = log_sum_array(phi_0, PuzaDEF::Instance()->TERM_NUM);
  parallel_for(blocked_range<size_t>(0,PuzaDEF::Instance()->GEO_REGION_NUM), ApplyGeoTopic(phi_0,phi_norm,pi,phi_geo,norm_global_correction));
}

void GeoMFBatch::PhiNorm(){
  double* buffer = new double[PuzaDEF::Instance()->TERM_NUM];
  /*  for(int k=0; k < PuzaDEF::Instance()->TOPIC_NUM; k++){
    for(int i=0; i < PuzaDEF::Instance()->TERM_NUM; i++){
      buffer[i] = phi_0[i] + pi[k][i];
    }
    double temp = log_sum_array(buffer,PuzaDEF::Instance()->TERM_NUM);
    for(int r=0; r < PuzaDEF::Instance()->GEO_REGION_NUM; r++){
      phi_norm[r][k] = temp;
    }
  }
  */
  for(int r = 0; r < PuzaDEF::Instance()->TOPIC_NUM; r++){
    for(int i=0; i < PuzaDEF::Instance()->TERM_NUM; i++){
      buffer[i] = phi_0[i];
      if(phi_geo[r].find(i)!=phi_geo[r].end())
	buffer[i] += phi_geo[r][i];
    }
    double temp = log_sum_array(buffer,PuzaDEF::Instance()->TERM_NUM);
    phi_norm_r[r] = temp;
  }
  delete buffer;
}

int GeoMFBatch::GetDocuments(int choice){
  ReadDOCS();
  
  if(choice == 0){
    // training
    for(int i=0; i < PuzaDEF::Instance()->TERM_NUM; i++)
      phi_0[i] = log((n_v[i] + 1.0)/(N+PuzaDEF::Instance()->TERM_NUM));

    for(int r=0; r < PuzaDEF::Instance()->GEO_REGION_NUM; r++){
      for(map<int,int>::iterator iter = n_r_v[r].begin(); iter != n_r_v[r].end(); iter++){
	int term_id = (*iter).first;
	double value = (*iter).second;
	double temp = log(value / n_r[r]) - phi_0[term_id];
	phi_geo[r][term_id] = temp;
      }
    }
  

    /*for(int k=0; k < PuzaDEF::Instance()->TOPIC_NUM; k++){
      for(int i=0; i < PuzaDEF::Instance()->TERM_NUM; i++){
      pi[k][i] = log( (pi[k][i] + 1.0) / (n_k[k] + PuzaDEF::Instance()->TERM_NUM) );
      }
      }
    */
    
    
    //  PhiNormTBB();
    PhiNorm();
    
    /*  for(int u=0; u < PuzaDEF::Instance()->GEO_USER_NUM; u++){
	for(int k=0; k < PuzaDEF::Instance()->TOPIC_NUM; k++){
	theta_user[u][k] = log ((theta_user[u][k] + 1.0) / (d_u[u] + PuzaDEF::Instance()->TOPIC_NUM));
	}
	}
    */

    // initilize theta parameters
    /*for(int k=0; k < PuzaDEF::Instance()->TOPIC_NUM; k++){
      theta_0[k] = log( (d_k[k] + 1.0) / (TM_DOCS.size() + PuzaDEF::Instance()->TOPIC_NUM));
    }

    for(int r=0; r < PuzaDEF::Instance()->GEO_REGION_NUM; r++){
    for(int k=0; k < PuzaDEF::Instance()->TOPIC_NUM; k++){
    theta_geo[r][k] = log((theta_geo[r][k] + 1.0) / (d_r[r] + PuzaDEF::Instance()->TOPIC_NUM));
    }
    }
    */
  }
  return 0;
}
#endif
