/*
 * =====================================================================================
 *
 *       Filename:  plsa.h
 *
 *    Description:  probabilistic Latent Semantic Analysis, actually the training of PLSA
 *                  is just training a mixture of multinomial distributions.
 *
 *        Version:  1.0
 *        Created:  2009年07月12日 22时01分14秒
 *       Revision:  none
 *       Compiler:  gcc
 *         Author:  Ying Wang (WY), ywang@nlpr.ia.ac.cn
 *        Company:  Institute of Automation, Chinese Academy of Sciences
 *
 * =====================================================================================
 */
#ifndef PLSA_H
#define PLSA_H
#include "ncmatrix.h"
#include "ncvector.h"
#include "mathutils.h"
#include <cmath>
#include <iostream>
#include <limits>
#define PLSA_ITER 50
#define PLSA_THRESHOLD 1e-7
class PLSA 
{
public:
	/**
	 * Build a PLSA model over a document-word count matrix.
	 *
	 * @param data  N*V matrix; data(i,j) is the count/weight of word j
	 *              in document i (copied into the model).
	 * @param k     number of latent topics.
	 */ 
	PLSA(const NCmatrix<double> &data, int k); 
	void initialize(); 
	void train(); 

public:
	// Accessors return const references instead of by-value copies: the
	// matrices are N*K / K*V and copying them on every call is wasted
	// work. Callers that want a copy still get one by assigning to a
	// value, so this change is backward compatible.
	inline const NCmatrix<double>& respond() const
	{
		return Q;
	}
	inline const NCmatrix<double>& topicwordprob() const
	{
		return theta;
	}
	inline const NCvector<double>& topicprob() const
	{
		return pi;
	}
	inline int numtopic() const
	{
		return K;
	}
private:
	double estep();
	void mstep();
private: 
	NCmatrix<double> m_H; //m_H is a N*V matrix, N is number of the training sample, V is the number of Vocabulary.
	                        
	int K;// number of the topics
	int N;
	int V;
	NCvector<double> pi;    //pi is a K-dimension vector, pi(k) denote the probability of kth topic.
	NCmatrix<double> theta; //theta is a K*V matrix, theta(k,j) denote the probability of jth word occurring in kth topic.
	NCmatrix<double> Q; //Q is a N*K matrix, Q(i,k) denote the probability of ith sample assign to kth topic
};


// Construct the model and size all parameter containers.
//
// 'inline' is required: this definition lives in a header, and without
// it, including the header from two translation units violates the One
// Definition Rule and fails at link time.
//
// NOTE: members are initialized in DECLARATION order (m_H, K, N, V, pi,
// theta, Q), which is what makes it safe for N/V to read m_H and for
// pi/theta/Q to read K/N/V here. Keep the declaration order in the
// class in sync with these dependencies.
inline PLSA::PLSA(const NCmatrix<double> &data, int k)
	: m_H(data)
	, K(k)
	, N(m_H.row())
	, V(m_H.column())
	, pi(K)
	, theta(K,V)
	, Q(N,K)
{
}

/**
 * Seed the model parameters before EM.
 *
 * pi starts uniform. theta gets a small deterministic jitter and is then
 * renormalized per topic: a perfectly uniform theta is a fixed point of
 * EM — every Q(i,k) comes out identical, so the M-step reproduces K
 * identical topics forever. Symmetry must be broken at initialization.
 * The jitter is a fixed pseudo-random pattern so runs stay reproducible.
 */
inline void PLSA::initialize()
{
	for ( int k=0; k<K; k++ )
	{
		pi[k] = 1./K;
		double rowsum = 0.;
		for( int j=0; j<V; j++ )
		{
			// deterministic jitter in [1, 1.5) to break topic symmetry
			theta[k][j] = 1. + 0.5 * (((k*31 + j*17) % 97) / 97.0);
			rowsum += theta[k][j];
		}
		for( int j=0; j<V; j++ )
			theta[k][j] /= rowsum; // each topic is a proper distribution
	}
}
/**
 * Run EM until the relative change of the log-likelihood drops below
 * PLSA_THRESHOLD, or at most PLSA_ITER E/M iterations (same cap as the
 * original unrolled first-iteration + loop structure).
 */
inline void PLSA::train()
{
	initialize();
	double oldloglike = 0.;
	for ( int iter=1; iter<=PLSA_ITER; iter++ )
	{
		double loglike = estep();
		mstep();
		if ( iter > 1 )
		{
			// relative improvement; loglike is negative, so the
			// convergence test uses fabs()
			double change_percent = (loglike - oldloglike) / oldloglike;
			std::cout<<"iter num: "<<iter<<std::endl;
			std::cout<<"change percent: "<<change_percent<<std::endl;
			if ( fabs(change_percent) <= PLSA_THRESHOLD )
				break;
		}
		oldloglike = loglike;
	}
}
/**
 * E-step: compute the responsibilities Q(i,k) = P(topic k | document i)
 * and return the data log-likelihood.
 *
 * The per-topic weight is pi(k) * prod_j theta(k,j)^H(i,j). Its log is a
 * large negative number for any realistic document, so exponentiating it
 * directly underflows to 0 — which made the original normalizer 0 and
 * produced divisions by zero and log(0). We therefore work in the log
 * domain and use the log-sum-exp trick: subtract the per-document
 * maximum before exponentiating, so at least one exp() argument is 0.
 */
inline double PLSA::estep()
{
	double logprob = 0.;
	for( int i=0; i<N; i++ )
	{
		// log-weights for each topic; track the maximum for stability
		double maxlog = -std::numeric_limits<double>::infinity();
		for( int k=0; k<K; k++ )
		{
			double tmp = log(pi[k]);
			for ( int j=0; j<V; j++ )
			{
				tmp += m_H[i][j]*log(theta[k][j]);
			}
			Q[i][k] = tmp; // temporarily holds the log-weight
			if ( tmp > maxlog )
				maxlog = tmp;
		}

		// exponentiate shifted log-weights (all exponents <= 0) and
		// normalize into proper posteriors
		double normal = 0.;
		for( int k=0; k<K; k++ )
		{
			Q[i][k] = exp(Q[i][k] - maxlog);
			normal += Q[i][k];
		}
		logprob += maxlog + log(normal);
		for( int k=0; k<K; k++ )
			Q[i][k] /= normal;
	}
	std::cout<<"prob: "<< logprob<<std::endl;
	return logprob;
}

void PLSA::mstep()
{
	int i,j,k;
	double tmppi,tmptheta,normal;

	for( k=0; k<K; k++ )
	{
		for( tmppi=0., i=0; i<N; i++ )
		{
			tmppi += Q[i][k];		
		}
		pi[k] = tmppi/N;
		for(normal=0., j=0; j<V; j++ )
		{
			for( tmptheta=0., i=0; i<N; i++ )
			{
				tmptheta += Q[i][k] * m_H[i][j];
			}
			theta[k][j] = tmptheta;
			normal += tmptheta;
		}

		for( j=0; j<V; j++ )
			theta[k][j] /= normal;
	}

}
#endif
