#include "word.h"

#include <fstream>
#include <bitset>
#include <set>
#include <sstream>
#include <map>
#include <vector>
#include <list>
#include <algorithm>
#include <math.h>
#include <time.h>
#include <limits.h>
#include <errno.h>
#include <stdio.h>
#include <pthread.h>
#include <string.h>
#include <ext/hash_map>
#include <ext/hash_set>

#include "gbk.h"
#include "slog.h"
#include "Config.h"

using namespace std;
using namespace d_utils;
using __gnu_cxx::hash_map;


// Comparator for std::sort: orders Entrorank items by descending total
// (left + right) entropy so the strongest word candidates come first.
struct myclass {
	bool operator() (const Entrorank& i, const Entrorank& j)
	{
		const double total_i = i.entrol + i.entror;
		const double total_j = j.entrol + j.entror;
		return total_i > total_j;
	}
} entrorankcomp;

// Guards the shared output containers (buckets / maps) while worker threads
// insert into or merge with them.
static pthread_mutex_t g_lock = PTHREAD_MUTEX_INITIALIZER;
// Second mutex available for independent shared state (e.g. global counters).
static pthread_mutex_t g_lock1 = PTHREAD_MUTEX_INITIALIZER;
// Total number of GBK characters scanned; incremented in PushAllNeighbor.
// NOTE(review): written from multiple worker threads -- confirm it is
// synchronized (the plain ++ there is a data race as written).
static long long int chn_chars;

// Worker routine: reads `lines` lines of `itemfile` starting at line `start`,
// strips the leading CSV columns, splits the remainder on ',' and buckets
// every piece containing at least one GBK (Chinese) character: pieces of
// <= 2 characters go to str_short, longer ones to str_chn.  str_eng is
// currently unused (non-Chinese pieces are dropped; see commented line).
static void ReadItem(const char* itemfile, str_bucket_t& str_chn, str_bucket_t& str_eng, str_bucket_t& str_short, int start, int lines)
{
	ifstream fin(itemfile);
	if(!fin)
	{
		Info("cannot read %s", itemfile);
		return;
	}

	string line;
	int linecount = 0;
	// Skip the first `start` lines.  Check the count BEFORE reading: the
	// previous read-then-check loop consumed (and lost) one extra line per
	// worker (off-by-one).
	while(linecount < start && getline(fin, line))
	{
		linecount++;
	}
	linecount = 0;

	while(linecount < lines && getline(fin, line))
	{
		linecount++;

		// Skip the first 7 comma-separated columns ("title","tags",...).
		// NOTE: if a line has fewer than 7 commas, ++npos wraps to 0 and the
		// search restarts from the beginning (behavior kept from original).
		size_t found = 0;
		for(int i = 0; i < 7; i++)
		{
			found = line.find(",", ++found);
		}
		// Keep only the text after the 7th comma.  (The original called
		// c_str() on a temporary substr() and then read the dangling pointer
		// -- undefined behavior; erase() edits in place instead.)
		if(found != string::npos) line.erase(0, found);

		// Strip '"': we now have title,tag1,tag2...tagn,introduction
		EraseChar(line, '\"');

		// Split by ','
		str_bucket_t buffer;
		SplitByChar(buffer, line, ',');

		// Process each piece; identify if it contains a Chinese character.
		while(!buffer.empty())
		{
			line = *(buffer.begin());
			int count = 0;          // number of GBK-aware characters scanned
			bool is_chn = false;    // piece contains at least one GBK char
			size_t scan = 0;
			string word;
			while(scan < line.size())
			{
				count++;
				word = GetGBKChar(line, scan);
				if(isGBK(word)) is_chn = true;
				scan += word.size();
			}
			// Output buckets are shared between worker threads.
			pthread_mutex_lock(&g_lock);
			if(is_chn)
			{
				if(count <= 2) str_short.push_back(line);
				else str_chn.push_back(line);
			}
			//else str_eng.push_back(line);
			pthread_mutex_unlock(&g_lock);
			buffer.erase(buffer.begin());
		}
	}
	fin.close();
}

// Arguments passed to one ReadItem worker thread (see ReadItemProc).
struct ReadItemPara
{
	int id, len, start;   // worker id, number of lines to read, first line
	const char *itemfile; // input file path
	str_bucket_t *str_chn, *str_eng, *str_short; // shared output buckets
};

void* ReadItemProc(void* para)
{
	ReadItem(((ReadItemPara*)para)->itemfile, *(((ReadItemPara*)para)->str_chn), *(((ReadItemPara*)para)->str_eng),
			*(((ReadItemPara*)para)->str_short), ((ReadItemPara*)para)->start,
			((ReadItemPara*)para)->len);
	return NULL;
}

// Driver: counts the lines of `itemfile`, then fans out `threadcount`
// ReadItem workers, each reading a contiguous slice of the file into the
// shared buckets.  Blocks until all workers finish.
static void ReadItem1(const char* itemfile, str_bucket_t& str_chn, str_bucket_t& str_eng, str_bucket_t& str_short, int threadcount)
{
	Info("in file %s", itemfile);

	pthread_t threads[threadcount];
	ReadItemPara para[threadcount];

	// First pass: count lines so the work can be partitioned.
	ifstream fin(itemfile);
	if(!fin) Info("cannot read %s", itemfile);
	string line;
	int linecount = 0;
	while(getline(fin, line))
	{
		linecount++;
	}
	Info("%d lines to read", linecount);
	int readlen = linecount/threadcount;

	Info("use %d threads to read item", threadcount);
	for ( int i=0; i<threadcount; i++ )
	{
		para[i].id = i;
		para[i].str_chn = &str_chn;
		para[i].str_eng = &str_eng;
		para[i].str_short = &str_short;
		para[i].itemfile = itemfile;
		para[i].start = i*readlen;
		// The last worker also takes the remainder lines; the original gave
		// every worker exactly readlen lines, silently dropping up to
		// threadcount-1 lines at the end of the file.
		para[i].len = (i == threadcount-1) ? (linecount - i*readlen) : readlen;
		int ret = pthread_create(&threads[i], NULL, ReadItemProc, &para[i]);
		if ( ret )
		{
			Error("pthread_create return %d, errno is %d", ret, errno);
		}
	}

	for ( int i=0; i<threadcount; i++ )
	{
		pthread_join(threads[i], NULL );
	}
	// Cast sizes: %d with size_t is undefined varargs behavior on LP64.
	Info("finish read item, %d chinese pieces, %d non chinese pieces, %d short words", 
			(int)str_chn.size(), (int)str_eng.size(), (int)str_short.size());
}


// Returns the thread-local neighbor-statistics entry for `str`, allocating
// (and inserting) a fresh one on first use.  The hit path now performs one
// hash lookup; the original did find + two operator[] lookups.
// Ownership: entries are heap-allocated here and freed in MergeLocalNeighbor.
static localnbr_rank_map_t* FindNeighborLocal(local_neighbor_map_t& neighbor_map, const string& str)
{
	local_neighbor_map_t::iterator it = neighbor_map.find(str);
	if(it != neighbor_map.end()) return it->second;

	localnbr_rank_map_t* new_map = new localnbr_rank_map_t();
	new_map->l_neighbor = new ite_rank_map_t();
	new_map->r_neighbor = new ite_rank_map_t();
	neighbor_map[str] = new_map;
	return new_map;
}

// Records one observation of word `str` with left neighbor `ln` and right
// neighbor `rn`; an empty string means no neighbor on that side.
// hash_map::operator[] value-initializes a missing counter to 0, so the
// original find-then-assign-or-increment reduces to a single ++ per side.
static void PushNeighborLocal(local_neighbor_map_t& neighbor_map, const string& str, const string& ln, const string& rn)
{
	localnbr_rank_map_t& entry = *FindNeighborLocal(neighbor_map, str);
	if(!ln.empty()) ++(*(entry.l_neighbor))[ln];
	if(!rn.empty()) ++(*(entry.r_neighbor))[rn];
}

// Returns the SHARED neighbor-statistics entry for `str`, allocating one
// (with its per-side mutexes initialized) on first use.  Caller must hold
// g_lock, since the map itself is not internally synchronized.  Single hash
// lookup on the hit path (original: find + two operator[] lookups).
static nbr_rank_map_t* FindNeighbor(neighbor_map_t& neighbor_map, const string& str)
{
	neighbor_map_t::iterator it = neighbor_map.find(str);
	if(it != neighbor_map.end()) return it->second;

	nbr_rank_map_t* new_map = new nbr_rank_map_t();
	pthread_mutex_init(&(new_map->l_lock), NULL);
	pthread_mutex_init(&(new_map->r_lock), NULL);
	new_map->l_neighbor = new ite_rank_map_t();
	new_map->r_neighbor = new ite_rank_map_t();
	neighbor_map[str] = new_map;
	return new_map;
}

// Folds a thread-local neighbor map into the shared `neighbor_all`, then
// frees every local entry.  Locking: the global g_lock guards creation of
// the shared per-word entry; each entry's own l_lock/r_lock guard its
// left/right counter maps, so different words can be merged concurrently.
static void MergeLocalNeighbor(neighbor_map_t& neighbor_all, local_neighbor_map_t& neighbor)
{
	for(local_neighbor_map_t::iterator it = neighbor.begin(); it != neighbor.end(); it++)
	{
		// Find-or-create the shared entry under the global lock.
		pthread_mutex_lock(&g_lock);
		nbr_rank_map_t& nmap = *FindNeighbor(neighbor_all, it->first);
		pthread_mutex_unlock(&g_lock);
		localnmap_rank: // (no label in original -- see note below)
		localnbr_rank_map_t& localnmap = *(it->second);
		// Merge left-neighbor counts under the entry's left lock.
		pthread_mutex_lock(&(nmap.l_lock));
		ite_rank_map_t &l_neighbor = *(nmap.l_neighbor), &r_neighbor=*(nmap.r_neighbor); 
		ite_rank_map_t &ll_neighbor = *(localnmap.l_neighbor), &lr_neighbor=*(localnmap.r_neighbor); 
		for(ite_rank_map_t::iterator itt = ll_neighbor.begin(); itt != ll_neighbor.end(); itt++)
		{
			if(l_neighbor.find(itt->first) == l_neighbor.end()) l_neighbor[itt->first] = itt->second;
			else l_neighbor[itt->first] += itt->second;
		}
		pthread_mutex_unlock(&(nmap.l_lock));
		// Merge right-neighbor counts under the entry's right lock.
		pthread_mutex_lock(&(nmap.r_lock));
		for(ite_rank_map_t::iterator itt = lr_neighbor.begin(); itt != lr_neighbor.end(); itt++)
		{
			if(r_neighbor.find(itt->first) == r_neighbor.end()) r_neighbor[itt->first] = itt->second;
			else r_neighbor[itt->first] += itt->second;
		}
		pthread_mutex_unlock(&(nmap.r_lock));
		// Local maps were heap-allocated by FindNeighborLocal; release them
		// now that their contents are merged into the shared entry.
		delete(it->second->l_neighbor);
		delete(it->second->r_neighbor);
		delete(it->second);
	}
}

static void PushAllNeighbor(const char* infile, neighbor_map_t& neighbor_all, 
		const int wordlen, const int id, const int threads, const int group, const int groups)
{
	Info("thread%d start to find neighbor with max word len %d", id, wordlen);
	ifstream fin(infile);
	if(fin == NULL) Info("cannot read %s", infile);

	local_neighbor_map_t neighbor;
	str_bucket_t str_chn;
	string line;
	int linecount=0;

	while(getline(fin,line)!=NULL)
	{
		if(linecount%threads != id)
		{
			linecount++;
			continue;
		}

		if(id==0 && linecount%10000000 <= threads)  Debug("thread %d processed %d", id, linecount);
		linecount++;
		const char* txt;

		//get "title","tags","introduction"
		size_t found = 0;
		for(int i=0; i<7; i++)
		{
			found = line.find(",", ++found);
		}
		if(found == string::npos) txt = line.c_str();
		else txt = line.substr(found, line.size()).c_str();

		//strip ", we have title,tag1,tag2...tagn,introduction
		line.assign(txt);
		EraseChar(line, '\"');

		//split by ,
		str_bucket_t buffer;
		SplitByChar(buffer, line, ',');

		//process each string, and identify if the string contains Chinese character
		while(!buffer.empty()){
			//identify Chinese characters
			line = *(buffer.begin());
			int count = 0;
			bool is_chn = false;
			size_t scan = 0;
			string word;
			while(scan < line.size())
			{
				count ++;
				word = GetGBKChar(line, scan);
				if( isGBK(word))
				{
					if(!is_chn ) is_chn = true;
				}
				scan += word.size();
			}
			if(is_chn)	
			{
				if(count > 2) 
					str_chn.push_back(line);
			}
			//else str_eng.push_back(line);
			buffer.erase(buffer.begin());
		}

		for(str_bucket_t::iterator str=str_chn.begin(); str!=str_chn.end(); str++)
		{
			size_t scan = 0;
			while(scan < (*str).size())
			{
				string first = GetGBKChar(*str, scan);
				if( !isGBK(first) ){
					scan += first.size();
					continue;
				}
				for(int i=2; i<=wordlen; i++)
				{
					string lword = "";
					string rword = "";
					string word = GetGBKString(*str, scan, i);
					if( 0 == word.compare("") || 4 > word.size()) break;
					if(scan >= 2) 
					{
						lword = GetGBKChar(*str, scan-2);
						if( !isGBK(lword) ) lword = "";
					}
					if(scan <= (*str).size() -2)
					{
						rword = GetGBKChar(*str, scan+word.size());
						if( !isGBK(rword) ) rword = "";
					}
					int magic = (unsigned int)td_hash_string(word.c_str())%(groups);
					if(magic==group)
						PushNeighborLocal(neighbor, word, lword, rword);
				}
				chn_chars++;
				scan += first.size();
			}
		}
		str_chn.clear();
	}
	fin.close();

	Info("thread%d finish reading, neighbor %d",
			id, neighbor.size());

	MergeLocalNeighbor(neighbor_all, neighbor);
	Info("thread%d finish merging, neighbor_all %d",
			id, neighbor_all.size());
}

// Arguments passed to one PushAllNeighbor worker thread (see PushNbrProc).
struct PushNbrPara
{
	int id, wordlen, threads, group, groups; // worker id, max word length, worker count, hash group of this pass, total groups
	neighbor_map_t *neighbor;                // shared output map
	const char* infile;                      // input file path
};

void* PushNbrProc(void* para)
{
	PushAllNeighbor(((PushNbrPara*)para)->infile,
			*(((PushNbrPara*)para)->neighbor),
			((PushNbrPara*)para)->wordlen,
			((PushNbrPara*)para)->id,
			((PushNbrPara*)para)->threads,
			((PushNbrPara*)para)->group,
			((PushNbrPara*)para)->groups);
	return NULL;
}

// Shannon-style entropy of the left ('l') or right ('r') neighbor
// distribution of a word's statistics entry.
// NOTE(review): counts are normalized by the number of DISTINCT neighbors
// (bucket.size()), not by the total occurrence count, so the terms only form
// a true probability distribution when every neighbor occurred exactly once
// -- confirm this is intended.
double Entropy(nbr_rank_map_t &map, const char lr)
{
	ite_rank_map_t& bucket = (lr == 'l') ? *(map.l_neighbor) : *(map.r_neighbor);
	const double denom = (double)bucket.size();
	double entropy = 0e0;
	for(ite_rank_map_t::iterator it = bucket.begin(); it != bucket.end(); ++it)
	{
		const double p = (it->second)/denom;
		entropy += -p * log(p);
	}
	return entropy;
}

// Running total of string bytes accumulated in emap; used only for the
// memory-size log line in PushAllNeighborAndCalEntropy.
long int stringsize = 0;

// Runs `groups` passes over `infile`; each pass spawns `threadcount` worker
// threads that collect neighbor statistics for words hashing into the
// current group, then computes left/right boundary entropy per word and
// keeps every word whose both-side entropy exceeds `threshold` in `emap`,
// finally sorted by descending total entropy.
void PushAllNeighborAndCalEntropy(const char* infile, entro_rank_map_t& emap, 
		const int wordlen, 
		const int threadcount, 
		const int groups, 
		const double threshold)
{
	Info("use %d threads to push neighbor", threadcount);
	pthread_t threads[threadcount];
	PushNbrPara para[threadcount];

	for(int m=0; m<groups; m++)
	{
		neighbor_map_t neighbor;
		Info("group %d reading", m);
		for ( int i=0; i<threadcount; i++ )
		{
			para[i].id = i;
			para[i].group = m;
			para[i].groups = groups;
			para[i].wordlen = wordlen;
			para[i].threads = threadcount;
			para[i].neighbor = &neighbor;
			para[i].infile = infile;
			int ret = pthread_create(&threads[i], NULL, PushNbrProc, &para[i]);
			if ( ret )
			{
				Error("pthread_create return %d, errno is %d", ret, errno);
			}
		}

		for ( int i=0; i<threadcount; i++ )
		{
			pthread_join(threads[i], NULL );
		}

		Info("group %d calculating entropy", m);
		for(neighbor_map_t::iterator it = neighbor.begin(); it!=neighbor.end(); it++)
		{
			float entrol = Entropy(*(it->second), 'l');
			float entror = Entropy(*(it->second), 'r');
			if( entrol > threshold && entror > threshold)
			{
				struct Entrorank ite;
				ite.entrol = entrol;
				ite.entror = entror;
				ite.item = it->first;
				// bytes kept alive by emap, for the memory log below
				stringsize += ite.item.size();
				emap.push_back(ite);
			}
			// neighbor entries are owned by this pass; free after use
			delete(it->second->l_neighbor);
			delete(it->second->r_neighbor);
			delete(it->second);
		}
		// Report in GiB to match the "G" suffix: the original divided by
		// 1024*1024 and therefore printed MiB while claiming gigabytes.
		Info("group %d finish calculating entropy map size %d %fG", m, (int)emap.size(),
				(stringsize+sizeof(emap))/float(1024.0*1024.0*1024.0));
	}
	Info("sorting");
	sort(emap.begin(), emap.end(), entrorankcomp);
	Info("end calculating");
}

static void CalFrequency(const char* infile, ite_rank_map_t& frequency_all, const int wordlen, const int id, const int threads)
{
	Info("thread%d start to cal frequency with max word len %d", id, wordlen);
	ite_rank_map_t frequency;
	str_bucket_t str_chn;
	ifstream fin(infile);
	if(fin == NULL) Info("cannot read %s", infile);

	string line;
	int linecount=0;

	while(getline(fin,line)!=NULL)
	{
		if(linecount%threads != id)
		{
			linecount++;
			continue;
		}

		linecount++;
		const char* txt;

		//get "title","tags","introduction"
		size_t found = 0;
		for(int i=0; i<7; i++)
		{
			found = line.find(",", ++found);
		}
		if(found == string::npos) txt = line.c_str();
		else txt = line.substr(found, line.size()).c_str();

		//strip ", we have title,tag1,tag2...tagn,introduction
		line.assign(txt);
		EraseChar(line, '\"');

		//split by ,
		str_bucket_t buffer;
		SplitByChar(buffer, line, ',');

		//process each string, and identify if the string contains Chinese character
		while(!buffer.empty()){
			//identify Chinese characters
			line = *(buffer.begin());
			int count = 0;
			bool is_chn = false;
			size_t scan = 0;
			string word;
			while(scan < line.size())
			{
				count ++;
				word = GetGBKChar(line, scan);
				if( isGBK(word))
				{
					if(!is_chn ) is_chn = true;
				}
				scan += word.size();
			}
			if(is_chn)	
			{
				if(count <= 2) str_chn.push_back(line);
				else str_chn.push_back(line);
			}
			//else str_eng.push_back(line);
			buffer.erase(buffer.begin());
		}

		for(str_bucket_t::iterator str = str_chn.begin(); str != str_chn.end(); str++)	
		{
			size_t scan = 0;
			while(scan < (*str).size())
			{
				string first = GetGBKChar(*str, scan);
				if( !isGBK(first) ){
					scan += first.size();
					continue;
				}
				ite_rank_map_t::iterator it = frequency.find(first);
				if(it == frequency.end()) frequency[first] = 1;
				else frequency[first]++;
				for(int i=2; i<=wordlen; i++)
				{
					string word = GetGBKString(*str, scan, i);
					ite_rank_map_t::iterator it = frequency.find(word);
					if(it == frequency.end()) frequency[word] = 1;
					else frequency[word]++;
				}
				scan += first.size();
			}
		}
		str_chn.clear();
	}
	fin.close();
	Info("thread%d finish cal frequency, size=%d", id, frequency.size());
	pthread_mutex_lock(&g_lock);
	Info("thread%d start to merge", id);
	for(ite_rank_map_t::iterator it = frequency.begin(); it!=frequency.end(); it++)
	{
		ite_rank_map_t::iterator itt = frequency_all.find(it->first);
		if(itt == frequency_all.end()) frequency_all[it->first] = it->second;
		else frequency_all[it->first]+=(it->second);
	}
	Info("thread%d finish merge, size=%d", id, frequency_all.size());
	pthread_mutex_unlock(&g_lock);
	frequency.clear();
}

// Arguments passed to one CalFrequency worker thread (see CalFrequencyProc).
struct CalFrePara
{
	int id, wordlen, threads;     // worker id, max word length, worker count
	ite_rank_map_t* frequency;    // shared output map
	const char* infile;           // input file path
};

void* CalFrequencyProc(void* para)
{
	CalFrequency((((CalFrePara*)para)->infile),
			*(((CalFrePara*)para)->frequency),
			((CalFrePara*)para)->wordlen,
			((CalFrePara*)para)->id,
			((CalFrePara*)para)->threads);
	return NULL;
}

void CalFrequency1(const char* infile, ite_rank_map_t& frequency, const int wordlen, const int threadcount)
{
	pthread_t threads[threadcount];
	CalFrePara para[threadcount];

	Info("use %d threads to count frequency", threadcount);
	for ( int i=0; i<threadcount; i++ )
	{
		para[i].id = i;
		para[i].wordlen = wordlen;
		para[i].frequency = &frequency;
		para[i].infile = infile;
		para[i].threads = threadcount;
		int ret = pthread_create(&threads[i], NULL, CalFrequencyProc, &para[i]);
		if ( ret )
		{
			Error("pthread_create return %d, errno is %d", ret, errno);
		}
	}

	for ( int i=0; i<threadcount; i++ )
	{
		pthread_join(threads[i], NULL );
	}
	Info("count frequency end, size=%d", frequency.size());
}


// Best (maximum) probability achievable by splitting the character span
// [start, end] into consecutive pieces, where mat[(len-1)*wordlen + pos]
// is the probability of the piece of length `len` starting at `pos`.
// At the top level (start == 0) the whole span as a single piece is skipped,
// so the result reflects genuine multi-piece segmentations only.
double MaxOccur(double* mat, const int start, const int end, const int wordlen)
{
	if(start == end) return mat[end];

	double best = 0;
	for(int len = 0; len <= end - start; ++len)
	{
		const int split = start + len;
		double candidate;
		if(split == end)
		{
			// whole remaining span as one piece
			if(start == 0) continue; // never take the full word itself
			candidate = mat[len*wordlen + start];
		}
		else
		{
			// first piece of (len+1) chars, then best split of the rest
			candidate = mat[len*wordlen + start] * MaxOccur(mat, split + 1, end, wordlen);
		}
		if(candidate > best) best = candidate;
	}
	return best;
}

// Filters emap: a word is ruled out when its observed probability (`concur`)
// is not sufficiently larger than the best probability of any segmentation
// of it (`inde`), or when it occurred fewer than `occur` times.  Surviving
// words get concur/inde recorded in `independence`; emap is compacted and
// re-sorted by total entropy.
void IndependenceCheck(entro_rank_map_t& emap, ite_rank_map_t& frequency, ite_rank_map_t& independence, const float threshold, const float occur)
{
	Info("start independence check entro%d fre%d inde%d", (int)emap.size(), (int)frequency.size(), (int)independence.size());
	vector<string> to_del;  // items to remove from emap, in emap order
	for(entro_rank_map_t::iterator it = emap.begin(); it!=emap.end(); it++)
	{
		int wordlen = (it->item).size()/2;   // GBK chars are 2 bytes
		double mat[wordlen*wordlen];         // mat[(len-1)*wordlen + pos] = P(substring)
		string str(it->item);
		for(int i=0; i<wordlen; i++)
		{
			for(int j=0; j<wordlen-i; j++)
			{
				string temp = GetGBKString(str, 2*i, j+1); 
				ite_rank_map_t::iterator it1 = frequency.find(temp);
				if(it1 == frequency.end()) mat[j*wordlen+i] = 0;
				else mat[j*wordlen+i] = (it1->second)/double(chn_chars);
			}
		}

		// Best probability achievable by splitting the word into pieces.
		double inde = MaxOccur(mat, 0, wordlen-1, wordlen);
		// Initialize to 0 so a word absent from `frequency` is ruled out;
		// the original left `concur` uninitialized in that case AND
		// dereferenced the end() iterator in the Debug call below.
		double concur = 0;
		double wordfreq = 0;
		ite_rank_map_t::iterator itt1 = frequency.find(str);
		if(itt1 != frequency.end()) 
		{
			wordfreq = (double)(itt1->second);
			concur = wordfreq/double(chn_chars);
		}
		if(concur <= inde*threshold || concur <= occur/double(chn_chars))
		{
			to_del.push_back(str);
			Debug("rule out %s,%lf,%lf,%le,%le,%lf,%lf", str.c_str(), it->entrol, it->entror, inde, concur, concur/inde, wordfreq);	
			continue;
		}

		ite_rank_map_t::iterator itt2 = independence.find(str);
		if(itt2 == independence.end())
		{
			independence[str] = concur/inde;
		}
	}
	Info("%d words to rule out", (int)to_del.size());

	// Compact emap: scan backwards, swapping ruled-out items toward the
	// tail, then chop the tail off and re-sort.
	// NOTE(review): this relies on to_del being in emap order; elements
	// swapped in from `back` are not re-examined -- kept as in the original.
	int count = 0;
	if(!emap.empty() && !to_del.empty())  // guard end()-1 on an empty vector
	{
		entro_rank_map_t::iterator peek = emap.end()-1;
		entro_rank_map_t::iterator back = emap.end()-1;
		Entrorank temp;
		do
		{
			if(to_del.empty() == true) break;
			if(to_del.back() == peek->item)
			{
				temp = *peek;
				*peek = *back;
				*back = temp;
				to_del.pop_back();
				back--;
				count++;
			}
			peek--;
		}while(peek != emap.begin() - 1);
	}
	emap.resize(emap.size()-count);
	sort(emap.begin(), emap.end(), entrorankcomp);

	Info("end independence check entro%d fre%d inde%d", (int)emap.size(), (int)frequency.size(), (int)independence.size());
}


