#include <string>
#include <iostream>
#include <fstream>
#include <sstream>
#include <map>
#include <set>
#include <vector>
#include <math.h>
#include <stdlib.h>

#include "lmgen.h"

using namespace std;

LMgen::LMgen(int n) : size(n),map2unk(false),unkidx(0),unseen(0),misscnt(0) {
	// One Ngram table per gram order 1..n.
	for(int i = 0; i < n; i++) 
		xgrams.push_back(new Ngram(i));
	// Sliding window of word indices.  Initialize to -1: that is the
	// sentinel isCompleteWindow() tests for, so without this the first
	// windows contain indeterminate values and produce bogus n-grams.
	l = new int[size];
	for(int i = 0; i < size; i++)
		l[i] = -1;
	corpsize = 0;
	// misscnt is returned by processWords(); it was previously never
	// initialized (undefined value when no vocabulary is loaded).
}

LMgen::~LMgen() {
	if( corpus.is_open() )
		corpus.close();
	if( testcorp.is_open () )
		testcorp.close();
	// Release the per-order Ngram tables allocated in the constructor
	// (previously leaked).
	for(size_t i = 0; i < xgrams.size(); i++)
		delete xgrams[i];
	delete [] l;
}

string LMgen::getToken(istream& input) {
	// Return the next space/newline-delimited token from `input`, or the
	// sentinel "</EOF>" once the stream is exhausted (semi-predicate
	// problem: "</EOF>" must therefore never occur as a corpus word).
	// Bug fix: the original tested eof() *before* get(), so the EOF value
	// returned by the failing get() was cast to char and appended to the
	// last token.  istream::get(char&) reports failure directly.
	string word = "";
	char ch;
	while( input.get(ch) ) {
		if( 	(ch == ' ') ||
			(ch == '\n') ) {
			if(word == "") continue;	// skip runs of separators
			return word;
		}
		else
			word += ch;
		// NOTE(review): '\t' and '\r' are not treated as separators --
		// confirm the corpus contains neither.
	}
	return word == "" ? "</EOF>" : word;
}

inline void LMgen::incrementWindow() {
	// Shift the word-index window one slot to the left; position size-1
	// is free for the next token afterwards.
	for(int dst = 0; dst + 1 < size; dst++)
		l[dst] = l[dst + 1];
}

inline bool LMgen::isCompleteWindow(int n) {
	// The first n slots form a usable n-gram only if none of them still
	// holds the -1 "no word yet" sentinel.
	bool complete = true;
	for(int i = 0; complete && i < n; i++)
		complete = (l[i] != -1);
	return complete;
}

inline string LMgen::idx2string(int n) {
	// Join the first n window indices with '_' (e.g. "3_17_5"); this is
	// the key format used by the Wordmap n-gram tables.
	ostringstream key;
	for(int i = 0; i < n; i++) {
		if(i > 0)
			key << "_";
		key << l[i];
	}
	return key.str();
}

//for <convenience>
// Decimal string rendering of n.
inline static std::string int2string(int n) {
	return std::to_string(n);
}
// Bigram key "<l>_<ll>" from two word indices.
inline static std::string idx2string2(int l, int ll) {
	return std::to_string(l) + "_" + std::to_string(ll);
}
// Trigram key "<l>_<ll>_<lll>" from three word indices.
inline static std::string idx2string3(int l, int ll, int lll) {
	return std::to_string(l) + "_" + std::to_string(ll) + "_" + std::to_string(lll);
}
//</convenience>

void LMgen::genNgram(int n) {
	if( isCompleteWindow(n) ) {
		string idx = idx2string(n);
		Wordmap::iterator it = xgrams[n-1]->ngram.find(idx);
		if( it != xgrams[n-1]->ngram.end() )
			xgrams[n-1]->ngram[idx]++;
		else
			xgrams[n-1]->ngram[idx] = 1;
	}
}

void LMgen::genCountcnt(int n) {
	// Build the count-of-counts table: gramcounts[c] = number of distinct
	// n-grams that occurred exactly c times (used by bbi()/buni()).
	for(Wordmap::const_iterator iter = xgrams[n-1]->ngram.begin(); iter != xgrams[n-1]->ngram.end(); iter++) {
		// operator[] default-initializes to 0; one lookup instead of
		// the previous find + insert pair.
		xgrams[n-1]->gramcounts[iter->second]++;
	}
}

int LMgen::processWords() {
	string tok;
	while( (tok = getToken(corpus)) != "</EOF>" ) {
		Vocab::iterator iter = voc.find(tok);
		if( iter != voc.end() ) {
			//Word found
			l[size-1] = symtblidx[tok];
		}
		else {
			//Word unknown
			if( map2unk ) {
				//Keep track of missed words
				Vocab::iterator mit = missvoc.find(tok);
				if( mit != missvoc.end() )
					; //we missed tok more than once 
				else {
					misscnt++;
					missvoc.insert(tok);	
				}
				//Remap to unk
				tok = "<UNK>";
				l[size-1] = unkidx;
			}
			else {
				//Add word to vocab
				symtbl.push_back(tok);
				l[size-1] = symtbl.size() - 1;
				symtblidx[tok] = l[size-1];
				
				voc.insert(tok);
			}
		}

		for(int i = 1; i <= size; i++)
			genNgram(i);
		incrementWindow();
		corpsize++;
	}
	//generate Count-counts
	for(int i = 1; i <= size; i++)
		genCountcnt(i);
	for(Vocab::const_iterator iter = voc.begin(); iter != voc.end(); iter++) {
		Wordmap::iterator it = xgrams[0]->ngram.find(*iter);
		if( it == xgrams[0]->ngram.end() )
			unseen++;
	}
	cerr << "Vocabulary has " << unseen << " unobserved words " << endl;
	m_bbi = bbi();
	m_buni = buni();
	return misscnt;
}

float LMgen::calcPP(string filename) {
	// Bigram perplexity of the test corpus in `filename` using the
	// probabilities loaded by loadBigramProb():
	//   PP = 10^(-1/N * sum log10 p(w|v)).
	testcorp.open(filename.c_str());
	if( !testcorp.is_open() ) {
		cerr << "Failed to open test corpus in " << filename << endl;
		return 0.;
	}
	string line,tok,lasttok="<s>";
	float f = 0.;
	int sentencelength = 0;
	while( getline(testcorp,line) ) {
		istringstream in(line);
		int cursentlength = 1;
		while( (tok = getToken(in)) != "</EOF>" ) {
			if( lasttok != "<s>" ) {
				//TODO: log needed here, mapping to unk
				// NOTE(review): an unseen bigram yields p = 0 ->
				// log10(0) = -inf; OOV words should be mapped to
				// <UNK> first.
				f += log10(bigramprob[lasttok + "_" + tok]);
			}
			lasttok = tok;
			cursentlength++;
		}
		// NOTE(review): lasttok is not reset to "<s>" per line, so a
		// bigram spanning two sentences is scored -- confirm intended.
		sentencelength += cursentlength;
	}
	// Bug fix: probabilities are accumulated in log base 10, so the
	// perplexity is 10^(-f/N); the original used exp(), i.e. base e.
	return pow(10.0, (-1)*f/((float) sentencelength));
}

void LMgen::dumpNgrams(int n) {
	char dummy;
	for(Wordmap::const_iterator iter = xgrams[n-1]->ngram.begin(); iter != xgrams[n-1]->ngram.end(); iter++) {
		istringstream in(iter->first);
		int *l = new int[n];
		for(int i = 0; i < n; i++) 
			if( i < n-1 )
				in >> l[i] >> dummy;
			else
				in >> l[i];
		cout << iter->second << " ";
		try { 
			for(int i = 0; i < n; i++) 
				if( i < n-1 )
					cout << symtbl.at(l[i]) << " ";
				else
					cout << symtbl.at(l[i]) << "\n";
		}
		catch (...) {	
			cerr << "Failed ngram: ";
			for(int i = 0; i < n; i++)
				cerr << l[i] << " ";
			cerr << endl;
		}
		delete [] l;
	}
}

void LMgen::loadCorpus(string filename) {
	// Open the training corpus; the handle is a member so processWords()
	// can stream from it later.  Aborts the program on failure.
	corpus.open(filename.c_str());
	if( corpus.is_open() )
		return;
	cerr << "Failed to open corpus in " << filename << endl;
	exit(1);
}

void LMgen::loadVocab(string filename) {
	// Load a closed vocabulary, one word per line.  Once a vocabulary is
	// loaded, out-of-vocabulary tokens are remapped to unk (map2unk).
	ifstream vocab(filename.c_str());
	// Bug fix: the original tested corpus.is_open() here, so a missing
	// vocabulary file was never detected.
	if( !vocab.is_open() ) {
		cerr << "Failed to open vocabulary in " << filename << endl;
		cerr << "No vocabulary will be used" << endl;
		return;
	}

	string tmp;
	while( getline(vocab, tmp) ) {
		voc.insert(tmp);
		symtbl.push_back(tmp);
		// NOTE(review): the vocab entry checked here is "<unk>"
		// (lowercase) while processWords() rewrites tokens to "<UNK>"
		// -- confirm the casing used in the vocabulary file.
		if(tmp == "<unk>")
			unkidx = symtbl.size() - 1;
		symtblidx[tmp] = symtbl.size() - 1;
	}
	map2unk = true;
	vocab.close();
}

bool LMgen::loadBigramProb(string filename) {
	// Load "word1 word2 prob" lines into bigramprob, keyed "word1_word2".
	// Returns false if the file cannot be opened.
	ifstream probfile(filename.c_str());
	if( !probfile.is_open() ) {
		cerr << "Failed to open probability-file " << filename << endl;
		return false;
	}

	string tmp;
	while( getline(probfile, tmp) ) {
		istringstream in(tmp);
		string word1,word2; float prob;
		// Skip blank or malformed lines instead of storing an entry
		// built from indeterminate values (the original ignored
		// extraction failures).
		if( !(in >> word1 >> word2 >> prob) )
			continue;
		bigramprob[word1 + "_" + word2] = prob;
	}
	probfile.close();
	return true;
}

//<probability-functions>
float LMgen::bbi() {
	// Absolute-discounting parameter for bigrams:
	//   b = N1(.,.) / (N1(.,.) + 2*N2(.,.))
	// where Nc(.,.) is the number of distinct bigrams seen exactly c times.
	float n1 = xgrams[1]->gramcounts[1];
	float n2 = xgrams[1]->gramcounts[2];
	return n1 / (n1 + 2 * n2);
}

float LMgen::buni() {
	// Absolute-discounting parameter for unigrams:
	//   b = N1(.) / (N1(.) + 2*N2(.))
	// where Nc(.) is the number of distinct unigrams seen exactly c times.
	float n1 = xgrams[0]->gramcounts[1];
	float n2 = xgrams[0]->gramcounts[2];
	return n1 / (n1 + 2 * n2);
}

// general case - not in use atm
float LMgen::ball(int n) {
	// Discounting parameter for arbitrary order n (generalizes bbi/buni).
	float n1 = xgrams[n-1]->gramcounts[1];
	float n2 = xgrams[n-1]->gramcounts[2];
	return n1 / (n1 + 2 * n2);
}

float LMgen::puni(string w) {
	// Absolute-discounted unigram probability P(w):
	//   sum1 = max(0, (N(w) - b)/N)  -- discounted relative frequency
	//   sum2 = backoff mass spread uniformly (1/W) over the vocabulary
//	xgrams[0]->ngram[w] 	is N(w)
	int W = voc.size(); float N = corpsize;
	// Unigram keys are stringified symbol-table indices (see idx2string).
	string iw = int2string(symtblidx[w]);
//	float sum2 = m_buni * (1./N);
	// (W - unseen) = number of vocabulary words actually observed in the
	// corpus (see processWords), i.e. the number of discounted terms.
	float sum2 = m_buni * (W - unseen)/N * 1/W;
	float sum1 = (xgrams[0]->ngram[iw] - m_buni)/N;
	sum1 = sum1 > 0 ? sum1 : 0;	// floor the discounted term at zero
	return sum1 + sum2;
}

float LMgen::pbi(string w, string v) {
	// Absolute-discounted bigram probability P(w|v), backing off to the
	// unigram estimate puni(w).
	float W = voc.size();
	// Bigram keys are "<idx(v)>_<idx(w)>" index strings.
	string iv = int2string(symtblidx[v]);
	string iw = int2string(symtblidx[w]);
	// Backoff mass: (W - nvcount[v]) is the number of distinct observed
	// successors of v (nvcount built by genNvCount); xgrams[0]->ngram[iv]
	// serves as N(v).
	float sum2 = m_bbi * (W - nvcount[v]) /
			((float) xgrams[0]->ngram[iv]) *
			puni(w);
	// Discounted relative bigram frequency N(v,w) - b over N(v), floored
	// at zero.
	float sum1 = (xgrams[1]->ngram[iv + "_" + iw] - m_bbi)/((float) xgrams[0]->ngram[iv]);
	sum1 = sum1 > 0 ? sum1 : 0;
	return sum1 + sum2;
}

void LMgen::checkPuni() {
	float sum = 0;
	for(Vocab::const_iterator iter = voc.begin(); iter != voc.end(); iter++) {
		float prob = puni(*iter);	
		cout << *iter << ": " << prob << "\n"; 
		sum += prob;
	}
	cerr << "Unigram-Sum is " << sum << endl;
}

void LMgen::checkBigramThe() {
	float sum = 0;
	for(Vocab::const_iterator iter = voc.begin(); iter != voc.end(); iter++) {
		float prob = pbi(*iter, "the");
		cout << "the, " << *iter << ": " << prob << "\n";
		sum += prob;	
	}
	cerr << "Bigram-Sum is " << sum << endl;
}

void LMgen::genNvCount() {
	for(Vocab::const_iterator i = voc.begin(); i != voc.end(); i++) {
		nvcount[*i] = 0;
		for(Vocab::const_iterator j = voc.begin(); j != voc.end(); j++) {
			int idxi = symtblidx[*i];
			int idxj = symtblidx[*j];
			string idx = idx2string2(idxi, idxj);
			// xgrams[1] is hardcoded, only bigrams needed
			Wordmap::iterator it = xgrams[1]->ngram.find(idx);
			if( it == xgrams[1]->ngram.end() )
				nvcount[*i]++;
		}
	}
}
//</probability-functions>

// Driver: train an n-gram model from "corpus.sentences" with vocabulary
// file "vocab", build the N(v,.) lookup and report OOV statistics.
int main() {
	int gramdim = 3;
	LMgen generator(gramdim);
	cerr << "Loading files..";
	generator.loadCorpus("corpus.sentences");
	generator.loadVocab("vocab");
	cerr << "done" << endl;

	cerr << "Generating n-grams upto " << gramdim << "-grams and count-counts...";
	int missed = generator.processWords();
	cerr << "done" << endl;
	
	cerr << "Generating N(v,.) lookup...";
	generator.genNvCount();
	cerr << "done" << endl;
//	generator.checkBigramThe();

	// Bug fix: missed/getCorpusSize() was evaluated as integer division
	// (truncating to 0 for any realistic corpus) before the *100.0 --
	// promote to double before dividing.
	cerr << "Missed " << missed << " ("
	     << 100.0 * missed / (double) generator.getCorpusSize() << "%)" << endl;

	return 0;
}
