//
//  main.cpp
//  iornn-parser-c
//
//  Created by Phong Le on 27/12/14.
//  Copyright (c) 2014 Phong Le. All rights reserved.
//

#include <stdio.h>

/*
 * Reranker.cpp
 *
 *  Created on: Dec 24, 2014
 *      Author: Phong
 */

#include <string>
#include <iostream>
#include <fstream>
#include <vector>
#include <getopt.h>
#include <math.h>
#include <time.h>
#include <stdlib.h>
#include <regex>

#include "Matrix.h"
#include "Default.h"
#include "Dictionary.h"
#include "Treebank.h"
#include "Depstruct.h"
#include "Deptree.h"
#include "CGRNN.h"
#include "CGRNNTrainer.h"

#include "paraphrase_detector/PDCGRNN.h"
#include "paraphrase_detector/PDCGRNNTrainer.h"
#include "paraphrase_detector/PDCorpus.h"

#include "omp.h"

using namespace std;

//----------------------------- default -----------------------------

int dim = 4;	// dimensionality of the hidden/embedding vectors

// for training
real lambda = 1e-4;	// L2 regularization weight for network parameters
real lambdaL = 1e-6;	// L2 regularization weight, presumably for word embeddings — TODO confirm
real largeRandRange = 0.1;	// random-init range (currently equal to smallRandRange)
real smallRandRange = 0.1;	// random-init range for "small" parameters — usage not visible here

real paramLearningRate = .1;	// SGD learning rate for network parameters
real learningRateDecay = 0;	// learning-rate decay factor (0 = no decay)
real wembLearningRate = .1;	// SGD learning rate for word embeddings
int evalDevStep = 20;	// presumably: evaluate on dev every this many steps — confirm in trainer
int maxDecreaseCount = 3;	// presumably: early-stopping patience — confirm in trainer

int maxNEpoch = 1000;	// maximum number of training epochs
int batchSize = 20;	// minibatch size

int nThreads = 1;	// OpenMP thread count; NOTE: applied in main() BEFORE loadParams() can override it

int maxGenLen = 20;	// presumably caps generated sentence length — not used in this file
int kDeprels = 1;
int kPos = 1;
int kWords = 10;
int beamsize = 100;	// beam width, presumably for generation/search — not used in this file

int softmaxEst = SOFTMAX_NCE;	// softmax estimation: SOFTMAX_NCE or SOFTMAX_HIER (see senGenTrain)
int kNeg = 5;	// number of negative samples for NCE (presumably — not used in this file)

default_random_engine generator;	// shared RNG; distinct from rand(), which main() seeds separately

// paths
string dataDir = "./data/toy/data/";	// directory holding the CoNLL data files
string dicDir = "./data/toy/dic/";	// directory holding dictionaries and embeddings
string wembSource = "collobert";	// sub-directory naming the word-embedding source
string modelDir = "model/";	// presumably where trained models are written — not used in this file

string trainFile="train.conll";
string devFile="dev-lq15.conll";
string testFile="dev-lq15.conll";	// NOTE(review): test defaults to the same file as dev

// -------------------------------------------------------------------

/*
 * Load global hyper-parameters and file paths from a key/value config
 * file: one "key value" pair per line, separated by tabs/spaces.
 * Unknown keys and malformed lines are ignored. If the file cannot be
 * opened (e.g. fname == ""), all compiled-in defaults are kept.
 */
void loadParams(string fname) {
	ifstream f(fname);
	if (f.good() == false) return;	// keep defaults when no config is given

	string line;
	while (getline(f, line)) {
		// Split on runs of tabs/spaces.
		// NOTE(review): was "[\t ]\+" — '\+' is not a valid C++ escape
		// sequence (conditionally-supported); compilers that accept it
		// emit a plain '+', so "[\t ]+" preserves the actual behavior.
		vector<string> comps = Utils::splitString(line, "[\t ]+");
		if (comps.size() != 2) continue;

		const string& key = comps[0];
		const string& val = comps[1];

		if (key == "dim")
			dim = stoi(val);

		// for training
		else if (key == "lambda")
			lambda = (real)stod(val);
		else if (key == "lambdaL")
			lambdaL = (real)stod(val);
		else if (key == "largeRandRange")
			largeRandRange = (real)stod(val);
		else if (key == "smallRandRange")
			smallRandRange = (real)stod(val);

		else if (key == "paramLearningRate")
			paramLearningRate = (real)stod(val);
		else if (key == "learningRateDecay")
			learningRateDecay = (real)stod(val);
		else if (key == "wembLearningRate")
			wembLearningRate = (real)stod(val);

		else if (key == "maxNEpoch")
			maxNEpoch = stoi(val);
		else if (key == "batchSize")
			batchSize = stoi(val);

		else if (key == "maxDecreaseCount")
			maxDecreaseCount = stoi(val);
		else if (key == "evalDevStep")
			evalDevStep = stoi(val);

		else if (key == "nThreads")
			nThreads = stoi(val);

		// paths
		else if (key == "dataDir")
			dataDir = val;
		else if (key == "dicDir")
			dicDir = val;
		else if (key == "wembSource")
			wembSource = val;
		else if (key == "modelDir")
			modelDir = val;

		else if (key == "trainFile")
			trainFile = val;
		else if (key == "devFile")
			devFile = val;
		else if (key == "testFile")
			testFile = val;
	}
	f.close();
}

/*
 * Read pre-trained word embeddings from a text file.
 * Expected format: a header "<nwords> <dim>" followed by nwords rows of
 * dim whitespace-separated real numbers.
 * Returns one heap-allocated Matrix per word; the caller (or whatever it
 * hands them to) is responsible for deleting them.
 * Fix: previously a missing/unreadable file silently produced an empty
 * vector; the failure is now reported on stderr (return value unchanged).
 */
vector<Matrix*> loadWordEmbeddings(string fname) {
	vector<Matrix*> L;
	fstream f(fname, ios::in);
	int nwords = 0;
	int wdim = 0;
	if (!(f >> nwords >> wdim)) {
		cerr << "loadWordEmbeddings: cannot read header from " << fname << endl;
		return L;	// empty, as before — but no longer silent
	}

	L.reserve(nwords);	// avoid reallocations; nwords is known up front
	for (int i = 0; i < nwords; i++) {
		Matrix* we = new Matrix(wdim);
		for (int j = 0; j < wdim; j++)
			f >> we->data[j];
		L.push_back(we);
	}
	return L;
}

/*
 * Train a sentence-generation CGRNN on the configured treebanks:
 * load train/dev data, shuffle the training set, run the trainer,
 * and release both treebanks afterwards.
 */
void senGenTrain(CGRNN* net) {
	const string trainPath = dataDir + "/" + trainFile;
	const string devPath = dataDir + "/" + devFile;

	cout << "load treebanks" << endl;
	Treebank* trainBank = Treebank::load(trainPath, net->vocaDic, net->posDic, net->deprelDic);
	Treebank* devBank = Treebank::load(devPath, net->vocaDic, net->posDic, net->deprelDic);
	devBank->storage.resize(5);	// shrink dev storage to 5 — presumably to keep dev evaluation cheap; confirm

	cout << "shuffle train treebank" << endl;
	trainBank->shuffle();

	CGRNNTrainer trainer;
	trainer.train(net, trainBank, devBank);

	Treebank::free(trainBank);
	Treebank::free(devBank);
}

/*
 * Build a sentence-generation CGRNN from dictionaries and pre-trained
 * word embeddings. In the current build the actual training call is
 * commented out and a gradient check on the training treebank runs
 * instead (hand-toggled dev state).
 * @param paramFile  config for loadParams(); "" keeps compiled-in defaults.
 */
void senGenTrain(string paramFile) {
	loadParams(paramFile);

	cout << "load vocabulary and word embeddings" << endl;
	string path = dicDir+ "/" + wembSource;
	Dictionary* vocaDic = Dictionary::create(path + "/words.lst", TEMPLATE_COLLOBERT);
	if (softmaxEst == SOFTMAX_HIER)
		vocaDic->loadBinCode(path + "/word_code.lst");	// binary word codes — presumably for hierarchical softmax; confirm
	else if (softmaxEst == SOFTMAX_NCE)
		vocaDic->loadFreq(path + "/word_freq.txt");	// word frequencies — presumably the NCE noise distribution; confirm

	cout << "load word embeddings" << endl;
	vector<Matrix*> L = loadWordEmbeddings(path+"/embeddings.txt");

	Dictionary* posDic = Dictionary::create(dicDir + "/pos.lst", TEMPLATE_NONE);
	Dictionary* deprelDic = Dictionary::create(dicDir+"/deprel.lst", TEMPLATE_NONE);

	// NOTE(review): ownership of the Matrix* in L after this call is not
	// visible here — presumably the net takes them; L is never freed in
	// this function. Confirm against CGRNN's constructor/destructor.
	CGRNN* net = new CGRNN(dim, vocaDic, posDic, deprelDic, L);

	cout << "training" << endl;
	//senGenTrain(net);


	//---------------- gradient check --------------------//
	Treebank* tb = Treebank::load(dataDir+"/"+trainFile, vocaDic, posDic, deprelDic);
	net->checkGradient(tb);
	Treebank::free(tb);


	Dictionary::free(vocaDic);
	Dictionary::free(posDic);
	Dictionary::free(deprelDic);
	delete net;
}

/*
 * Evaluate a previously saved sentence-generation model on the
 * configured test treebank.
 * @param netfilename  path to a serialized CGRNN model.
 */
void senGenEval(string netfilename) {
	CGRNN* net = CGRNN::load(netfilename);
	CGRNNTrainer trainer;

	Treebank* testTb = Treebank::load(dataDir+"/"+testFile, net->vocaDic, net->posDic, net->deprelDic);
	testTb->storage.resize(10);	// shrink storage to 10 — presumably to cap evaluation size; confirm

	trainer.eval(net, testTb);

	// Fix: release the treebank and the network (both were leaked),
	// mirroring the cleanup done by senGenTrain().
	Treebank::free(testTb);
	delete net;
}

/*
 * Train a paraphrase-detection PDCGRNN: load the train/dev corpora from
 * the configured data directory, shuffle the training data, run the
 * trainer, then release both corpora.
 */
void paraDetectTrain(PDCGRNN* net) {
	const string trainPath = dataDir + "/" + trainFile;
	const string devPath = dataDir + "/" + devFile;

	cout << "load corpus" << endl;
	PDCorpus* trainCorpus = new PDCorpus(trainPath, net->vocaDic, net->posDic, net->deprelDic, true);
	PDCorpus* devCorpus = new PDCorpus(devPath, net->vocaDic, net->posDic, net->deprelDic);

	cout << "shuffle train treebank" << endl;
	trainCorpus->shuffle();

	PDCGRNNTrainer trainer;
	trainer.train(net, trainCorpus, devCorpus);

	PDCorpus::free(trainCorpus);
	PDCorpus::free(devCorpus);
}

/*
 * Build and train a paraphrase-detection PDCGRNN. Loads a large GloVe
 * vocabulary plus embeddings, prunes it to the words actually used,
 * then trains via paraDetectTrain(PDCGRNN*).
 * @param paramFile  config for loadParams(); "" keeps compiled-in defaults.
 */
void paraDetectTrain(string paramFile) {
	loadParams(paramFile);

	cout << "load vocabulary and word embeddings" << endl;
	string path = dicDir+ "/" + wembSource;
	Dictionary* hugevocaDic = Dictionary::create(path + "/words.lst", TEMPLATE_GLOVE);
	hugevocaDic->loadBinCode(path + "/word_code.lst");
	cout << "load word embeddings" << endl;
	vector<Matrix*> hugeL = loadWordEmbeddings(path+"/embeddings.txt");
	vector<Matrix*> L;
	// Prune the full vocabulary/embeddings down to the used words; the
	// kept embeddings are returned through L.
	Dictionary* vocaDic = hugevocaDic->removeUnusedWord(hugeL, L);
	Dictionary::free(hugevocaDic);
	// NOTE(review): this deletes EVERY matrix in hugeL — safe only if
	// removeUnusedWord() copies (rather than shares) the matrices it
	// places into L; confirm against its implementation.
	for (Matrix* m : hugeL) delete m;
	

	Dictionary* posDic = Dictionary::create(dicDir + "/pos.lst", TEMPLATE_NONE);
	Dictionary* deprelDic = Dictionary::create(dicDir+"/deprel.lst", TEMPLATE_NONE);

	PDCGRNN* net = new PDCGRNN(dim, vocaDic, posDic, deprelDic, L);

	cout << "training" << endl;
	paraDetectTrain(net);

/*
	//---------------- gradient check --------------------//
	PDCorpus* data = new PDCorpus(dataDir+"/"+trainFile, vocaDic, posDic, deprelDic);
	net->checkGradient(data);
	PDCorpus::free(data);
*/

	Dictionary::free(vocaDic);
	Dictionary::free(posDic);
	Dictionary::free(deprelDic);
	delete net;
}


/*
 * Entry point. The build is currently hand-wired (via commented-out
 * calls) to run sentence-generation training with an empty config path,
 * i.e. all compiled-in defaults; the command-line interface at the
 * bottom is disabled, so argc/argv are presently unused.
 */
int main(int argc, char *argv[]) {

	// Keep BLAS single-threaded; parallelism comes from OpenMP instead.
	// NOTE(review): nThreads is still its default (1) here — loadParams()
	// only runs later, inside senGenTrain(), so a config value for
	// nThreads never reaches omp_set_num_threads.
	openblas_set_num_threads(1);
    omp_set_num_threads(nThreads);

	srand ( time(NULL) );
	//paraDetectTrain(string(""));

	// "" makes loadParams() return immediately, keeping the defaults.
	senGenTrain(string(""));
	//senGenEval("Release/model/model200");


/*
	string path = dicDir+ "/" + wembSource;

	vector<Matrix*> L = loadWordEmbeddings(path+"/embeddings.txt");
	Matrix* mean = Matrix::zeros(L[0]->rows);
	for (Matrix* m : L)
		mean->addi(m);
	mean->divi(L.size());
	for (int i = 0; i < mean->length; i++)
		cout << mean->data[i] << " ";
*/
/*
	Utils::normalizedText("/home/phong/workspace/iornn-sen-gen/data/BNC/texts.txt",
			"/home/phong/workspace/iornn-sen-gen/data/YM-s/dic/glove/words.lst", "/tmp/bnc.txt");
*/

/*
	if (argc == 3 && string(argv[1]) == "-train")
		train(string(argv[2]));

	else if (argc == 7 && string(argv[1]) == "-parse") {
		string netPath = string(argv[3]);
		string initPath = string(argv[2]);
		int kbest = atoi(argv[4]);
		real alpha = (real)atof(argv[5]);
		string output = string(argv[6]);

		loadParams(initPath);
		string testPath = dataDir + "/" + testFile;
	}

	else {
		cout<< "for training:" << endl << "\t-train init_file" << endl
				<< "for parsing:" << endl << "\t-parse init_file net_file kbest alpha output_file" << endl;
	}
	*/

}




