//
//  CGRNNTrainer.cpp
//  iornn-parser-c
//
//  Created by Phong Le on 26/12/14.
//  Copyright (c) 2014 Phong Le. All rights reserved.
//

#include "CGRNNTrainer.h"
#include "Depstruct.h"
#include "SenGenerator.h"
#include "Corpus.h"

void CGRNNTrainer::adagrad(CGRNN* net, Param* grad, unordered_set<int>& tWords, unordered_set<int>& tPath) {
	// AdaGrad update: each parameter group steps by the raw gradient divided
	// by the root of its accumulated squared gradients (plus a small epsilon).
	// tWords / tPath restrict the sparse updates (L / Wword) to the entries
	// actually touched by the current batch.
	// NOTE(review): divi()/muli() below mutate `grad` in place — the caller
	// must reset its gradient buffers before the next batch; confirm.
	real paramClr = paramLearningRate; // / (1 + nEvals*learningRateDecay);
	real wembClr = wembLearningRate; // / (1 + nEvals*learningRateDecay);

	// Lazily allocate the squared-gradient accumulator and per-matrix update
	// counters on the first call.
	// NOTE(review): nEvals gets weights.size() slots, while the indexing
	// below reaches nMatricesWoWE + Wword.size() + word — this assumes
	// weights also spans the Wword and L ranges; verify against Param.
	if (paramVarience == nullptr) {
		paramVarience = net->createGrad();

		for (unsigned int i = 0; i < paramVarience->weights.size(); i++)
			nEvals.push_back(0);
	}

	// dense weight matrices (everything except Wword and L)
	for (int i = 0; i < grad->nMatricesWoWE; i++) {
		Matrix* W = grad->weights[i];
		Matrix* sq = W->mul(W);              // elementwise g^2 (new matrix)
		paramVarience->weights[i]->addi(sq); // accumulate variance in place
		delete sq;

		// skip the earliest updates so the variance estimate has warmed up
		if (nEvals[i] > 1) {
			// was named `std`, which shadowed namespace std inside this scope
			Matrix* denom = paramVarience->weights[i]->sqrt()->addi(1e-10f);
			net->params->weights[i]->addi(grad->weights[i]->divi(denom)->muli(-paramClr));
			delete denom;
		}
		nEvals[i]++;
	}

	// Wword: only the path indices touched by this batch
	for (int p : tPath) {
		Matrix* sq = grad->Wword[p]->mul(grad->Wword[p]);
		paramVarience->Wword[p]->addi(sq);
		delete sq;

		if (nEvals[grad->nMatricesWoWE + p] > 1) {
			Matrix* denom = paramVarience->Wword[p]->sqrt()->addi(1e-10f);
			net->params->Wword[p]->addi(grad->Wword[p]->divi(denom)->muli(-paramClr));
			delete denom;
		}
		nEvals[grad->nMatricesWoWE + p]++;
	}

	// word embeddings L: only words touched by this batch, with the
	// embedding-specific learning rate
	for (int word : tWords) {
		Matrix* sq = grad->L[word]->mul(grad->L[word]);
		paramVarience->L[word]->addi(sq);
		delete sq;

		if (nEvals[grad->nMatricesWoWE + grad->Wword.size() + word] > 1) {
			Matrix* denom = paramVarience->L[word]->sqrt()->addi(1e-10f);
			net->params->L[word]->addi(grad->L[word]->divi(denom)->muli(-wembClr));
			delete denom;
		}
		nEvals[grad->nMatricesWoWE + grad->Wword.size() + word]++;
	}
}

void CGRNNTrainer::eval(CGRNN* net, Container* devCp) {
	// Dev-set evaluation: for each gold tree, run the forward pass, generate
	// candidate sentences from the root representation, and print the
	// top-scoring candidate alongside the gold structure.
	Treebank* devTb = (Treebank*)devCp;
	SenGenerator sengen;
	for (Deptree* tree : devTb->storage) {
		net->compForward(tree);
		cout << "-------------------------" << endl;
		cout << tree->ds->toString(net->posDic, net->deprelDic) << endl;

		vector<pair<real,Deptree*>> results;
		sengen.generate(net, tree->cRep[0], tree->nNodes, results);
		if (results.empty()) {
			cout << "no tree" << endl;
			tree->freeTempArrays();
			continue;
		}

		// print only the single best candidate
		const size_t nPrint = 1;
		for (size_t r = 0; r < nPrint && r < results.size(); r++) {
			Deptree* newtree = results[r].second;
			real score = results[r].first;

			vector<string> sentence;
			newtree->getSentence(net->vocaDic, net->posDic, net->deprelDic, sentence);

			cout << score << endl;
			// inner index renamed: original shadowed the outer loop's `i`
			for (unsigned int w = 0; w < sentence.size(); w++)
				cout << sentence[w] << endl;
			cout << endl;

			Deptree::free(newtree);
		}

		// BUGFIX: the original freed only the printed tree(s); every other
		// candidate returned by generate() leaked. Free the remainder too.
		for (size_t r = nPrint; r < results.size(); r++)
			Deptree::free(results[r].second);

		tree->freeTempArrays();
	}
}

void CGRNNTrainer::train(CGRNN* net, Container* tb, Container* devTb) {
	// Mini-batch training loop: sweeps the treebank in batches of batchSize,
	// applies AdaGrad after each batch, and at every evalDevStep-th epoch
	// boundary saves the model and evaluates on the dev set.
	int nSample = tb->size();
	unordered_set<int> tWords; // word ids touched by the current batch
	unordered_set<int> tPath;  // Wword indices touched by the current batch

	// one gradient accumulator per worker thread
	Param** gradList = new Param*[nThreads];
	for (int i = 0; i < nThreads; i++)
		gradList[i] = net->createGrad();

	int epoch = 0;
	struct timeval start, finish;

	int j = 0;          // 1-based batch counter within the current epoch
	real percent = 0;   // progress through the epoch, in percent
	real percentS = 0;  // next progress milestone to report

	// NOTE(review): decreaseCount is printed and tested against
	// maxDecreaseCount below but is never incremented in this function,
	// so early stopping can only fire if maxDecreaseCount <= 0 — confirm
	// whether eval() was meant to report a score that updates it.
	int decreaseCount = 0;

	// prints the banner for the epoch that is about to run
	// (was duplicated verbatim at two sites in the original)
	auto printEpochBanner = [&epoch]() {
		cout << "=========== epoch " << epoch << " ========" << endl;
		cout << Utils::currentDateTime() << endl;
	};

	epoch++;
	printEpochBanner();

	while (true) {
		j++;
		int startId = (j-1) * batchSize;
		int endId = min(nSample-1, j*batchSize-1);

		// epoch boundary: every sample has been consumed
		if (startId >= nSample) {
			if (epoch % evalDevStep == 0) {
				net->save(modelDir + "/model" + std::to_string(epoch));

				// evaluate on the dev set
				cout << "++++++ EVAL... ++++++" << endl;
				gettimeofday(&start, nullptr);
				eval(net, devTb);
				gettimeofday(&finish, nullptr);
				printf("[%.2fs]\n", ((double)(finish.tv_sec-start.tv_sec)*1000000 + (double)(finish.tv_usec-start.tv_usec)) / 1000000);

				cout << decreaseCount << " consecutive decreases" << endl;

				// early stop
				if (decreaseCount >= maxDecreaseCount)
					break;
			}

			// rewind batching for the next epoch
			j = 1;
			startId = (j-1) * batchSize;
			endId = min(nSample-1, j*batchSize-1);
			epoch++;
			percentS = 0;

			if (epoch > maxNEpoch) break;
			printEpochBanner();
		}

		gettimeofday(&start, nullptr);
		real cost = net->computeCostAndGrad(tb, startId, endId, gradList, tWords, tPath);
		// only gradList[0] is applied — presumably computeCostAndGrad
		// accumulates the per-thread gradients into slot 0; verify
		adagrad(net, gradList[0], tWords, tPath);

		if (j % 100 == 0) {
			cout << "batch " << j << "\t" << cost << "\t";
			gettimeofday(&finish, nullptr);
			printf("[%.2fs]\t", ((double)(finish.tv_sec-start.tv_sec)*1000000 + (double)(finish.tv_usec-start.tv_usec)) / 1000000);
			cout << endl;
		}

		// report progress in ~20% increments
		percent = endId * 100. / nSample;
		if (percent >= percentS) {
			cout << Utils::currentDateTime() << "\t";
			printf("%.2f%%", percent);
			percentS += 20;
			cout << endl;
		}
	}

	// release per-thread gradient buffers
	for (int i = 0; i < nThreads; i++)
		Param::free(gradList[i]);
	delete[] gradList;
}



