/*
 * CGRNN.cpp
 *
 *  Created on: Dec 17, 2014
 *      Author: Phong
 */

#include "Utils.h"
#include <fstream>
#include <iostream>
#include "CGRNN.h"

using namespace std;

CGRNN::CGRNN(int dim, Dictionary* vocaDic, Dictionary* posDic, Dictionary* deprelDic, vector<Matrix*>& L) {
	// Basic sizes: hidden dimension, and word-embedding dimension taken from
	// the first provided embedding vector.
	this->dim = dim;
	this->wdim = L[0]->length;

	// Keep shared references to the dictionaries (released in the destructor).
	this->vocaDic = (Dictionary*)vocaDic->share();
	this->posDic = (Dictionary*)posDic->share();
	this->deprelDic = (Dictionary*)deprelDic->share();

	// Allocate the parameter set and copy the given word embeddings into it.
	this->params = NULL;
	initParam(L);

	// Unigram noise distribution over the vocabulary (used for NCE sampling),
	// if word frequencies are available.
	if (!vocaDic->freq.empty())
		noiseDistr = discrete_distribution<int>(vocaDic->freq.begin(), vocaDic->freq.end());
}

CGRNN::~CGRNN() {
	// Release the parameters, then drop the shared dictionary references
	// acquired in the constructor (the frees are independent of each other).
	Param::free(params);
	Dictionary::free(deprelDic);
	Dictionary::free(posDic);
	Dictionary::free(vocaDic);
}

void CGRNN::initParam(vector<Matrix*>& L) {
	// Build a fresh parameter set sized by the hidden/word dimensions and the
	// three dictionary sizes, then overwrite its word-embedding table with
	// the embeddings supplied by the caller.
	params = new Param(dim, wdim, vocaDic->size(), posDic->size(), deprelDic->size(), largeRandRange, smallRandRange);
	const unsigned int nEmb = params->L.size();
	for (unsigned int k = 0; k < nEmb; k++)
		params->L[k]->copy(L[k]);
}

void CGRNN::save(string fname) {
	// Serialize the network (dimensions, dictionaries, parameters) to a text
	// file readable by load().
	ofstream f(fname);
	if (!f.is_open()) {
		// FIX: the original silently discarded all output when the file could
		// not be opened; report the failure instead of losing the model.
		cerr << "CGRNN::save: cannot open " << fname << endl;
		return;
	}

	f << dim << " " << wdim << endl;

	vocaDic->save(f);
	posDic->save(f);
	deprelDic->save(f);
	params->save(f);

	f << endl;
	f.close(); // explicit close; RAII would also flush at scope exit
}

CGRNN* CGRNN::load(string fname) {
	// Deserialize a network previously written by save().
	// NOTE(review): there is no check that the file opened or that the reads
	// succeeded; on a bad path this returns a partially-initialized object.
	// Callers apparently rely on valid paths -- confirm before hardening.
	ifstream f(fname);
	CGRNN* net = new CGRNN();

	f >> net->dim;
	f >> net->wdim;

	// load order must match the write order in save()
	net->vocaDic = Dictionary::load(f);
	net->posDic = Dictionary::load(f);
	net->deprelDic = Dictionary::load(f);
	net->params = Param::load(f);

	f.close();
	return net;
}

Param* CGRNN::createGrad() {
	// Allocate a zero-filled parameter bundle with the same shapes as
	// `params`, for accumulating gradients (the two random ranges are
	// irrelevant here, hence 0, 0).
	Param* g = new Param(dim, wdim, vocaDic->size(), posDic->size(), deprelDic->size(), 0, 0);
	g->fill(0);
	return g;
}

Matrix* CGRNN::func(Matrix* X) {
	// Element-wise activation; currently tanh.
	// (A softsign variant, x / (1 + |x|), was used previously.)
	Matrix* out = new Matrix(X->rows, X->cols);
	const int n = out->length;
	for (int k = 0; k < n; k++)
		out->data[k] = tanh(X->data[k]);
	return out;
}

Matrix* CGRNN::funcPrime(Matrix* F) {
	// Derivative of the activation, expressed in terms of the activation's
	// OUTPUT F: for tanh, f'(x) = 1 - f(x)^2.
	// (Softsign variant: f'(x) = (1 - |f(x)|)^2.)
	Matrix* out = new Matrix(F->rows, F->cols);
	const int n = out->length;
	for (int k = 0; k < n; k++) {
		const real y = F->data[k];
		out->data[k] = 1 - y * y;
	}
	return out;
}

Matrix* CGRNN::sigmoid(Matrix* X) {
	// Element-wise logistic sigmoid: 1 / (1 + e^{-x}).
	Matrix* out = new Matrix(X->rows, X->cols);
	const int n = out->length;
	for (int k = 0; k < n; k++)
		out->data[k] = 1 / (1 + exp(-X->data[k]));
	return out;
}

void CGRNN::compForward(Deptree* tree) {
	// Bottom-up compositional pass over the dependency tree:
	//  - headRep[i]: representation of node i's head word alone (word
	//    embedding projected by Ww, plus a POS embedding and bias, then func)
	//  - cRep[i]: composed representation of the subtree rooted at i
	// Results are stored on the tree; nothing is returned.

	// computing rep. for heads
	tree->headRep = new Matrix*[tree->nNodes]();

	Matrix* input = new Matrix(dim);
	for (int i = 1; i < tree->nNodes; i++) {
		Matrix* Li = params->L[tree->word[i]];
		// input = Ww * L[word] + Lpos[pos] + bw  (beta = 0 overwrites input)
		Matrix::gemv(1, CblasNoTrans, params->Ww, Li, 0, input);
		input->addi(params->Lpos[tree->pos[i]])->addi(params->bw);
		tree->headRep[i] = func(input);
	}
	// node 0 is the artificial ROOT: no word, so its head rep is all zeros
	tree->headRep[0] = Matrix::zeros(dim);

	// compute composition
	tree->cRep = new Matrix*[tree->nNodes]();
	// iterating from the end assumes children have larger indices than their
	// parent, so child cReps exist before they are consumed -- TODO confirm
	// this ordering invariant of Deptree
	for (int i = tree->nNodes-1; i >= 0; i--) {
		int nChildren = tree->nChildren[i][0] + tree->nChildren[i][1];
		if (nChildren == 0)
			tree->cRep[i] = tree->headRep[i]->dup();
		else {
			input->fill(0);
			// sum the children's composed reps, each projected by a
			// dependency-relation-specific matrix (one set per direction)
			for (int dir = 0; dir < 2; dir++) {
				for (int j = 0; j < tree->nChildren[i][dir]; j++) {
					int child = tree->children[i][dir][j];
					Matrix* W = params->Wc[dir][tree->deprel[child]];
					Matrix::gemv(1, CblasNoTrans, W, tree->cRep[child], 1, input);
				}
			}
			// add the head's own contribution; when CGRNN_NORM is set the
			// children's sum is scaled by 1/nChildren (via the beta argument)
			Matrix::gemv(1, CblasNoTrans, params->Wch, tree->headRep[i], CGRNN_NORM ? 1./nChildren : 1., input);
			input->addi(params->bc);
			tree->cRep[i] = func(input);
		}
	}

	delete input;
}

real CGRNN::genForward(Deptree* tree, Matrix* rootOuter) {
	// Top-down generative pass. Starting from the root's outer representation
	// (rootOuter, taken from the compositional side), compute for every node:
	//   gRep        full generative (outer) representation
	//   pGRep       partial generative rep. at the point the node is generated
	//   priorPGRep  left-to-right state over the already-generated sisters
	//   EOCGRep     partial rep. at the end-of-children decision (per direction)
	// and from those the model probabilities for deprel, POS, word and
	// end-of-children events. Returns the summed negative log-likelihood.
	tree->gRep = new Matrix*[tree->nNodes](); // full gen. rep.
	tree->pGRep = new Matrix*[tree->nNodes](); // partial gen. rep.
	tree->priorPGRep = new Matrix*[tree->nNodes]();
	tree->EOCGRep = new Matrix**[2] {new Matrix*[tree->nNodes](), new Matrix*[tree->nNodes]() };
	tree->priorEOCGRep = new Matrix**[2] {new Matrix*[tree->nNodes](), new Matrix*[tree->nNodes]() };

	// computing rep. for heads (same construction as in compForward)
	tree->headRep = new Matrix*[tree->nNodes]();

	for (int i = 1; i < tree->nNodes; i++) {
		Matrix* Li = params->L[tree->word[i]];
		Matrix* input = new Matrix(dim);
		Matrix::gemv(1, CblasNoTrans, params->Ww, Li, 0, input);
		input->addi(params->Lpos[tree->pos[i]])->addi(params->bw);
		tree->headRep[i] = func(input);
		delete input;
	}
	tree->headRep[0] = Matrix::zeros(dim);

	// precompute Wg[dir][deprel] * headRep[i] for every node; these products
	// are reused when summing a node's sisters below
	Matrix*** hiProds = new Matrix**[2]{new Matrix*[tree->nNodes](), new Matrix*[tree->nNodes]()};
	for (int i = 1; i < tree->nNodes; i++) {
		int dir = tree->dir[i];
		int deprel = tree->deprel[i];
		Matrix* W = params->Wg[dir][deprel];

		hiProds[dir][i] = new Matrix(dim);
		Matrix::gemv(1, CblasNoTrans, W, tree->headRep[i], 0, hiProds[dir][i]);
	}

	// compute partial/full gen. rep.
	Matrix* inputParent = new Matrix(dim);
	Matrix* inputHead = new Matrix(dim);

	for (int i = 0; i < tree->nNodes; i++) {
		// compute full gen. rep
		if (i == 0) {// ROOT
			tree->gRep[i] = rootOuter->dup();
			tree->pGRep[i] = Matrix::zeros(dim);
			tree->priorPGRep[i] = Matrix::zeros(dim);
		}
		else {
			int parent = tree->parent[i];
			int dir = tree->dir[i];
			// inputParent = Wgh*headRep[parent] + Wgp*gRep[parent] + bg
			Matrix::gemv(1, CblasNoTrans, params->Wgh[dir], tree->headRep[parent], 0, inputParent);
			Matrix::gemv(1, CblasNoTrans, params->Wgp[dir], tree->gRep[parent], 1, inputParent);
			inputParent->addi(params->bg[dir]);

			int nSisters = tree->nChildren[parent][0] + tree->nChildren[parent][1] - 1;
			if (nSisters == 0) {
				// only child: a learned "anonymous" rep stands in for sisters
				inputParent->addi(params->anonRep[dir]);
				tree->gRep[i] = func(inputParent);
			}
			else {
				Matrix* input = Matrix::zeros(dim);
				for (int sdir = 0; sdir < 2; sdir++) {
					for (int j = 0; j < tree->nChildren[parent][sdir]; j++) {
						int sister = tree->children[parent][sdir][j];
						// NOTE(review): both branches are identical; a single
						// `if (sister != i)` would do. Kept as-is.
						if (sister < i) {
							input->addi(hiProds[sdir][sister]);
						}
						else if (sister > i) {
							input->addi(hiProds[sdir][sister]);
						}
					}
				}
				// average the sisters' contributions when CGRNN_NORM is set
				input->divi(CGRNN_NORM ? nSisters : 1.)->addi(inputParent);
				tree->gRep[i] = func(input);
				delete input;
			}
		}

		// compute partial gen. rep. for CHILDREN
		for (int dir = 0; dir < 2; dir++) {
			// head/outer part shared by every partial rep. under node i
			Matrix::gemv(1, CblasNoTrans, params->Wgh_p[dir], tree->headRep[i], 0, inputHead);
			Matrix::gemv(1, CblasNoTrans, params->Wgp_p[dir], tree->gRep[i], 1, inputHead);
			inputHead->addi(params->bg_p[dir]);

			// j == nChildren[i][dir] is the virtual end-of-children (EOC) slot
			for (int j = 0; j <= tree->nChildren[i][dir]; j++) {
				int child = j == tree->nChildren[i][dir] ? -1 : tree->children[i][dir][j];
				int preSister = j == 0 ? -1 : tree->children[i][dir][j-1];

				// write into the EOC slots when child == -1, else the child's
				Matrix** priorRep = child == -1 ? &tree->priorEOCGRep[dir][i] : &tree->priorPGRep[child];
				Matrix** pGRep = child == -1 ? &tree->EOCGRep[dir][i] : &tree->pGRep[child];

				if (preSister == -1) {
					// first slot in this direction: start from the anonymous
					// rep (left side) or continue from the left side's final
					// EOC prior (right side)
					if (dir == 0)
						*priorRep = params->anonRep[0]->dup();
					else
						*priorRep = tree->priorEOCGRep[0][i]->dup();
				}
				else {
					// advance the prior state over the previous sister
					Matrix* input = new Matrix(dim);
					Matrix::gemv(1, CblasNoTrans, params->Wg_p[dir][tree->deprel[preSister]], tree->headRep[preSister], 0, input);
					Matrix::gemv(1, CblasNoTrans, params->Wtr[dir], tree->priorPGRep[preSister], 1, input);
					input->addi(params->btr[dir]);
					*priorRep = func(input); delete input;
				}

				Matrix* input = new Matrix(dim);
				Matrix::gemv(1, CblasNoTrans, params->Wgs_p[dir], *priorRep, 0, input);
				input->addi(inputHead);
				*pGRep = func(input);
				delete input;
			}
		}
	}

	delete inputHead;
	delete inputParent;

	// compute softmax
	// Pr(deprel | gen. rep.)
	Matrix* score;
	tree->deprelProb = new Matrix*[tree->nNodes]();
	tree->deprelProb[0] = Matrix::zeros(params->bdr->length);
	score = new Matrix(params->bdr->length);
	for (int i = 1; i < tree->nNodes; i++) {
		Matrix::gemv(1, CblasNoTrans, params->Wdr, tree->pGRep[i], 0, score);
		score->addi(params->bdr);
		tree->deprelProb[i] = new Matrix(score->length);
		Utils::safelyComputeSoftmax(tree->deprelProb[i]->data, score->data, score->length);
	}

	// Pr(EOC | EOC gen. rep.): the same softmax, read later at the slot
	// deprelDic->size() (the end-of-children symbol)
	tree->EOCProb = new Matrix**[2]{ new Matrix*[tree->nNodes](), new Matrix*[tree->nNodes]()};
	for (int i = 0; i < tree->nNodes; i++)
		for (int dir = 0; dir < 2; dir++) {
			Matrix::gemv(1, CblasNoTrans, params->Wdr, tree->EOCGRep[dir][i], 0, score);
			score->addi(params->bdr);
			tree->EOCProb[dir][i] = new Matrix(score->length);
			Utils::safelyComputeSoftmax(tree->EOCProb[dir][i]->data, score-> data, score->length);
		}
	delete score;

	// Pr(pos | deprel, gen. rep.)
	tree->posProb = new Matrix*[tree->nNodes]();
	tree->posProb[0] = Matrix::zeros(params->bpos->length);
	score = new Matrix(params->bpos->length);
	for (int i = 1; i < tree->nNodes; i++) {
		Matrix::gemv(1, CblasNoTrans, params->Wpos, tree->pGRep[i], 0, score);
		// deprel-conditioned bias column
		Matrix* temp = params->Wdrpos->getColumn(tree->deprel[i]);
		score->addi(temp)->addi(params->bpos);
		delete temp;
		tree->posProb[i] = new Matrix(score->length);
		Utils::safelyComputeSoftmax(tree->posProb[i]->data, score->data, score->length);
	}
	delete score;

	// compute errors: negative log-likelihood of the gold decisions
	real totalError = 0;
	for (int i = 1; i < tree->nNodes; i++)
		totalError -= log(tree->deprelProb[i]->data[tree->deprel[i]])
		+ log(tree->EOCProb[0][i]->data[deprelDic->size()])
		+ log(tree->EOCProb[1][i]->data[deprelDic->size()])
		+ log(tree->posProb[i]->data[tree->pos[i]]);

	// the ROOT contributes only its two end-of-children decisions
	totalError -= log(tree->EOCProb[0][0]->data[deprelDic->size()])
						+ log(tree->EOCProb[1][0]->data[deprelDic->size()]);


	// Pr(word | ...)
	tree->wordProb = new Matrix*[tree->nNodes]();
	tree->wordProb[0] = Matrix::zeros(1); // undefined
	if (softmaxEst == SOFTMAX_HIER) {
		// hierarchical softmax: one sigmoid per node on the word's binary path
		for (int i = 1; i < tree->nNodes; i++) {
			int word = tree->word[i];
			int* path = vocaDic->path[word];
			real* code = vocaDic->code[word];
			int codeLen = vocaDic->codeLength[word];
			tree->wordProb[i] = Matrix::zeros(codeLen);
			for (int j = 0; j < codeLen; j++) {
				tree->wordProb[i]->data[j] = Matrix::dot(params->Wword[path[j]], tree->pGRep[i]);
			}
			// add deprel- and POS-conditioned biases for the path nodes
			Matrix* temp1 = params->Wdrword->getRows(path, codeLen);
			Matrix* temp2 = temp1->getColumn(tree->deprel[i]);
			tree->wordProb[i]->addi(temp2);
			delete temp1; delete temp2;

			temp1 = params->Wposword->getRows(path, codeLen);
			temp2 = temp1->getColumn(tree->pos[i]);
			tree->wordProb[i]->addi(temp2);
			delete temp1; delete temp2;

			// add per-node biases, then flip each score by the binary code so
			// every entry is the logit of taking the gold branch
			temp1 = params->bword->getRows(path, codeLen);
			temp2 = new Matrix(codeLen, code);
			tree->wordProb[i]->addi(temp1)->muli(temp2);
			delete temp1; delete temp2;

			temp1 = tree->wordProb[i];
			tree->wordProb[i] = sigmoid(temp1);
			delete temp1;

			totalError -= Utils::sumLog(tree->wordProb[i]->data, tree->wordProb[i]->length);
		}
	}
	else if (softmaxEst == SOFTMAX_NCE) {
		// noise-contrastive estimation: gold word plus kNeg unigram samples
		tree->wordCands = new int*[tree->nNodes]();
		for (int i = 1; i < tree->nNodes; i++) {
			tree->wordCands[i] = new int[kNeg+1]();
			tree->wordCands[i][0] = tree->word[i];
			for (int j = 1; j < kNeg+1; j++)
				tree->wordCands[i][j] = noiseDistr(generator);

			tree->wordProb[i] = Matrix::zeros(kNeg+1);
			for (int j = 0; j < kNeg+1; j++)
				tree->wordProb[i]->data[j] = Matrix::dot(params->Wword[tree->wordCands[i][j]], tree->pGRep[i]);

			Matrix* temp1 = params->Wdrword->getRows(tree->wordCands[i], kNeg+1);
			Matrix* temp2 = temp1->getColumn(tree->deprel[i]);
			tree->wordProb[i]->addi(temp2);
			delete temp1; delete temp2;

			temp1 = params->Wposword->getRows(tree->wordCands[i], kNeg+1);
			temp2 = temp1->getColumn(tree->pos[i]);
			tree->wordProb[i]->addi(temp2);
			delete temp1; delete temp2;

			temp1 = params->bword->getRows(tree->wordCands[i], kNeg+1);
			tree->wordProb[i]->addi(temp1);
			delete temp1;

			// posterior P(data | w) for the gold sample (j == 0) and
			// P(noise | w) for the noise samples; NCE loss is -log of these
			for (int j = 0; j < kNeg+1; j++) {
				tree->wordProb[i]->data[j] = (j == 0 ? exp(tree->wordProb[i]->data[j]) : kNeg*noiseDistr.probabilities()[tree->wordCands[i][j]])
						/ (exp(tree->wordProb[i]->data[j]) + kNeg*noiseDistr.probabilities()[tree->wordCands[i][j]]);
				totalError -= log(tree->wordProb[i]->data[j]);
			}
		}
	}

	Matrix::free(hiProds, 2, tree->nNodes);
	return totalError;
}

void CGRNN::genBackprop(Deptree* tree, Param* grad, real mult) {
	// Backprop through genForward, accumulating parameter gradients into
	// `grad`. `mult` scales the softmax output gradients (its default is
	// declared in the header). On return, tree->gradGRep[0] holds
	// dError/d(rootOuter), which compBackprop consumes as the gradient of
	// cRep[0].
	tree->gradheadRep = new Matrix*[tree->nNodes]();
	tree->gradGRep = new Matrix*[tree->nNodes]();
	tree->gradpGRep = new Matrix*[tree->nNodes];
	tree->gradPriorPGRep = new Matrix*[tree->nNodes];
	tree->gradEOCGRep = new Matrix**[2] { new Matrix*[tree->nNodes](), new Matrix*[tree->nNodes]() };
	tree->gradPriorEOCGRep = new Matrix**[2] { new Matrix*[tree->nNodes](), new Matrix*[tree->nNodes]() };

	// every slot is filled here, so the un-value-initialized arrays above are fine
	for (int i = 0; i < tree->nNodes; i++) {
		tree->gradheadRep[i] = Matrix::zeros(dim);
		tree->gradGRep[i] = Matrix::zeros(dim);
		tree->gradpGRep[i] = Matrix::zeros(dim);
		tree->gradPriorPGRep[i] = Matrix::zeros(dim);
		tree->gradEOCGRep[0][i] = Matrix::zeros(dim);
		tree->gradEOCGRep[1][i] = Matrix::zeros(dim);
		tree->gradPriorEOCGRep[0][i] = Matrix::zeros(dim);
		tree->gradPriorEOCGRep[1][i] = Matrix::zeros(dim);
	}

	// softmax + cross-entropy gradient is (p - onehot): the probability
	// arrays are reused in place as gradient buffers
	Matrix** gZdr = tree->deprelProb;
	Matrix** gZpos = tree->posProb;
	Matrix** gZword = tree->wordProb;
	Matrix*** gZ_EOC = tree->EOCProb;

	for (int i = 1; i < tree->nNodes; i++) {
		gZdr[i]->data[tree->deprel[i]]--;
		gZpos[i]->data[tree->pos[i]]--;
	}

	// the gold end-of-children decision sits at index deprelDic->size()
	for (int i = 0; i < tree->nNodes; i++) {
		gZ_EOC[0][i]->data[deprelDic->size()]--;
		gZ_EOC[1][i]->data[deprelDic->size()]--;
	}

	for (int i = 0; i < tree->nNodes; i++) {
		gZdr[i]->muli(mult);
		gZpos[i]->muli(mult);
		gZword[i]->muli(mult);
		gZ_EOC[0][i]->muli(mult);
		gZ_EOC[1][i]->muli(mult);
	}

	// for Pr (. | context)
	for (int i = 0; i < tree->nNodes; i++) {
		// deprel / EOC share Wdr and bdr (for i == 0 the dr/pos buffers are
		// the zero matrices set up in genForward, so those terms vanish)
		Matrix::ger(1, gZdr[i], tree->pGRep[i], grad->Wdr);
		Matrix::ger(1, gZ_EOC[0][i], tree->EOCGRep[0][i], grad->Wdr);
		Matrix::ger(1, gZ_EOC[1][i], tree->EOCGRep[1][i], grad->Wdr);
		grad->bdr->addi(gZdr[i])->addi(gZ_EOC[0][i])->addi(gZ_EOC[1][i]);

		Matrix::gemv(1, CblasTrans, params->Wdr, gZdr[i], 1, tree->gradpGRep[i]);
		Matrix::gemv(1, CblasTrans, params->Wdr, gZ_EOC[0][i], 1, tree->gradEOCGRep[0][i]);
		Matrix::gemv(1, CblasTrans, params->Wdr, gZ_EOC[1][i], 1, tree->gradEOCGRep[1][i]);

		Matrix::ger(1, gZpos[i], tree->pGRep[i], grad->Wpos);
		grad->bpos->addi(gZpos[i]);
		Matrix::gemv(1, CblasTrans, params->Wpos, gZpos[i], 1, tree->gradpGRep[i]);
		grad->Wdrpos->addColumn(tree->deprel[i], gZpos[i]);
	}

	if (softmaxEst == SOFTMAX_HIER) {
		for (int i = 1; i < tree->nNodes; i++) {
			int word = tree->word[i];
			int* path = vocaDic->path[word];
			int codeLen = vocaDic->codeLength[word];
			int dr = tree->deprel[i];
			int pos = tree->pos[i];

			// d/dscore of -log sigmoid(score*code) = (sigmoid - 1) * code.
			// NOTE(review): addi(-1) is exact only when mult == 1 (gZword was
			// already scaled by mult above) -- confirm the default multiplier.
			Matrix* temp = new Matrix(codeLen, vocaDic->code[word]);
			gZword[i]->addi(-1)->muli(temp); delete temp;

			for (int j = 0; j < codeLen; j++) {
				int p = path[j];
				real g = gZword[i]->data[j];

				tree->gradpGRep[i]->addi(g, params->Wword[p]);

				Matrix::axpy(g, tree->pGRep[i], grad->Wword[p]);
				grad->bword->data[p] += g;
				grad->Wdrword->put(p, dr, grad->Wdrword->get(p, dr) + g);
				grad->Wposword->put(p, pos, grad->Wposword->get(p, pos) + g);
			}
		}
	}
	else if (softmaxEst == SOFTMAX_NCE) {
		for (int i = 1; i < tree->nNodes; i++) {
			int dr = tree->deprel[i];
			int pos = tree->pos[i];

			// NCE gradient w.r.t. the score: -(1 - p) for the gold sample
			// (j == 0), (1 - p) for noise samples.
			// NOTE(review): as above, exact only when mult == 1.
			gZword[i]->muli(-1)->addi(1); gZword[i]->data[0] *= -1;

			for (int j = 0; j < kNeg+1; j++) {
				int wordC = tree->wordCands[i][j];
				real g = gZword[i]->data[j];

				tree->gradpGRep[i]->addi(gZword[i]->data[j], params->Wword[wordC]);

				Matrix::axpy(g, tree->pGRep[i], grad->Wword[wordC]);
				grad->bword->data[wordC] += g;
				grad->Wdrword->put(wordC, dr, grad->Wdrword->get(wordC, dr) + g);
				grad->Wposword->put(wordC, pos, grad->Wposword->get(wordC, pos) + g);
			}
		}
	}

	// backward through the output nonlinearity of pGRep / EOCGRep
	for (int i = 0; i < tree->nNodes; i++) {
		Matrix *temp = funcPrime(tree->pGRep[i]);
		tree->gradpGRep[i]->muli(temp); delete temp;
		for (int dir = 0; dir < 2; dir++) {
			temp = funcPrime(tree->EOCGRep[dir][i]);
			tree->gradEOCGRep[dir][i]->muli(temp); delete temp;
		}
	}

	// sums[dir][i] accumulates the gradient flowing into node i's sister
	// contribution Wg[dir][deprel] * headRep[i] (cf. hiProds in genForward)
	Matrix*** sums = new Matrix**[2] {new Matrix*[tree->nNodes](), new Matrix*[tree->nNodes]()};
	for (int i = 0; i < tree->nNodes; i++)
		for (int dir = 0; dir < 2; dir++) {
			sums[dir][i] = Matrix::zeros(dim);
		}

	// mirror of genForward's main loop, run in reverse so the gradient of each
	// representation is complete before it is propagated further down
	for (int i = tree->nNodes-1; i >= 0; i--) {

		// for partial gen. rep.
		for (int dir = 1; dir >= 0; dir--) {
			for (int j = tree->nChildren[i][dir]; j >= 0; j--) {
				int child = j == tree->nChildren[i][dir] ? -1 : tree->children[i][dir][j];
				int preSister = j == 0 ? -1 : tree->children[i][dir][j-1];

				// EOC slots when child == -1, otherwise the child's own slots
				Matrix* gz = child == -1 ? tree->gradEOCGRep[dir][i] : tree->gradpGRep[child];
				Matrix* gzPrior = child == -1 ? tree->gradPriorEOCGRep[dir][i] : tree->gradPriorPGRep[child];
				Matrix* priorRep = child == -1 ? tree->priorEOCGRep[dir][i] : tree->priorPGRep[child];

				Matrix::gemv(1, CblasTrans, params->Wgh_p[dir], gz, 1, tree->gradheadRep[i]);
				Matrix::gemv(1, CblasTrans, params->Wgp_p[dir], gz, 1, tree->gradGRep[i]);
				Matrix::ger(1, gz, tree->headRep[i], grad->Wgh_p[dir]);
				Matrix::ger(1, gz, tree->gRep[i], grad->Wgp_p[dir]);
				grad->bg_p[dir]->addi(gz);

				Matrix::ger(1, gz, priorRep, grad->Wgs_p[dir]);
				Matrix::gemv(1, CblasTrans, params->Wgs_p[dir], gz, 1, gzPrior);

				if (preSister == -1) {
					// first slot: gradient goes to the anonymous rep (left) or
					// to the left side's final EOC prior (right), mirroring the
					// forward initialization
					if (dir == 0)
						grad->anonRep[0]->addi(gzPrior);
					else
						tree->gradPriorEOCGRep[0][i]->addi(gzPrior);
				}
				else {
					Matrix* temp = funcPrime(priorRep);
					gzPrior->muli(temp); delete temp;

					Matrix::ger(1, gzPrior, tree->priorPGRep[preSister], grad->Wtr[dir]);
					Matrix::gemv(1, CblasTrans, params->Wtr[dir], gzPrior, 1, tree->gradPriorPGRep[preSister]);
					Matrix::ger(1, gzPrior, tree->headRep[preSister], grad->Wg_p[dir][tree->deprel[preSister]]);
					Matrix::gemv(1, CblasTrans, params->Wg_p[dir][tree->deprel[preSister]], gzPrior, 1, tree->gradheadRep[preSister]);
					grad->btr[dir]->addi(gzPrior);
				}
			}
		}

		// for full gen. rep.
		if (i > 0) {
			int parent = tree->parent[i];
			int dir = tree->dir[i];
			// muli is in place: gz aliases gradGRep[i]
			Matrix* temp = funcPrime(tree->gRep[i]);
			Matrix* gz = tree->gradGRep[i]->muli(temp); delete temp;

			Matrix::ger(1, gz, tree->headRep[parent], grad->Wgh[dir]);
			Matrix::ger(1, gz, tree->gRep[parent], grad->Wgp[dir]);
			grad->bg[dir]->addi(gz);

			Matrix::gemv(1, CblasTrans, params->Wgh[dir], gz, 1, tree->gradheadRep[parent]);
			Matrix::gemv(1, CblasTrans, params->Wgp[dir], gz, 1, tree->gradGRep[parent]);

			int nSisters = tree->nChildren[parent][0] + tree->nChildren[parent][1] - 1;
			if (nSisters == 0)
				grad->anonRep[dir]->addi(gz);
			else {
				real t = (real)1. / nSisters;
				for (int cdir = 0; cdir < 2; cdir++) {
					for (int j = 0; j < tree->nChildren[parent][cdir]; j++) {
						int sister = tree->children[parent][cdir][j];
						// NOTE(review): both branches are identical
						// (sister != i would do); mirrors genForward
						if (sister < i) {
							Matrix::axpy(CGRNN_NORM ? t : 1, gz, sums[cdir][sister]);
						}
						else if (sister > i) {
							Matrix::axpy(CGRNN_NORM ? t : 1, gz, sums[cdir][sister]);
						}
					}
				}
			}
		}
	}

	// for head
	for (int i = 1; i < tree->nNodes; i++) {
		int dir = tree->dir[i];
		int dr = tree->deprel[i];
		// gradient through the precomputed sister products (hiProds)
		Matrix::ger(1, sums[dir][i], tree->headRep[i], grad->Wg[dir][dr]);
		Matrix::gemv(1, CblasTrans, params->Wg[dir][dr], sums[dir][i], 1, tree->gradheadRep[i]);

		int word = tree->word[i];
		int pos = tree->pos[i];

		// mul (not muli) allocates: gz is a fresh matrix, deleted below
		Matrix* temp = funcPrime(tree->headRep[i]);
		Matrix* gz = tree->gradheadRep[i]->mul(temp); delete temp;

		Matrix::ger(1, gz, params->L[word], grad->Ww);
		grad->bw->addi(gz);
		Matrix::gemv(1, CblasTrans, params->Ww, gz, 1, grad->L[word]);
		grad->Lpos[pos]->addi(gz);
		delete gz;
	}

	Matrix::free(sums, 2, tree->nNodes);
}

void CGRNN::compBackprop(Deptree* tree, Param* grad, Matrix* gradRoot) {
	// Backprop through compForward. gradRoot is dError/d(cRep[0]) coming from
	// the generative side; gradients flow top-down from each node into its
	// children's composed representations.
	tree->gradCRep = new Matrix*[tree->nNodes]();
	tree->gradCRep[0] = gradRoot->dup();
	for (int i = 1; i < tree->nNodes; i++)
		tree->gradCRep[i] = Matrix::zeros(dim);

	// visiting parents before children (the reverse of the forward order)
	// means gradCRep[i] is complete when node i is processed
	for (int i = 0; i < tree->nNodes; i++) {
		int word = tree->word[i];
		int pos = tree->pos[i];

		int nChildren = tree->nChildren[i][0] + tree->nChildren[i][1];
		// gz = gradCRep[i] * f'(cRep[i]); mul allocates a fresh matrix
		Matrix *temp = funcPrime(tree->cRep[i]);
		Matrix *gz = tree->gradCRep[i]->mul(temp); delete temp;

		// for head
		// leaf case keeps gradZhead == gz (cRep == headRep for leaves);
		// otherwise it is overwritten below (beta = 0) with Wch^T * gz
		Matrix* gradZhead = gz->dup();
		if (nChildren > 0) {
			Matrix::gemv(1, CblasTrans, params->Wch, gz, 0, gradZhead);
			Matrix *temp = funcPrime(tree->headRep[i]);
			gradZhead->muli(temp); delete temp;
			Matrix::ger(1, gz, tree->headRep[i], grad->Wch);
		}

		if (i > 0) { // ROOT has no word/POS to update
			Matrix::ger(1, gradZhead, params->L[word], grad->Ww);
			grad->bw->addi(gradZhead);
			Matrix::gemv(1, CblasTrans, params->Ww, gradZhead, 1, grad->L[word]);
			grad->Lpos[pos]->addi(gradZhead);
		}
		delete gradZhead;

		// for children: undo the optional 1/nChildren normalization, then
		// distribute through the relation-specific composition matrices
		if (nChildren > 0) {
			grad->bc->addi(gz);
			gz->divi(CGRNN_NORM ? nChildren : 1);
			for (int dir = 0; dir < 2; dir++) {
				for (int j = 0; j < tree->nChildren[i][dir]; j++) {
					int child = tree->children[i][dir][j];
					int dr = tree->deprel[child];

					Matrix::ger(1, gz, tree->cRep[child], grad->Wc[dir][dr]);
					Matrix::gemv(1, CblasTrans, params->Wc[dir][dr], gz, 1, tree->gradCRep[child]);
				}
			}
		}
		delete gz;
	}
}

real CGRNN::computeCostAndGrad(Container* cp, int startId, int endId, Param** gradList, unordered_set<int> &tWords, unordered_set<int> &tPath) {
	// Compute the mean negative log-likelihood over trees [startId, endId]
	// plus L2 regularization, and accumulate the averaged, regularized
	// gradient into gradList[0].
	//
	// gradList holds one pre-allocated Param per thread; per-thread gradients
	// are merged into gradList[0] after the parallel section. tWords / tPath
	// collect the word ids and softmax-path (or NCE candidate) ids touched by
	// this batch so only those sparse rows are merged and regularized.
	Treebank* tb = (Treebank*)cp;
	tWords.clear();
	tPath.clear();

	// prepare data, the first entry is the main
	for (int i = 0; i < nThreads; i++)
		gradList[i]->fill(0);

	real* costList = new real[nThreads]();
	int step = ceil((endId - startId+1) / (float)nThreads);

#pragma omp parallel for
	for (int th = 0; th < nThreads; th++) {
		for (int i = startId + th*step; i <= min(endId, startId + (th+1)*step-1); i++) {
			Deptree* cTree = tb->get(i);
			Deptree* gTree = cTree->clone();
			compForward(cTree);
			costList[th] += genForward(gTree, cTree->cRep[0]);

			genBackprop(gTree, gradList[th]);
			compBackprop(cTree, gradList[th], gTree->gradGRep[0]);

			// FIX: tWords and tPath are shared across the OpenMP threads and
			// unordered_set::insert is not thread-safe, so the insertions
			// must be serialized to avoid a data race (costs and gradients
			// are per-thread and need no protection).
#pragma omp critical (cgrnnTouchedSets)
			{
				for (int k = 1; k < cTree->nNodes; k++) {
					int word = cTree->word[k];
					tWords.insert(word);

					if (softmaxEst == SOFTMAX_HIER) {
						int* path = vocaDic->path[word];
						int codeLen = vocaDic->codeLength[word];
						for (int p = 0; p < codeLen; p++)
							tPath.insert(path[p]);
					}
					else if (softmaxEst == SOFTMAX_NCE) {
						for (int j = 0; j < kNeg+1; j++)
							tPath.insert(gTree->wordCands[k][j]);
					}
				}
			}

			cTree->freeTempArrays();
			Deptree::free(gTree);
		}
	}

	Param* grad = gradList[0];

	// merge: dense matrices fully, word-related rows only where touched
	for (int i = 1; i < nThreads; i++) {
		for (int j = 0; j < grad->nMatricesWoWE; j++)
			grad->weights[j]->addi(gradList[i]->weights[j]);

		for (int p : tPath) {
			int j = p + grad->nMatricesWoWE;
			grad->weights[j]->addi(gradList[i]->weights[j]);
		}

		for (int w : tWords) {
			int j = w + grad->Wword.size() + grad->nMatricesWoWE;
			grad->weights[j]->addi(gradList[i]->weights[j]);
		}

		costList[0] += costList[i];
	}

	// compute regularities: average over the batch and add the L2 terms
	real cost = costList[0];
	int nSample = endId - startId + 1;
	cost /= nSample;

	for (int i = 0; i < params->nMatricesWoWE; i++) {
		Matrix* W = params->weights[i];
		cost += Utils::sumSqr(W->data, W->length) * lambda/2;
		grad->weights[i]->divi(nSample)->addi(lambda, W);
	}

	// word embeddings use their own regularization strength lambdaL
	for (int word : tWords) {
		Matrix* wemb = params->L[word];
		cost += Utils::sumSqr(wemb->data, wemb->length) * lambdaL/2;
		grad->L[word]->divi(nSample)->addi(lambdaL, wemb);
	}

	for (int p : tPath) {
		Matrix* v = params->Wword[p];
		cost += Utils::sumSqr(v->data, v->length) * lambda/2;
		grad->Wword[p]->divi(nSample)->addi(lambda, v);
	}

	delete[] costList;

	return cost;
}


// make sure gradients are computed correctly
void CGRNN::checkGradient(Container* tb) {
	// Numerical gradient check: compare the analytic gradient against a
	// central finite difference for every single parameter entry, printing
	// entries whose difference looks suspicious. Debug-only and very slow.
	real epsilon = 1e-4;
	Param** grad = new Param*[nThreads];
	for (int i = 0; i < nThreads; i++)
		grad[i] = createGrad();

	unordered_set<int> tWords;
	unordered_set<int> tPath;
	// re-seed so NCE noise samples are identical across all evaluations
	generator = default_random_engine();
	computeCostAndGrad(tb, 0, tb->size()-1, grad, tWords, tPath);

	vector<real> gradV;
	vector<real> numGradV;
	Param** otherGrad = new Param*[nThreads];
	for (int i = 0; i < nThreads; i++)
		otherGrad[i] = createGrad();

	for (unsigned int i = 0; i < params->weights.size(); i++) {
		Matrix* W = params->weights[i];
		for (int r = 0; r < W->rows; r++) {
			for (int c = 0; c < W->cols; c++) {
				gradV.push_back(grad[0]->weights[i]->get(r,c));

				// plus
				W->put(r, c, W->get(r,c)+epsilon);
				generator = default_random_engine();
				real pluscost = computeCostAndGrad(tb, 0, tb->size()-1, otherGrad, tWords, tPath);

				// minus
				W->put(r, c, W->get(r,c)-2*epsilon);
				generator = default_random_engine();
				real minuscost = computeCostAndGrad(tb, 0, tb->size()-1, otherGrad, tWords, tPath);

				// restore the original value
				W->put(r, c, W->get(r,c)+epsilon);

				numGradV.push_back((pluscost - minuscost) / (2*epsilon));
				real diff = abs(numGradV.back() - gradV.back());
				if (diff > 0 && abs(gradV.back()) < 1e-7)
					cout << diff << " " << i << " " << r << " " << c << " : " << numGradV.back() << " " << gradV.back() << endl;
				if (diff > 5e-9)
					cout << diff << " " << i << " " << r << " " << c << " : " << numGradV.back() << " " << gradV.back() << endl;
			}
		}
	}

	// relative difference ||X - Y|| / ||X + Y|| over the flattened gradients.
	// NOTE(review): if Matrix::add returns a newly allocated matrix (as the
	// add/addi naming convention suggests), both temporaries leak here --
	// tolerable in a debug routine, but worth confirming.
	Matrix* X = new Matrix((int)gradV.size(), &gradV[0]);
	Matrix* Y = new Matrix((int)numGradV.size(), &numGradV[0]);
	real diff = Matrix::nrm2(X->add(-1, Y)) / Matrix::nrm2(X->add(Y));
	cout << diff << endl;
	cout << "should be < 1e-9" << endl;

	for (int i = 0; i < nThreads; i++) {
		Param::free(grad[i]);
		Param::free(otherGrad[i]);
	}
	delete[] grad;
	delete[] otherGrad;
	delete X;
	delete Y;
}
