require 'RAE'
require 'tree'
require 'dict'

D_WRDEMB = 50 -- default dimensionality of a word-embedding vector
KBEST = 50 -- max number of k-best candidate parses kept per golden tree
NSAMPLE = 10000 -- max number of golden trees to load (reassigned in main)

--*************** load data ****************
--- Load a golden treebank and its k-best parse candidates into one forest.
-- @param gFn  file with one golden parse (bracketed tree string) per line
-- @param pFn  file with the k-best parses: for each golden tree, its
--             candidates one per line, terminated by a blank line;
--             the string '(())' marks a parse failure and is skipped
-- @param dic  dictionary used to map leaf words to ids (see word2id)
-- @return array of compacted trees: each golden tree followed by up to
--         KBEST of its candidate parses, leaf labels replaced by word ids
-- Reads at most NSAMPLE golden trees (NSAMPLE is a mutable global).
function loadSyntForest( gFn, pFn , dic)

	local i

	local gForest = {}
	print('loading golden forest...')
	i = 0
	for line in io.lines(gFn) do
		--print(line)
		i = i + 1
		if i > NSAMPLE then break end

		-- parse, binarize and strip tags from the golden tree
		local t = tree:createFromString(line)
		t:binarize(true)
		t:removeTags()
		gForest[#gForest+1] = t
	end

	-- walk the k-best file in lock-step with gForest: a blank line
	-- advances to the next golden tree
	local curGoldTree = gForest[1]
	local forest = {curGoldTree}
	print('loading kbest forest...')

	local nkbest = 1
	i = 1
	for line in io.lines(pFn) do
		if line == '' then 
			-- turn to new golden tree
			i = i + 1
			if i > NSAMPLE then break end

			curGoldTree = gForest[i] --print(i)
			if curGoldTree == nil then error('does not match') end
			forest[#forest+1] = curGoldTree
			nkbest = 1

		elseif line ~= '(())' then
			if nkbest <= KBEST then
				-- new kbest tree
				local t = tree:createFromString(line)
				t:binarize(true)
				t:removeTags()
			
				-- check if this kbest tree is for this golden tree
				-- (presumably toString(true) yields the word sequence,
				-- so equal strings mean the same sentence — TODO confirm)
				if t:toString(true) ~= curGoldTree:toString(true) then
					error('does not match')
				end
				--print(t:toString(true)) print(curGoldTree:toString(true))

				-- mark unmatched internal nodes
				t:compare(curGoldTree)
				forest[#forest+1] = t
				nkbest = nkbest + 1
			end
		end
		
	end

	-- replace leaf words with dictionary ids (mutates trees in place)
	forest = word2id(forest, dic)

	--local file = io.open('forest.txt', 'w')
	for i = 1,#forest do
		--file:write(forest[i]:toString() .. '\n')
		-- convert each tree to its compact array representation
		forest[i] = forest[i]:compact()
	end
	--file:close()
	return forest
end

--- Split a forest into train/test portions in place.
-- The last floor(#Forest * rate) trees are moved out of Forest into a
-- fresh table; Forest itself (mutated) becomes the training set.
-- @param Forest array of trees (mutated: tail entries are removed)
-- @param rate   fraction of samples to reserve for testing
-- @return Forest (now the training set), testForest
function splitData( Forest, rate )
	local total = #Forest
	local heldOut = math.floor(total * rate)
	local firstTest = total - heldOut + 1
	local testForest = {}
	for idx = firstTest, total do
		table.insert(testForest, Forest[idx])
		Forest[idx] = nil
	end
	return Forest, testForest
end

--- Replace every leaf word label in the forest with its dictionary id.
-- Lookup is case-insensitive; words missing from the dictionary fall back
-- to the id of "UNKNOWN". Raises an error if even that id is absent.
-- @param forest array of trees (leaves mutated in place)
-- @param dic    dictionary object exposing a word2id lookup table
-- @return the same forest, with leaf labels converted to ids
function word2id( forest, dic  )
	local lookup = dic.word2id
	for _,currentTree in ipairs(forest) do
		for _,leaf in ipairs(currentTree:leaves()) do
			-- unknown word has the label "UNKNOWN"
			local id = lookup[leaf.label:lower()]
			if id == nil then id = lookup['UNKNOWN'] end
			if id == nil then error('unknown word') end
			leaf.label = id
		end
	end
	return forest
end

--- Construct a dictionary object and populate it from a word-list file.
-- @param filename path to the word list
-- @return the loaded dict instance
function loadDict( filename )
	local d = dict:new()
	d:load(filename)
	return d
end

--- Load word embeddings from a plain-text file of doubles.
-- Reads n*dim doubles and returns them as a dim x n tensor (one
-- embedding column per word, matching the lookup-table convention).
-- @param filename path to the embedding file
-- @param n        number of words
-- @param dim      embedding dimensionality (defaults to D_WRDEMB)
-- @return torch.Tensor of size dim x n
function loadWordEmbedding( filename , n, dim)
	dim = dim or D_WRDEMB -- was a redundant 'local' shadowing the parameter
	local f = torch.DiskFile.new(filename, 'r')
	local L = torch.Tensor( f:readDouble(n*dim) ):resize(n,dim):t()
	f:close() -- the original leaked this file handle
	return L 
end

--********************* save/load obj ****************
--- Serialize obj to file fn using torch's binary format.
-- @param obj any torch-serializable value
-- @param fn  output file path (overwritten)
function saveObject( obj , fn )
	local out = torch.DiskFile(fn, 'w')
	out:binary()
	out:writeObject(obj)
	out:close()
end

--- Deserialize and return an object previously written by saveObject.
-- @param fn path to a torch-binary file
-- @return the deserialized object
function loadObject( fn )
	local src = torch.DiskFile(fn, 'r')
	src:binary()
	local result = src:readObject()
	src:close()
	return result
end

--******************** training *******************
require 'optim'

--- Train (resume training of) the recursive autoencoder on the forest.
-- @param sForestTrain training forest (compacted trees)
-- @param sForestTest  held-out forest evaluated during training
-- @param L            word-embedding lookup table; currently UNUSED —
--                     the fresh-network path below is commented out and
--                     a saved checkpoint ('model.20') is loaded instead
function train( sForestTrain, sForestTest , L )

	print('create network...')
	--local struct = {nCategory = 2, Lookup = L , func = tanh, funcPrime = norm2TanhPrime }
	--local rae = reAutoEncoder:new(struct)
	-- resume from a checkpoint rather than building a new network
	local rae = reAutoEncoder:load('model.20')

	print('train...')
	-- 5000: presumably a batch/eval interval — TODO confirm against
	-- reAutoEncoder:train; optimizer is L-BFGS with the given options;
	-- alpha/lambda look like the RAE loss mix and L2 weight — confirm
	rae:train(sForestTrain, sForestTest, 5000, optim.lbfgs,
		{maxIter=1000, learningRate=1},
		{alpha = 0.2, lambda = 1e-4})

end

--************************ test ***************
--- Smoke test: build a tiny RAE over random 8-word embeddings and run a
-- numerical gradient check on one hand-written, tagged parse tree.
function test ()
	local struct = {nCategory = 2, Lookup = torch.randn(2,8) , func = tanh, funcPrime = norm2TanhPrime }
	local rae = reAutoEncoder:new(struct)
	
        local t1 = tree:createFromString('(X[5/] (X[5/]=H (X[1/] (X[1/]=H (X[1/]=H 1) (X[4/] 4)) (X[6/] 6)) (X[5/]=H (X[3/] 3) (X[5/]=H 5))) (X[2/] (X[2/]=H 2) (X[3/] 3)))')
        local t2 = tree:createFromString('(X (X 1) (X (X 2) (X 3)))')

	-- normalize t1 the same way the data loader does, then compact it
	t1:binarize(true)
	t1:removeTags() print(t1:toString())
	t1 = t1:compact() print(tree.comp2string(t1))

	--t2:binarize(true)
	--t2:removeTags() print(t2:toString())
	--t2 = t2:compact() print(tree.comp2string(t2))

	local config = {lambda = 1e-1, alpha = 0.2}
	rae:checkGradient({t1},config)
end
--test()

--****************** main ****************
--- End-to-end pipeline: load dictionary, embeddings and parse forest,
-- split the data, print coverage statistics, then launch training.
function main()
	
	-- load dic & word embeddings
	print('loading dictionary...') io.flush()
	local dic = loadDict('wordembeddings/words.lst')

	print('loading word-embeddings...') io.flush()
	local L = loadWordEmbedding('wordembeddings/embeddings.txt', dic:size())

	-- load data --
	NSAMPLE = 36000
	print('loading syntactic forest...') io.flush()
	local forest = loadSyntForest('Data/preprocessed_wsj/wsj-22.mrg', 'Data/preprocessed_wsj/wsj-22.mrg.parse', dic)
	saveObject( forest, 'syntForest.bin.wsj22')-- .. NSAMPLE )
	--local forest = loadObject('syntForest.bin.' .. NSAMPLE)
	
	-- split data
	-- NOTE(review): rate 1 moves ALL trees into testForest, leaving the
	-- training set empty — confirm this is an intentional evaluation run
	local trainForest, testForest = splitData(forest, 1)

	-- count internal (non-leaf) compact nodes and how many are marked
	-- meaningful; returns (meaningful, total, ratio)
	local function acc( forest )
		local accM = 0
		local accS = 0
		for _,t in ipairs(forest) do
			for _,node in ipairs(t) do
				local nChild = #node.childId
				if nChild > 0 then
					accS = accS + 1
					if node.meaningful then accM = accM + 1 end
				end
			end
		end
		-- NOTE(review): accM/accS is 0/0 = nan for an empty forest
		return accM, accS, accM/accS
	end
	
	print(acc(trainForest))
	print(acc(testForest))
	io.flush()
	
	--[[
	for i = 1, #testForest do
		print(tree.comp2string(testForest[i]))
	end
	]]

	-- train
	train( trainForest, testForest, L )
end

--main()

