require "dict"
require 'lexsem_crbm'


-- Make 64-bit floats the default for tensors created without an explicit type.
torch.setdefaulttensortype('torch.DoubleTensor')

--- Entry point: load the dictionary/embedding file and the training data,
-- then resume training a previously saved LexSemCRBM checkpoint.
-- Reads the globals `dic_emb_path` and `data_path` (set by the CLI handler
-- at the bottom of this file).
-- NOTE(review): most results are deliberately left global (cw_dic, cemb,
-- data, tw_dic, sampler_storage, model, hyper-parameters) — they may be read
-- by lexsem_crbm or an interactive session; do not localize without checking.
function main()
	-- Context-word dictionary and its embedding matrix.
	print('load dic & emb')
	local file = torch.DiskFile(dic_emb_path, 'r')
	cw_dic = file:readObject()
	setmetatable(cw_dic, Dict_mt)
	cemb = file:readObject()
	file:close()

	-- Training data, target-word dictionary and persisted sampler state.
	-- This file is binary-encoded, unlike the dictionary file above.
	print('load data')
	file = torch.DiskFile(data_path, 'r')
	file:binary()
	data = file:readObject():double()
	tw_dic = file:readObject()
	setmetatable(tw_dic, Dict_mt)
	sampler_storage = file:readObject()
	file:close()

	print('training...')
	--data = data[{{},{1,10000}}]

	-- Model dimensions derived from the data / dictionaries.
	nctxwords = data:size(1) - 1
	hiddim = 300
	ntwords = tw_dic:size()

	-- Training hyper-parameters passed to model:train below.
	init_momentum = 0.9
	final_momentum = 0.9
	eps = 0.01
	weightcost = 0.001
	chainlen = 100

	nepoch = 10000
	batchsize = 1000

	-- Resume from a checkpoint rather than building a fresh model:
	--local model = LexSemCRBM:new(temb, cemb, nctxwords, hiddim)
	--model:save_to_file('model/0.model')
	model = LexSemCRBM:load_from_file('model/3.model')
	--model.t_emb		= torch.randn(model.t_emb:size()):mul(0.01)
	--model.t_weight	= torch.randn(model.t_weight:size()):mul(0.01)
	--model.t_bias:fill(0)

	print('training rbms...')
	model:train(data, nepoch, batchsize, init_momentum, final_momentum,
				eps, weightcost, chainlen, sampler_storage)
end

-- Command-line handling: expects exactly two positional arguments.
-- The paths are published as globals because main() reads them from there.
if #arg == 2 then
	dic_emb_path = arg[1]
	data_path = arg[2]
	main()
else
	-- Minimal usage hint when the argument count is wrong.
	print("<dic_emb_path> <data_path> ")
end


