require 'add_comp_rbm'
require 'dict'

torch.setdefaulttensortype('torch.DoubleTensor')


--- Build the training data for the next stage of the pipeline:
-- read the dictionary/embeddings and the raw word/context matrix,
-- compute component probabilities for the target words with a trivial
-- (identity-composition) AddCompRBM, and write out the context rows
-- stacked on top of those component states.
-- Reads globals: dic_emb_path, data_path, output_path (set by the CLI block).
-- Side effects: writes 'comp_model/1.model' and output_path.
function main()
	-- Load dictionary & embeddings (ASCII-serialized; no :binary() here,
	-- matching how the file was written).
	print('load dic & emb')
	local f = torch.DiskFile(dic_emb_path, 'r')
	local dic = f:readObject()
	setmetatable(dic, Dict_mt)	-- restore Dict methods lost in serialization
	local wemb = f:readObject()
	f:close()

	-- Load the binary data matrix plus the two word lists.
	-- Row 1 of `data` is split off as `ws`, the rest as `cws` below —
	-- presumably target-word ids vs. context-word ids (TODO confirm).
	print('load data')
	f = torch.DiskFile(data_path, 'r')
	f:binary()
	local data = f:readObject():double()
	data = shuffle(data)
	local wlist = f:readObject()
	local cwlist = f:readObject()
	f:close()

	--=============== main part ============

	local ws = data[{{1},{}}]	-- first row
	local cws = data[{{2,-1},{}}]	-- remaining rows
	local compdim = wemb:size(2)

	-- Simplest model: identity composition matrix and zero biases, so the
	-- component probabilities are just 'sigmoidized' embeddings.
	local model = AddCompRBM:new(wemb, compdim, 1, wlist)
	model.wembcomp = torch.eye(compdim)
	model.compbias:fill(0)
	model.wembbias:fill(0)
	model:save_to_file('comp_model/1.model')

	print('generate compstates')
	local comp_probs, _ = model:compute_comp_probs_states(ws)
	local compstates = comp_probs	-- use probabilities, not sampled states

	-- Stack context rows on top of the component states; column count
	-- stays that of the original data matrix.
	local nctxwords = cws:size(1)
	local newdata = torch.zeros(nctxwords + compstates:size(1),
								data:size(2))
	newdata[{{1,nctxwords},{}}]:copy(cws)
	newdata[{{nctxwords+1,-1},{}}]:copy(compstates)

	-- Serialize the new data matrix together with the word lists.
	print('save to file')
	local out = torch.DiskFile(output_path, 'w')
	out:binary()
	out:writeObject(newdata)
	out:writeObject(wlist)
	out:writeObject(cwlist)
	out:close()
end

-- CLI entry point: require exactly three positional arguments, publish
-- them as globals for main(), then run; otherwise print a usage line.
if #arg == 3 then
	dic_emb_path	= arg[1]
	data_path		= arg[2]
	output_path 	= arg[3]
	main()
else
	print("<dic_emb_path> <data_path> <output_path>")
end




