require 'add_comp_rbm'
require 'dict'

torch.setdefaulttensortype('torch.DoubleTensor')

--- Build the training set for the compositional RBM from raw text.
-- Extracts (word-pair, context-window) tuples, unigram frequency tables for
-- alias sampling, and word embeddings.
-- @param rawtextfile   path to whitespace-tokenized text, one sentence per line
-- @param ctxwordpath   path to the context-word dictionary file
-- @param wordpath      path to the target-word dictionary file
-- @param ctxwindowsize number of context positions taken on each side
-- @param wordembpath   path to the embedding file (2 ints: nword, embdim; then doubles)
-- @return data (2*ctxwindowsize+2 x ntuples tensor, one tuple per column;
--         last two rows are the word pair), cwdic, wdic, cwemb, wemb,
--         sampler_storage (alias tables + probabilities for sampling)
function load_data( rawtextfile, ctxwordpath, wordpath , ctxwindowsize, wordembpath )

	-- load dictionaries; special markers handle boundaries and OOV words
	local cwdic = Dict:new()
	cwdic:load(ctxwordpath)
	cwdic:addword('#START#')
	cwdic:addword('#END#')
	cwdic:addword('#ANOTHER#')

	local wdic = Dict:new()
	wdic:load(wordpath)
	wdic:addword('#NULL#')

	-- FIX: frequency tables were accidental globals; they are only used here
	-- (and returned inside sampler_storage), so keep them local.
	-- Counts start at 1 (add-one smoothing) so every id has nonzero probability.
	local cwfreq = torch.ones(cwdic:size())
	local wfreq = {torch.ones(wdic:size()), torch.ones(wdic:size())}

	-- Map sentence position j to a context-word id, handling positions before
	-- the sentence start, past its end, and out-of-vocabulary tokens.
	-- (Factors out four identical copy-pasted lookup snippets.)
	local function ctx_id(sent, j, ntoks)
		if j < 1 then return cwdic.word2id['#START#'] end
		if j > ntoks then return cwdic.word2id['#END#'] end
		return cwdic.word2id[sent[j]] or cwdic.word2id['#ANOTHER#']
	end

	-- load extract tuples
	local acc = {}
	for line in io.lines(rawtextfile) do
		local sent = {}
		for token in string.gmatch(line, "[^ ]+") do
			sent[#sent+1] = token
		end

		local ntoks = #sent
		for i,tok in ipairs(sent) do
			local id2 = wdic.word2id[tok]
			if id2 ~= nil then
				-- single-word tuple (#NULL#, word): sampled at 10% to balance
				-- against the phrase tuples
				if math.random() < 0.1 then
					local d = torch.Tensor(2*ctxwindowsize+2)
					d[-2] = wdic.word2id['#NULL#']
					d[-1] = id2
					wfreq[1][{d[-2]}] = wfreq[1][{d[-2]}] + 1
					wfreq[2][{d[-1]}] = wfreq[2][{d[-1]}] + 1

					-- left context window
					for j=i-ctxwindowsize,i-1 do
						local cwid = ctx_id(sent, j, ntoks)
						d[j-i+ctxwindowsize+1] = cwid
						cwfreq[cwid] = cwfreq[cwid] + 1
					end
					-- right context window
					for j=i+1,i+ctxwindowsize do
						local cwid = ctx_id(sent, j, ntoks)
						d[j-i+ctxwindowsize] = cwid
						cwfreq[cwid] = cwfreq[cwid] + 1
					end
					acc[#acc+1] = d 
				end
				-- phrase tuple (previous word, word) with context around the pair
				if i >= 2 then
					local id1 = wdic.word2id[sent[i-1]]
					if id1 ~= nil then
						local d = torch.Tensor(2*ctxwindowsize+2)
						d[-2] = id1
						d[-1] = id2
						wfreq[1][{d[-2]}] = wfreq[1][{d[-2]}] + 1
						wfreq[2][{d[-1]}] = wfreq[2][{d[-1]}] + 1

						-- left context window (shifted: pair starts at i-1)
						for j=i-ctxwindowsize-1,i-2 do
							local cwid = ctx_id(sent, j, ntoks)
							d[j-(i-1)+ctxwindowsize+1] = cwid
							cwfreq[cwid] = cwfreq[cwid] + 1
						end
						-- right context window
						for j=i+1,i+ctxwindowsize do
							local cwid = ctx_id(sent, j, ntoks)
							d[j-i+ctxwindowsize] = cwid
							cwfreq[cwid] = cwfreq[cwid] + 1
						end
						acc[#acc+1] = d
					end
				end
			end
		end
	end

	-- pack accumulated tuples into one tensor, one tuple per column
	local data = torch.Tensor(2*ctxwindowsize+2, #acc)
	for i,d in ipairs(acc) do
		data[{{},i}]:copy(d)
	end

	-- create sampler_storage: normalize counts into distributions and build
	-- alias-method sampling tables
	for _,wf in ipairs(wfreq) do
		wf:div( wf:sum())
	end
	cwfreq = cwfreq:div(cwfreq:sum())
	-- FIX: sampler tables were accidental globals; keep them local
	local cw_alias, cw_prob = init_alias_sampling(cwfreq)

	local w_ws = {}
	local w_prob = {}
	for i = 1,2 do 
		w_ws[i] = {}
		w_ws[i].alias, w_ws[i].prob = init_alias_sampling(wfreq[i])
		w_prob[i] = wfreq[i]
	end

	local sampler_storage = { cw_ws = { alias = cw_alias, prob = cw_prob },
						cw_prob = cwfreq,
						w_ws = w_ws,
						w_prob = w_prob
					}

	-- load embeddings; rows without a pretrained vector stay random gaussian
	-- FIX: the file handle was an accidental global
	local f = torch.DiskFile(wordembpath, 'r')
	local info = f:readInt(2)
	local nword = info[1]	
	local embdim = info[2]	
	local embs = 	torch.Tensor(f:readDouble(nword*embdim))
					:resize(nword, embdim)
	f:close()
	local cwemb = torch.randn(cwdic:size(), embdim)
	cwemb[{{1,nword},{}}]:copy(embs)
	local wemb = torch.randn(wdic:size(), embdim)
	wemb[{{1,nword},{}}]:copy(embs)

	return data, cwdic, wdic, cwemb, wemb, sampler_storage
end


--- One-off preprocessing step: build the toy dataset and serialize it.
-- Runs load_data over the toy corpus and writes the resulting tuples,
-- dictionaries, embeddings and sampler tables to 'data/toydata.dat'
-- (binary Torch serialization) for later consumption by main().
function extract_data()

	-- load data
	-- FIX: these six were accidental globals; they are only used below
	local data, cwdic, wdic, cwemb, wemb, sampler_storage = 
			load_data( 	'data/toydata/rawtext.txt', 
						'data/toydata/ctxwords.txt', 
						'data/toydata/words.txt', 2, 
						'data/toydata/embs.txt' )
	local f = torch.DiskFile('data/toydata.dat', 'w')
	f:binary()
	f:writeObject(data)
	f:writeObject(cwdic)
	f:writeObject(wdic)
	f:writeObject(cwemb)
	f:writeObject(wemb)
	f:writeObject(sampler_storage)
	f:close()

	print(sampler_storage)
end

--extract_data()

--- Train the additive compositional RBM, or generate hidden-state data.
-- With TRAIN = true: trains AddCompRBM on the word-pair columns of the
-- dataset. With TRAIN = false (current setting): loads a saved model,
-- computes hidden-unit probabilities for each word pair, and writes
-- 'data/newtoydata.dat' where the two word rows of each tuple are replaced
-- by the hidden-state rows.
function main()
	-- load data serialized by extract_data()
	local f = torch.DiskFile('data/toydata.dat', 'r')
	f:binary()
		local data = f:readObject():long()
		-- readObject does not restore metatables; reattach Dict methods
		local cwdic = f:readObject() setmetatable(cwdic, Dict_mt)
		local wdic = f:readObject() setmetatable(wdic, Dict_mt)
		local cwemb = f:readObject()
		local wemb = f:readObject()
		local sampler_storage = f:readObject()
	f:close()

	-- last two rows of each column hold the (word1, word2) pair
	-- FIX: comp_data and TRAIN were accidental globals
	local comp_data = data[{{-2,-1},{}}]:double()
	local TRAIN = false

	-- training
	if TRAIN then
		print('training...')

		-- FIX: hyperparameters were accidental globals; keep them local
		local init_momentum = 0.9
		local final_momentum = 0.9
		local eps = 0.01
		local weightcost = 0.001
		local chainlen = 101

		local nepoch = 100
		local batchsize = 100

		local hiddim = 100

		local model = AddCompRBM:new(wemb, hiddim, 2)
		--local model = AddCompRBM:load_from_file('model/31.model')
	
		print('training rbms...')
		model:train(comp_data, nepoch, batchsize, init_momentum, 
						final_momentum, eps, 
						weightcost, chainlen, sampler_storage)

	-- generate data
	else 
		local model = AddCompRBM:load_from_file('comp_model/4.model')
		print('generate hidstates')
		local hid_probs, _ = model:compute_comp_probs_states(comp_data)
		-- probabilities are used directly as the hidden "states"
		local hidstates = hid_probs
		-- new layout: original context rows, then the hidden-state rows
		local newdata = torch.zeros(data:size(1)-2+hidstates:size(1), 
									data:size(2))
		newdata[{{1,data:size(1)-2},{}}]:copy(data[{{1,-3},{}}])
		newdata[{{data:size(1)-2+1,-1},{}}]:copy(hidstates)
	
		print('save to file')
		local f = torch.DiskFile('data/newtoydata.dat', 'w')
		f:binary()
		f:writeObject(newdata)
		f:writeObject(cwdic)
		f:writeObject(wdic)
		f:writeObject(cwemb)
		f:writeObject(wemb)
		f:writeObject(sampler_storage)
		f:close()
	end
	
end

-- script entry point (run extract_data() first to create data/toydata.dat)
main()


