require 'cutils'

-- Return a copy of `data` with its columns randomly permuted.
-- Only dim == 2 (shuffling along the second dimension) is supported;
-- any other dim raises an error.
function shuffle( data, dim )
	local dim = dim or 2
	if dim ~= 2 then
		error('not implemented yet!!!')
	end

	local shuffled = data:clone()
	local n = data:size(2)
	local perm = torch.randperm(n)
	for col = 1,n do
		-- column `col` of the result takes column perm[col] of the input
		shuffled[{{},col}]:copy(data[{{},perm[col]}])
	end
	return shuffled
end

-- Split `str` into a table of tokens by repeatedly matching `pattern`
-- (default: runs of characters that are neither tab nor space).
-- Returns an array-style table; empty input yields an empty table.
function split_string( str , pattern )
	pattern = pattern or "[^\t ]+"
	local toks = {}
	for tok in string.gmatch(str, pattern) do
		table.insert(toks, tok)
	end
	return toks
end

-- Elementwise logistic sigmoid: 1 / (1 + exp(-x)).
-- Returns a new tensor; `x` itself is not modified.
function sigmoid( x )
	local denom = (-x):exp():add(1)
	local numer = torch.ones(denom:size())
	return numer:cdiv(denom)
end

-- Draw one Bernoulli sample per entry of `probs`: result[i] = 1 with
-- probability probs[i], else 0 (elementwise probs >= uniform noise).
function sample_bernoulli( probs )
	local noise = torch.rand(probs:size())
	return torch.ge(probs, noise)
end

-- Column-wise softmax of matrix A, computed in a numerically stable
-- way: the per-column maximum is subtracted before exponentiation so
-- exp() cannot overflow. Returns a new tensor the same size as A.
function safe_compute_softmax( A )
	-- fix: was `maxA,_ = A:max(1)`, which leaked globals `maxA` and `_`
	local maxA = A:max(1)
	maxA = torch.repeatTensor(maxA, A:size(1), 1)
	-- B = log-sum-exp of the shifted columns, broadcast back to A's shape
	local B = torch.repeatTensor(
		(A - maxA):exp():sum(1):log(), A:size(1), 1)
	return (A - maxA - B):exp()
end

-- M[i,:,:] = outer product of u[i,:] and v[i,:]
-- u: (n x p), v: (n x q)  ->  M: (n x p x q)
function multi_oprod(u, v)
	-- fix: M was an accidental global; declare it local
	local M = torch.Tensor(u:size(1), u:size(2), v:size(2))
	for i = 1,M:size(1) do
		-- outer product as (p x 1) * (1 x q) matrix multiply
		M[{i,{},{}}]:mm(u[{{i},{}}]:t(), v[{{i},{}}])
	end
	return M
end

-- test --
-- Sanity check: the numerically-stable softmax should agree with the
-- naive exp/normalize computation on well-conditioned random input.
function test()
	local A = torch.rand(5, 10)
	-- fix: was `compute_safe_softmax(A)` — no such function exists;
	-- the definition in this file is named `safe_compute_softmax`
	local ssmA = safe_compute_softmax(A)

	local temp = torch.exp(A)
	local smA = temp:cdiv( torch.repeatTensor( temp:sum(1), temp:size(1), 1))

	print(ssmA)
	print(smA)
end
