import os
import torch
from numbers import Number
from torch.autograd import Variable
import torch.nn.functional as F

def cuda(tensor, is_cuda):
    """Return *tensor* moved to the GPU when ``is_cuda`` is truthy, else unchanged."""
    return tensor.cuda() if is_cuda else tensor

def reparametrize_mustd(statistics, K):
    """Split an encoder output into a mean and a positive std.

    The first ``K`` columns of ``statistics`` are taken as the mean; the
    remaining columns are shifted by -5 and passed through softplus so the
    returned std is strictly positive (the -5 offset keeps the initial std
    small).

    Returns:
        (mu, std) tuple of tensors.
    """
    mean = statistics[:, :K]
    raw_std = statistics[:, K:]
    # softplus(x) = log(1 + exp(x)); beta=1 matches the default.
    positive_std = F.softplus(raw_std - 5, beta=1)
    return mean, positive_std

def reparametrize_n(mu, std, n=1):
    """Draw (reparameterized) samples from N(mu, std^2).

    Uses the reparameterization trick ``mu + eps * std`` with
    ``eps ~ N(0, 1)`` so gradients flow through ``mu`` and ``std``.

    Args:
        mu: mean — a tensor, or a plain ``Number`` (only supported when
            ``n != 1``, where it is expanded to shape ``(n, 1)``).
        std: positive std tensor; assumes same shape as ``mu`` when both
            are tensors — TODO confirm against callers.
        n: number of samples. When ``n != 1`` both ``mu`` and ``std`` are
            expanded with a leading sample dimension of size ``n``.

    Returns:
        A tensor of samples, shape ``std.size()`` for ``n == 1`` and
        ``(n, *std.size())`` otherwise.
    """
    # reference :
    # http://pytorch.org/docs/0.3.1/_modules/torch/distributions.html#Distribution.sample_n
    def expand(v):
        if isinstance(v, Number):
            return torch.Tensor([v]).expand(n, 1)
        else:
            return v.expand(n, *v.size())

    if n != 1:
        mu = expand(mu)
        std = expand(std)

    # randn_like allocates eps on std's device/dtype directly, replacing the
    # deprecated Variable(std.data.new(...).normal_()) pattern; the old
    # cuda(..., std.is_cuda) call was redundant because .new() already
    # allocated on std's device.
    eps = torch.randn_like(std)

    return mu + eps * std