import torch
from torch.nn import functional as F


def sample(dist, top_k=1):
    """Sample one token id per batch row, restricted to the top-k entries of dist.

    Args:
        dist: Tensor of shape [batch_size, vocab_size]. Treated as unnormalized
            scores/logits when top_k > 1 (a softmax is applied after masking).
        top_k: Number of highest-scoring candidates to sample among. When
            top_k <= 1 the argmax is taken (greedy decoding, no randomness).

    Returns:
        (top_k_ids, top_k_scores), each of shape [batch_size, 1].
        NOTE(review): when top_k <= 1 the scores are raw values from `dist`,
        but when top_k > 1 they are post-softmax probabilities — callers that
        compare scores across the two modes should confirm this is intended.
    """
    if top_k <= 1:
        # Greedy: take the single best entry per row.
        top_k_scores, top_k_ids = dist.topk(1, dim=1)
    else:
        # Find the k-th largest value per row; [batch, 1] so it broadcasts
        # against dist without materializing a [batch, vocab] copy.
        top_values, _ = torch.topk(dist, top_k, dim=1)  # [batch, k]
        kth_best = top_values[:, -1:]  # [batch, 1]
        # Mask everything strictly below the k-th best to -inf so softmax
        # assigns it zero probability. (Ties at the k-th value all survive,
        # so slightly more than k candidates may remain — same as original.)
        dist = dist.masked_fill(dist < kth_best, float('-inf'))
        dist = F.softmax(dist, dim=1)  # renormalize over the survivors
        # Draw one index per row from the truncated distribution.
        top_k_ids = torch.multinomial(dist, num_samples=1)  # [batch, 1]
        top_k_scores = dist.gather(dim=1, index=top_k_ids)
    return top_k_ids, top_k_scores
