import torch
def _generate_square_subsequent_mask(sz):
    mask = (torch.triu(torch.ones(sz, sz)) == 1).transpose(0, 1)
    mask = mask.float().masked_fill(mask == 0, float('-inf')).masked_fill(mask == 1, float(0.0))
    return mask
# Module-level causal mask for length-20 sequences: 0.0 on/below the
# diagonal, -inf above (additive attention mask).
mask = _generate_square_subsequent_mask(20)
def padding_mask(seq_k, seq_q, pad_token=1):
    """Build a boolean padding mask of shape [B, L_q, L_k].

    Both `seq_k` and `seq_q` are token-id tensors of shape [B, L].
    Entry (b, i, j) is True when seq_k[b, j] == pad_token, i.e. the j-th
    key position is padding and should be ignored by every query row i.
    """
    q_len = seq_q.size(1)
    is_pad = seq_k == pad_token              # [B, L_k], True at pad tokens
    # Broadcast the per-key flags across the query dimension.
    return is_pad.unsqueeze(1).expand(-1, q_len, -1)

def sequence_mask(seq_len):
    """Return a float causal mask of shape [1, L, L].

    1.0 marks a future (masked) position — strictly above the diagonal —
    and 0.0 marks an attendable one.  The leading singleton dimension
    lets the mask broadcast over a batch.
    """
    upper = torch.ones(seq_len, seq_len).triu(diagonal=1)
    return upper[None, :, :]
# Smoke test: build a causal mask sized to the last dimension (20) of a
# dummy batch and print it.
# NOTE(review): despite the name, `pad` holds the output of sequence_mask
# (a causal/look-ahead mask), not a padding mask — consider renaming.
k = torch.ones((16,20))
pad = sequence_mask(k.size(-1))  # shape [1, 20, 20]
print(pad, pad.shape)