import torch
from torch import nn

def masked_softmax(x, valid_len):
    """Softmax over the last axis of ``x``, ignoring positions past ``valid_len``.

    Args:
        x: 3-D tensor of attention scores, shape ``(batch, queries, keys)``.
        valid_len: ``None`` for a plain softmax; a 1-D tensor of per-batch
            valid lengths (broadcast to every query row); or a 2-D tensor of
            shape ``(batch, queries)`` with a length per query row.

    Returns:
        A new tensor of the same shape as ``x`` where masked positions
        receive ~0 probability. The input ``x`` is NOT modified.
    """
    if valid_len is None:
        return nn.functional.softmax(x, dim=-1)
    # Expand a per-batch length to one entry per query row.
    if valid_len.dim() == 1:
        valid_len = torch.repeat_interleave(valid_len, x.size(1)).reshape(x.size(0), x.size(1))
    # Key positions with index >= valid_len are masked. Use the key axis
    # x.size(-1) — not x.size(1) — so non-square score matrices work too.
    key_idx = torch.arange(x.size(-1), device=x.device)
    mask = key_idx[None, None, :] < valid_len[:, :, None]
    # masked_fill returns a new tensor instead of mutating the caller's input;
    # -1e9 makes masked logits vanish after softmax.
    x = x.masked_fill(~mask, -1e9)
    return nn.functional.softmax(x, dim=-1)

# Demo: masked softmax on random (batch=2, queries=4, keys=4) scores,
# with per-query valid lengths 1..4 in both batch entries.
scores = torch.rand(2, 4, 4)
lengths = torch.tensor([[1, 2, 3, 4], [1, 2, 3, 4]])
print(masked_softmax(scores, lengths))