import paddle as pp
from paddle import nn


class XSoftmaxLayer(nn.Layer):
    """
    Unoptimized version of Masked Softmax using Layer. Sorry.

    Computes softmax along axis `dim` restricted to positions where `mask`
    is truthy; masked-out positions are exactly 0 in the result.
    """
    def forward(self, input, mask, dim):
        """Apply a masked softmax over `input` along axis `dim`.

        Args:
            input: tensor of scores.
            mask: tensor expandable (via `expand_as`) to `input`'s shape;
                nonzero/True entries mark positions that take part in the
                softmax.
            dim: axis along which the softmax is computed.

        Returns:
            Tensor with the same shape as `input`; masked positions are 0.
        """
        # Hoist the boolean mask once instead of recomputing
        # astype(...).expand_as(...) both before and after the softmax.
        keep = mask.astype(pp.bool).expand_as(input)
        # There is no `masked_fill` in paddle; `paddle.where(cond, x, y)`
        # keeps `x` where cond is True (same sense as torch.where), so the
        # masked-out slots receive -inf. `full_like` avoids allocating a
        # ones tensor just to scale it by -inf.
        output = pp.where(keep, input, pp.full_like(input, float('-inf')))
        output = nn.functional.softmax(output, dim)
        # In torch, softmax over a fully-masked (-inf) row yields NaN, which
        # torch code fills back with 0. In paddle softmax(-inf) -> 0 already,
        # but zeroing the masked slots explicitly is cheap and keeps the
        # contract obvious.
        output = pp.where(keep, output, pp.zeros_like(output))
        return output


# Shared module-level instance; the layer defines no parameters or state of
# its own, so one callable instance can be reused everywhere as
# `XSoftmax(input, mask, dim)`.
XSoftmax = XSoftmaxLayer()
