from pytorch_loss import TaylorSoftmaxV3
from torch.nn import ELU
import torch.nn as nn
import torch

class EluTaylorSoftmax(nn.Module):
    """
    Drop-in replacement for the usual Softmax: composes ELU with
    TaylorSoftmax, replacing the exponential non-linearity with a
    polynomial (Taylor) approximation, while ELU preserves monotonicity
    of the pre-softmax scores.
    """

    def __init__(self, dim=1, n=2):
        # dim: dimension along which the softmax normalization is applied.
        # n: order of the Taylor expansion (forwarded to TaylorSoftmaxV3).
        super().__init__()
        self.elu = ELU()
        self.taylor_softmax = TaylorSoftmaxV3(dim=dim, n=n)

    def forward(self, x):
        """
        Usage similar to nn.Softmax:
        >>> mod = EluTaylorSoftmax(dim=1, n=4)
        >>> inten = torch.randn(1, 32, 64, 64)
        >>> out = mod(inten)
        """
        x = self.elu(x)
        out = self.taylor_softmax(x)
        return out


if __name__ == "__main__":
    # Smoke test: run on GPU when one is available, otherwise fall back
    # to CPU (the original unconditionally called .cuda(), which crashes
    # on CPU-only hosts).
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    m = EluTaylorSoftmax(dim=2, n=2).to(device)
    # Renamed from `input` to avoid shadowing the builtin.
    x = torch.randn(3, 3, 9, device=device)
    out = m(x)
    print(out)