import torch
import numpy
from torch.types import Any
from pprint import pprint


class Gelu(torch.autograd.Function):
    """Sigmoid-approximated GELU activation as a custom autograd Function.

    Forward:  gelu(x)  = x * sigmoid(a * x),           with a = 1.702
    Backward: gelu'(x) = sigmoid(a*x) + a * x * sigmoid(a*x) * (1 - sigmoid(a*x))
    """

    # Scaling constant of the sigmoid approximation to the Gaussian CDF.
    ParamAlpha = 1.702

    @staticmethod
    def forward(ctx: Any, *args: Any, **kwargs: Any) -> Any:
        """Compute x * sigmoid(alpha * x) for the input tensor args[0].

        The input is stashed with save_for_backward (not as a raw ctx
        attribute) so autograd can track in-place modifications and
        manage the tensor's memory correctly.
        """
        x = args[0]
        ctx.save_for_backward(x)
        return x * torch.sigmoid(Gelu.ParamAlpha * x)

    @staticmethod
    def backward(ctx: Any, *grad_outputs: Any) -> Any:
        """Apply the chain rule: upstream grad times the GELU derivative."""
        (x,) = ctx.saved_tensors
        sig = torch.sigmoid(Gelu.ParamAlpha * x)
        # d/dx [x * sigmoid(a*x)] = sigmoid(a*x) + a*x*sigmoid(a*x)*(1 - sigmoid(a*x))
        return grad_outputs[0] * (sig + Gelu.ParamAlpha * x * sig * (1 - sig))


if __name__ == "__main__":
    # Demo: run the custom GELU forward and inspect its analytic gradient.
    sample = torch.randn(4, 2, dtype=torch.float, requires_grad=True)
    print(f"原始数据：{sample}")
    activated = Gelu.apply(sample)
    print(f"激活数据：{activated}")
    # Seed backward with an all-ones upstream gradient.
    activated.backward(torch.ones_like(activated))
    print(f"一阶导数: {sample.grad}")
