import torch
import torch.nn as nn
from   torch.utils.cpp_extension import load


#! JIT-compile the C++/CUDA sources into an importable extension module.
#! torch.utils.cpp_extension.load builds gelu.cc / gelu_kernel.cu on the fly
#! and returns the resulting Python module (exposes forward/backward below).
gelu = load(name="gelu", sources=["gelu.cc", "gelu_kernel.cu"])

#! 2. Define a GeLU operator with both forward and backward passes.
class MyGeLUAutoGrad(torch.autograd.Function):
    """Autograd bridge for the JIT-compiled ``gelu`` extension.

    ``forward`` delegates to ``gelu.forward`` and stashes the input;
    ``backward`` chains the upstream gradient through ``gelu.backward``.
    """

    @staticmethod
    def forward(ctx, x):
        """Run the extension's forward pass, saving ``x`` for backward."""
        ctx.save_for_backward(x)          # keep the input for the backward pass
        return gelu.forward(x)

    @staticmethod
    def backward(ctx, grad_output):
        """Return d(loss)/d(x) given the upstream gradient ``grad_output``."""
        # grad_output is what e.g. y.backward(torch.ones_like(y)) feeds in.
        (saved_input,) = ctx.saved_tensors
        return gelu.backward(grad_output, saved_input)
    

class GeLU(nn.Module):
    """``nn.Module`` wrapper that applies the custom GeLU autograd op."""

    def __init__(self):
        super().__init__()

    def forward(self, x):
        # Route through the Function subclass so autograd uses our backward.
        return MyGeLUAutoGrad.apply(x)
    
#! 3. Smoke test on CPU: forward + backward through the custom GeLU.
x = torch.randn(2, 10, dtype=torch.float32, requires_grad=True)
print(x)

net = GeLU()
y   = net(x)

# Seed the backward pass with all-ones upstream gradients;
# retain_graph=True keeps the graph alive for a possible second backward.
y.backward(torch.ones_like(y), retain_graph=True)     
print(x.grad)


print("#########################################################")
xx = torch.randn(2, 10, dtype=torch.float32, requires_grad=True, device="cuda")
print(xx)

# net = GeLU().cuda()
# yy  = net(xx)
# # print(yy)

# yy.backward(torch.ones_like(yy).cuda(), retain_graph=True)     
# print(xx.grad)

#! Numerical gradient check of the custom op against finite differences.
# NOTE(review): torch.autograd.gradcheck normally expects float64 inputs;
# with float32 and eps=1e-3 the check can be flaky. Presumably the CUDA
# kernel only supports float32 — verify against gelu_kernel.cu.
flag = torch.autograd.gradcheck(MyGeLUAutoGrad.apply, (xx, ), eps=1e-3)

print(f"success = {flag}")