import torch


class Line(torch.autograd.Function):
    """Custom autograd Function computing the element-wise affine map y = w * x + b.

    Demonstrates how to define both the forward pass and the analytic
    backward pass (chain rule) for a simple operation.
    """

    @staticmethod
    def forward(ctx, w, x, b):
        """Compute y = w * x + b element-wise.

        Only ``w`` and ``x`` are saved for backward: the gradient w.r.t.
        ``b`` is just the incoming gradient, so ``b`` itself is never
        needed and saving it would only retain memory unnecessarily.
        """
        ctx.save_for_backward(w, x)
        return w * x + b

    @staticmethod
    def backward(ctx, grad_out):
        """Propagate ``grad_out`` (the upstream gradient) via the chain rule.

        dY/dw = x, dY/dx = w, dY/db = 1, so:
            grad_w = grad_out * x
            grad_x = grad_out * w
            grad_b = grad_out

        ``ctx.needs_input_grad`` is consulted so we return ``None`` (and
        skip the multiply) for any input that does not require a gradient —
        the idiom recommended by the PyTorch autograd-extension docs.
        """
        w, x = ctx.saved_tensors
        grad_w = grad_out * x if ctx.needs_input_grad[0] else None
        grad_x = grad_out * w if ctx.needs_input_grad[1] else None
        grad_b = grad_out if ctx.needs_input_grad[2] else None
        return grad_w, grad_x, grad_b


if __name__ == '__main__':
    # Three random 2x2 operands; requires_grad=True so autograd tracks them
    # through the custom Function.
    w, x, b = (torch.rand(2, 2, requires_grad=True) for _ in range(3))

    # Line.apply(...) invokes Line.forward under the hood.
    out = Line.apply(w, x, b)
    print("out.shape: ", out.shape)
    print("============== out: ")
    print(out)

    # Backpropagate with an all-ones upstream gradient (out is non-scalar,
    # so backward needs an explicit gradient argument).
    out.backward(torch.ones(2, 2))

    print("============== w, x, b : ")
    print(w, x, b)
    print("============== w.grad, x.grad, b.grad :")
    print(w.grad, x.grad, b.grad)
