import torch  # 必须先导入torch
import sigmoid_op


class Sigmoid(torch.autograd.Function):
    """Custom autograd sigmoid backed by the ``sigmoid_op`` C++ extension.

    Invoke via ``Sigmoid.apply(x)`` so that autograd records the op and
    routes gradients through :meth:`backward`.
    """

    @staticmethod
    def forward(ctx, input: torch.Tensor) -> torch.Tensor:
        """Compute ``sigmoid(input)`` and stash ``input`` for backward.

        Args:
            ctx: autograd context used to save tensors for the backward pass.
            input: tensor to apply the sigmoid to.

        Returns:
            The elementwise sigmoid of ``input`` as computed by the extension.
        """
        # Save the raw input; backward() re-derives the local derivative
        # from it via sigmoid_op.sigmoid_backward.
        ctx.save_for_backward(input)
        return sigmoid_op.sigmoid_forward(input)

    @staticmethod
    def backward(ctx, output_grad: torch.Tensor) -> torch.Tensor:
        """Apply the chain rule: dL/dx = dL/dy * dy/dx.

        Args:
            ctx: autograd context holding the tensor saved in forward().
            output_grad: upstream gradient dL/dy.

        Returns:
            The gradient with respect to ``input`` (dL/dx).
        """
        # Retrieve the input tensor saved during forward().
        input, = ctx.saved_tensors
        # Local derivative dy/dx from the extension — presumably
        # sigmoid'(input) = sigmoid(input) * (1 - sigmoid(input));
        # TODO(review): confirm against the sigmoid_op implementation.
        dodi = sigmoid_op.sigmoid_backward(input)
        # NOTE: removed leftover debug prints that dumped the saved tensor
        # and the gradient to stdout on every backward pass.
        return dodi * output_grad
