# Base class for network layers
class Layer:
    """Base class for a neural-network layer.

    Holds weight/bias parameters, their gradients, and a learning rate.
    The default ``forward``/``backward`` are identity passes; subclasses
    override them with real layer logic and populate ``gradient_w`` /
    ``gradient_b`` during the backward pass.
    """

    def __init__(self, learning_rate=0.01):
        """Initialize parameters, gradients, and the SGD learning rate.

        Args:
            learning_rate: Step size used by ``update_weights``
                (defaults to 0.01, matching the previous hard-coded value).
        """
        self.weights = 0         # weight parameters
        self.gradient_w = 0      # gradient w.r.t. weights
        self.biases = 0          # bias parameters
        self.gradient_b = 0      # gradient w.r.t. biases
        self.learning_rate = learning_rate

    def forward(self, input_tensor):
        """Identity forward pass; subclasses override with layer logic."""
        return input_tensor

    def backward(self, error_tensor):
        """Identity backward pass; subclasses override with layer logic."""
        return error_tensor

    def update_weights(self):
        """Apply one plain-SGD step: param -= gradient * learning_rate."""
        self.weights -= self.gradient_w * self.learning_rate
        self.biases -= self.gradient_b * self.learning_rate

    def update_learning_rate(self, lr):
        """Replace the current learning rate with ``lr``."""
        self.learning_rate = lr
