import numpy as np

# Single-layer fully-connected model with sigmoid activation.
class Layer:
    def __init__(self, input_num, output_num, weight=None, bias=None):
        """One dense layer: y = sigmoid(x @ W + b).

        Args:
            input_num: number of input features (fan-in).
            output_num: number of output units (fan-out).
            weight: optional initial weight matrix, shape (input_num, output_num).
                The original accepted this parameter but silently ignored it.
            bias: optional initial bias, reshaped to (1, output_num).
        """
        if weight is not None:
            self.weights = np.asarray(weight, dtype=float)
        else:
            # Scale by fan-in (1/input_num): standard Xavier-style init.
            # The original scaled by fan-out, which is not the usual convention.
            self.weights = np.random.randn(input_num, output_num) * np.sqrt(1.0 / input_num)
        if bias is not None:
            self.bias = np.asarray(bias, dtype=float).reshape(1, output_num)
        else:
            self.bias = np.random.randn(1, output_num) * 0.1
        self.dw = None      # dL/dW, filled by backward()
        self.db = None      # dL/db, filled by backward()
        self.x = None       # cached forward input, needed for dw
        self.z = None       # cached pre-activation, needed for the sigmoid derivative
        self.cache = None   # gradient propagated to the previous layer

    def sigmoid(self, x):
        """Element-wise logistic sigmoid 1 / (1 + e^-x).

        The original omitted `self`, so every call via `self.sigmoid(...)`
        raised TypeError.
        """
        return 1 / (1 + np.exp(-x))

    def sigmoid_derive(self, x):
        """Derivative of the sigmoid evaluated at pre-activation x."""
        s = self.sigmoid(x)  # compute once instead of twice
        return s * (1 - s)

    def forward(self, input):
        """Run the layer on `input` (batch, input_num) and return activations."""
        self.x = input
        self.z = np.dot(input, self.weights) + self.bias
        return self.sigmoid(self.z)

    # Backward-compatible alias for the original misspelled method name.
    foward = forward

    def backward(self, input):
        """Backprop: `input` is dL/da from the next layer, shape (batch, output_num).

        Stores dw/db for update() and returns dL/dx for the previous layer.
        """
        dz = input * self.sigmoid_derive(self.z)
        self.dw = np.dot(self.x.T, dz)
        # Sum over the batch axis so db keeps the bias shape (1, output_num).
        # Assigning dz directly (as the original did) broadcast the bias to
        # (batch, output_num) on the first update() with batch > 1.
        self.db = np.sum(dz, axis=0, keepdims=True)
        self.cache = np.dot(dz, self.weights.T)
        return self.cache

    def update(self, learning_rate):
        """Plain gradient-descent step using the gradients from backward()."""
        self.weights -= learning_rate * self.dw
        self.bias -= learning_rate * self.db
