import numpy as np
from activations import Sigmoid

'''
ref:
    https://blog.csdn.net/qq_36937684/article/details/105333895
    https://zhuanlan.zhihu.com/p/115571464
'''


class LinearLayer():
    """Fully connected layer computing z = X @ w - b.

    Sign convention: the bias is *subtracted* (z = wx - b), so the
    backward pass uses dz/db = -1 (hence the negated bias gradient).
    """

    def __init__(self, num_inputs, num_outputs):
        # Weights --- (num_inputs, num_outputs); each column is one output node.
        # NOTE(review): uniform [0, 1) init is all-positive; a zero-centred
        # init (e.g. Xavier) usually trains better — confirm before changing.
        self.w = np.random.rand(num_inputs, num_outputs)
        # Bias --- (1, num_outputs) row vector, broadcast over the batch.
        self.b = np.zeros((1, num_outputs))

        # Cache of the forward input X (i.e. dz/dw); overwritten each forward pass.
        self._grad_w = np.zeros_like(self.w)

    def _forward(self, X):
        """Forward pass.

        Args:
            X: input batch, shape (batch, num_inputs).
        Returns:
            z = X @ w - b, shape (batch, num_outputs).
        """
        # (batch, num_inputs) @ (num_inputs, num_outputs) = (batch, num_outputs)
        z = X @ self.w - self.b  # z = wx - b

        self._grad_w = X  # dz/dw is the input itself --- (batch, num_inputs)
        return z

    def _backward(self, grad_prev, lr):
        """Backward pass: propagate the gradient and apply an SGD step.

        Args:
            grad_prev: dE/dz from the next layer, shape (batch, num_outputs).
            lr: learning rate for the in-place SGD update.
        Returns:
            dE/dX, shape (batch, num_inputs), for the previous layer.
        """
        batch_size = grad_prev.shape[0]
        # Average the weight gradient over the batch so the step size is
        # independent of batch size (fix: previously the weight gradient
        # was a batch *sum* while the bias gradient was a batch *mean*,
        # giving the two parameters inconsistent effective learning rates).
        grad_e_w = (np.transpose(self._grad_w) @ grad_prev) / batch_size
        grad_e_b = -grad_prev.mean(axis=0)  # dE/db --- (num_outputs,); dz/db = -1
        # dE/dX must use the pre-update weights, so compute it before stepping.
        grad_e_x = grad_prev @ np.transpose(self.w)  # dE/dX --- (batch, num_inputs)

        # SGD parameter update.
        self.w -= lr * grad_e_w
        self.b -= lr * grad_e_b

        return grad_e_x

    def _get_params(self):
        """Return the trainable parameters as a dict."""
        return {'w':self.w, 'b':self.b}

    def set_params(self, arg):
        """Load parameters from a dict with keys 'w' and 'b'."""
        self.w = arg['w']
        self.b = arg['b']


class SigmoidLayer():
    """Element-wise sigmoid activation layer.

    Holds no trainable parameters; it only caches the local derivative
    dy/dz computed during the forward pass for reuse in backprop.
    """

    def __init__(self):
        # Cached dy/dz from the most recent forward pass.
        self._grad_z = 0

    def _forward(self, Z):
        """Apply the sigmoid to Z and cache its derivative."""
        self._grad_z = Sigmoid.grad(Z)  # dy/dz, same shape as Z
        return Sigmoid.fn(Z)

    def _backward(self, grad_prev, lr=0):
        """Chain rule: dE/dz = dE/dy * dy/dz. `lr` is unused (no parameters)."""
        return self._grad_z * grad_prev

    def _get_params(self):
        # No trainable parameters to expose.
        return None

    def set_params(self, arg):
        # Nothing to load; kept for interface parity with LinearLayer.
        return None
