# 某神经节点的输出=激活函数(权重*输入+偏置)
# 数学表示y=f(AX + b)

# 反向传播算法思路
# 首先调整最后一层(靠近输出)网络的权重和偏置

# 本网络的输入输出向量都是列向量(直观反映神经网络图)
# 分类任务：给定笛卡尔坐标系坐标，判断该点是否在圆内

import numpy
import random

def CreateMat(rows, cols) -> any:
    """Create a random matrix drawn from the standard normal distribution.

    Args:
        rows (int): number of rows
        cols (int): number of columns
    Returns:
        numpy.ndarray of shape (rows, cols)
    """
    shape = (rows, cols)
    return numpy.random.randn(*shape)

def ReLU(x) -> any:
    """Element-wise ReLU activation: max(x, 0)."""
    floor = 0
    return numpy.maximum(x, floor)

def SoftMat(x) -> any:
    """Column-wise softmax activation.

    Each column is shifted by its own maximum before exponentiating, so
    every exponent is <= 0 (numerically stable), then normalized so the
    column sums to 1.

    Args:
        x (mat): column vector (a matrix gets a per-column softmax)
    Returns:
        matrix of the same shape; non-negative entries summing to 1
        along axis 0
    """
    shifted = x - numpy.max(x, axis=0, keepdims=True)
    weights = numpy.exp(shifted)
    totals = numpy.sum(weights, axis=0, keepdims=True)
    return weights / totals

def Normalize(x) -> any:
    """Scale values into the range [-1, 1] by dividing by the largest
    absolute value.

    Args:
        x (mat): column vector (any ndarray works)
    Returns:
        x / max(|x|); an all-zero input is returned unchanged instead of
        producing NaNs from the 0/0 division.
    """
    x_max = numpy.max(numpy.abs(x))
    if x_max == 0:
        # Original code divided by zero here, yielding NaNs that then
        # poison every later layer; an all-zero vector is already scaled.
        return x
    return x / x_max

def CaculateLable(x, y, r) -> int:
    """Label a point by its position relative to a circle centred at the
    origin.

    Args:
        x: point x coordinate
        y: point y coordinate
        r: circle radius
    Returns:
        1 if the point lies strictly inside the circle, 0 otherwise
        (points exactly on the circle count as outside)
    """
    inside = (x * x + y * y) < (r * r)
    return 1 if inside else 0

def LossFunc(predict, target) -> any:
    """Element-wise absolute-error loss.

    Args:
        predict (mat): prediction vector
        target (mat): ground-truth vector
    Returns:
        |target - predict|, same shape as the inputs
    """
    diff = target - predict
    return numpy.absolute(diff)

def DemandFunc(predict, target) -> any:
    """Demand function: the signed change requested of the last layer's
    output vector.

    Args:
        predict (mat): prediction vector
        target (mat): ground-truth vector
    Returns:
        error vector target - predict
    """
    return target - predict

def GetAdjustWeightsMat(wights_mat, demand) -> any:
    """Build a weight-adjustment matrix from the current weights and the
    demand vector.

    Each entry is |w[i, j]| scaled by the demand of output row i, so a
    weight's adjustment is proportional to its magnitude and to how much
    its output row needs to change.

    Args:
        wights_mat (mat): current weight matrix, shape (rows, cols)
        demand (mat): demand column vector, shape (rows, 1)
    Returns:
        adjustment matrix with the same shape as wights_mat
    """
    # Broadcasting the (rows, 1) demand column across the columns replaces
    # the original element-by-element Python double loop with a single
    # vectorized operation; the result is numerically identical.
    return numpy.abs(wights_mat) * demand

# One labelled sample: a point (x, y) tested against a circle of radius r.
class DataItem:
    def __init__(self, x, y, r, label = 1):
        """Store one sample.

        Args:
            x: point x coordinate
            y: point y coordinate
            r: circle radius
            label: ground-truth label (-1 unknown, 0 outside, 1 inside)
        """
        self.x = x
        self.y = y
        self.r = r
        self.real_lable = label    # ground truth: -1 unknown, 0 outside, 1 inside
        self.predict_lable = -1    # prediction: -1 unknown, 0 outside, 1 inside

def CreateDataSet(n) -> any:
    """Create a data set of n random labelled points.

    Points are drawn uniformly from the square [-2, 2] x [-2, 2] and each
    is labelled by CaculateLable against a circle of radius 1.
    """
    radius = 1
    samples = []
    for _ in range(n):
        px = random.uniform(-2, 2)
        py = random.uniform(-2, 2)
        samples.append(DataItem(px, py, radius, CaculateLable(px, py, radius)))
    return samples

# One fully-connected network layer.
class layer:
    def __init__(self, input_rows, input_cols, output_rows, output_cols):
        """
        Args:
            input_rows (int): rows of the input vector
            input_cols (int): columns of the input vector
            output_rows (int): rows of the output vector
            output_cols (int): columns of the output vector
        Returns:
            layer
        """
        self.input_rows = input_rows      # input row count
        self.input_cols = input_cols      # input column count
        self.output_rows = output_rows    # output row count
        self.output_cols = output_cols    # output column count
        self.wights_mat = None            # weight matrix (installed later)
        self.bias = 0.0                   # bias value
        self.output = None                # output of the last forward pass

    def GetWightsMatShape(self) -> any:
        """Return the (rows, cols) shape the weight matrix must have."""
        return (self.output_rows, self.input_rows)

    def SetWightsMat(self, mat) -> any:
        """Install the weight matrix."""
        self.wights_mat = mat

    def SetBias(self, bias) -> any:
        """Install the bias value.

        Args:
            bias (float): bias value
        """
        self.bias = bias

    def Forward(self, input) -> any:
        """Forward pass: output = W @ input + bias.

        Args:
            input (mat): input column vector
        """
        product = numpy.matmul(self.wights_mat, input)
        self.output = product + self.bias
        return self.output
    
# Multi-layer neural network.
class NeuralNetwork:
    def __init__(self, shape):
        """
        Args:
            shape (list): vector sizes per stage, e.g. [3, 4, 2] means a
                3-vector -> 4-vector -> 2-vector network (two layers).
        """
        self.shape = shape
        self.layers_num = len(shape) - 1
        self.layers = []
        self.output = None

        # Build each layer with random normal weights and a random bias.
        for i in range(self.layers_num):
            new_layer = layer(self.shape[i], 1, self.shape[i + 1], 1)

            rows, cols = new_layer.GetWightsMatShape()
            new_layer.SetWightsMat(CreateMat(rows, cols))
            new_layer.SetBias(CreateMat(1, 1))

            self.layers.append(new_layer)

    def Forward(self, input) -> any:
        """Forward pass through every layer.

        Hidden layers (including the very first) apply ReLU then
        normalization; the final layer applies softmax.

        NOTE(review): mirrors the original branch ordering — in a
        single-layer network (layer 0 is also the last) the softmax
        branch never runs and this returns None. Confirm if intended.

        Args:
            input (mat): input column vector
        Returns:
            any: output column vector (softmax probabilities)
        """
        current = input
        for i in range(self.layers_num):
            raw = self.layers[i].Forward(current)
            if i > 0 and i == self.layers_num - 1:
                current = SoftMat(raw)
                self.output = current
            else:
                current = Normalize(ReLU(raw))
        return self.output

    def Train(self, data_set) -> any:
        """Training is not implemented yet."""
        return None

    def Predict(self, data) -> any:
        """Prediction is not implemented yet."""
        return None
    
def test0() -> None:
    """Smoke-test the helpers (only the Normalize call is active)."""
    mat = CreateMat(3, 1)
    print(Normalize(mat))
    
def test1() -> None:
    """Exercise a single layer end to end: shape, weights, bias, forward."""
    lyr = layer(3, 1, 2, 1)
    print(lyr.GetWightsMatShape())

    weights = CreateMat(2, 3)
    print(f"wights_mat1={weights}")
    lyr.SetWightsMat(weights)

    bias = CreateMat(1, 1)
    print(f"bias1={bias}")
    lyr.SetBias(bias)

    vec = CreateMat(3, 1)
    print(f"input1={vec}")

    result = lyr.Forward(vec)
    print(f"output1={result}")
    
def test2() -> None:
    """Run one forward pass through a small [3, 4, 2] network."""
    net = NeuralNetwork([3, 4, 2])
    vec = CreateMat(3, 1)
    print(net.Forward(vec))
    
def test3() -> None:
    """Iteratively adjust one layer's weights until total loss < 1.

    Each round scales the weight magnitudes by the per-row demand and
    adds the result back onto the weights.

    NOTE(review): convergence of this update rule is not guaranteed —
    the loop may run indefinitely for unlucky random draws.
    """
    lyr = layer(3, 1, 2, 1)
    target = numpy.array([[1.0],
                          [2.0]])

    vec = CreateMat(3, 1)
    print(f"input={vec}")

    weights = CreateMat(2, 3)
    print(f"origin_mat={weights}")

    while True:
        lyr.SetWightsMat(weights)

        result = lyr.Forward(vec)
        print(f"output={result}")

        loss = LossFunc(result, target)
        print(f"loss={loss}")

        if numpy.sum(loss) < 1:
            print("finished")
            break

        demand = DemandFunc(result, target)
        print(f"demand={demand}")

        adjust = GetAdjustWeightsMat(weights, demand)
        print(f"adjust_mat={adjust}")

        weights = weights + adjust
        print(f"origin_mat={weights}")

if __name__ == "__main__":
    # Script entry point: only the weight-adjustment demo (test3) is enabled.
    # test0()
    # test1()
    # test2()
    test3()
    
    exit(0)