"""
@author: admin
@time: 2025/6/28 13:21
神经网络测试，数据来自《深度学习的数学》 nn.xlsx
"""
import torch

batch = 64  # number of training samples fed through the network per step
hidden_layer = 3  # number of units in the hidden layer
input_num = 12  # number of input features per sample (binary pixels)
output_num = 2  # number of units in the output layer
# First-layer weight matrix (12 rows x 3 cols), values taken from the book's
# worked example (nn.xlsx): w2[i, j] connects input feature i to hidden unit j.
w2 = torch.tensor([
    [3.214, -4.562, -0.541],
    [-2.359, -1.071, -2.808],
    [-1.382, 3.991, -2.218],
    [5.730, 5.310, -2.286],
    [-4.044, -3.275, 1.017],
    [-1.716, 5.457, -1.821],
    [5.361, 0.303, 0.940],
    [-0.289, 3.505, 1.463],
    [-1.712, 3.601, -0.774],
    [-1.456, -0.836, -2.440],
    [1.496, -0.193, 3.128],
    [0.423, -3.249, 2.292],
])

# Second-layer weight matrix (3 rows x 2 cols): row j connects hidden unit j
# to the two output units.
w3 = torch.tensor([
    [-3.5750, -0.9410],
    [4.4460, 2.9310],
    [5.6660, -3.4100],
])

# Bias vectors, one value per unit: b2 for the hidden layer (3 units),
# b3 for the output layer (2 units).
b3 = torch.tensor([-8.087, 2.727])
b2 = torch.tensor([-1.3480, 2.5390, -3.8790])

# Class labels for the 64 training samples: the first 32 samples are class 0,
# the last 32 are class 1 (float32, same as the original literal tensor).
t = torch.cat([torch.zeros(32), torch.ones(32)])
print('t_shape', t.expand(output_num, -1).shape)
# Training pixel data: four lists of 192 binary values each (64 samples x 3
# values per sample once reshaped below).  NOTE(review): presumably x1..x4 are
# the four 3-pixel rows of 64 flattened 4x3 digit images from the book's
# nn.xlsx — TODO confirm against the source workbook.
x1 = ([1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0,
       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1,
       1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1,
       0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0,
       1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0,
       0, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0,
       1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0,
       1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0,
       0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0,
       0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0])
# Second group of 3 values for each of the 64 samples.
x2 = ([1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1,
       0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1,
       1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1,
       1, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1,
       1, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1,
       1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0,
       0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0,
       0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 1,
       0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0,
       0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0,
       0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0])
# Third group of 3 values for each of the 64 samples.
x3 = ([1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1,
       1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1,
       1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0,
       1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0,
       1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1,
       0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0,
       0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1,
       0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1,
       0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0,
       0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0,
       0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0])
# Fourth group of 3 values for each of the 64 samples.
x4 = ([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0,
       0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1,
       1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
       1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1,
       1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0,
       0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0,
       0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1,
       0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0,
       1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0,
       0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0])
# Convert each pixel group to a (64, 3) float tensor, then pack them side by
# side into one (batch, input_num) = (64, 12) design matrix.
x1, x2, x3, x4 = (
    torch.tensor(group, dtype=torch.float32).reshape(64, 3)
    for group in (x1, x2, x3, x4)
)
x = torch.cat([x1, x2, x3, x4], dim=1)
print('xshape:', x.shape)
learning_rate = 0.01

# One-hot targets, (batch, output_num): column 0 fires for class-0 samples
# (t == 0), column 1 for class-1 samples (t == 1).  BUGFIX: the original used
# t.expand(output_num, -1).T, which copies the SAME label column into both
# output positions, so the two output units could only learn identical
# functions.  Hoisted out of the loop since it never changes.
target = torch.stack([1.0 - t, t], dim=1)

for i in range(6000):
    # --- Forward pass (b2/b3 broadcast over the batch dimension automatically) ---
    z2 = x @ w2 + b2                  # (batch, hidden_layer)
    a2 = torch.sigmoid(z2)            # hidden activations
    z3 = a2 @ w3 + b3                 # (batch, output_num)
    a3 = torch.sigmoid(z3)            # output activations
    # Sum-of-squares loss over the whole batch.
    loss = (a3 - target).pow(2).sum()
    if i % 100 == 0:
        print(f'{i: }, loss:{loss}')
    # --- Backward pass (manual gradients; sigmoid'(z) = a * (1 - a)) ---
    grad_l = 2 * (a3 - target)
    grad_z3 = grad_l * a3 * (1 - a3)
    grad_b3 = grad_z3.sum(dim=0)      # sum over the batch dimension
    grad_w3 = a2.T @ grad_z3
    grad_a2 = grad_z3 @ w3.T
    grad_z2 = grad_a2 * a2 * (1 - a2)
    grad_b2 = grad_z2.sum(dim=0)      # sum over the batch dimension
    grad_w2 = x.T @ grad_z2

    # --- Plain gradient-descent parameter update ---
    w2 -= learning_rate * grad_w2
    w3 -= learning_rate * grad_w3
    b2 -= learning_rate * grad_b2
    b3 -= learning_rate * grad_b3

# Report the trained parameters; note that w3 is printed transposed.
for label, value in (('w2', w2), ('w3', w3.T), ('b2', b2), ('b3', b3)):
    print(f'{label}={value}')
