import random
import torch
import matplotlib.pyplot as plt

"""
SGD算法：每次使用单个样本去计算梯度，并且是随机的（随机打乱数据点的顺序）
"""

# 1. Generate the data set: 20 random sample points, all with target 0.
# Each input x has two features drawn uniformly from [0.5, 1.5]; with a
# target of 0 the optimum of pre = w1*x[0] + w2*x[1] is w1 = w2 = 0,
# matching the minimum of the plotted loss surface.
data = [
    ([random.uniform(0.5, 1.5), random.uniform(0.5, 1.5)], 0)
    for _ in range(20)
]

# 2. Hyperparameters
lr = 0.05    # learning rate: step size for each per-sample SGD update
Epochs = 20  # number of full passes over the data set
w1 = -1      # initial value of the first weight
w2 = 1       # initial value of the second weight

# --- Draw a contour map of the loss surface L(w1, w2) = w1^2 + 2*w2^2 ---
grid_a = torch.linspace(-1, 1, 100)
grid_b = torch.linspace(-1, 1, 100)
mesh_a, mesh_b = torch.meshgrid(grid_a, grid_b, indexing="ij")
surface = mesh_a ** 2 + 2 * mesh_b ** 2
fig, ax = plt.subplots()
ax.contour(mesh_a, mesh_b, surface)
# List that collects each (w1, w2) position visited by gradient descent.
points = []

# 3. SGD training loop (with per-epoch data shuffling)
for epoch in range(Epochs):
    # Record the weights at the start of the epoch for the descent path.
    points.append([w1, w2])
    random.shuffle(data)
    total_loss = 0
    for x, target in data:
        # Prediction and error for this single sample.
        pre = w1 * x[0] + w2 * x[1]
        error = pre - target
        total_loss += error ** 2
        # Gradient of (pre - target)^2 w.r.t. w1 and w2.
        g1 = 2 * error * x[0]
        g2 = 2 * error * x[1]
        # SGD: update the weights immediately using this sample's gradient.
        w1 -= lr * g1
        w2 -= lr * g2
    print(f"Loss:{total_loss / len(data):.4f}")
# Record the final weights as well: without this the plotted path would
# stop at the start of the last epoch, one step short of where the
# optimisation actually ends.
points.append([w1, w2])

# Convert the recorded descent path into a tensor and overlay it on the
# contour plot as black circles joined by line segments.
points = torch.tensor(points)
path_x, path_y = points[:, 0], points[:, 1]
ax.plot(path_x, path_y, "ko-")
plt.show()
