import matplotlib.pyplot as plt
import torch

# Training data: 5 samples with 3 features each (e.g. three exam scores)
# and one regression target per sample.
x_data = [[73., 80., 75.], [93., 88., 93.],
          [89., 91., 90.], [96., 98., 100.], [73., 66., 70.]]
y_data = [[152.], [185.], [180.], [196.], [142.]]
# 1. Multivariate linear regression with PyTorch.
# (1) Data preparation
# Convert the Python lists into float32 tensors.
x = torch.Tensor(x_data)
y = torch.Tensor(y_data)

print(x.shape)
print(y.shape)

# (2) Model: a single linear layer mapping 3 input features to 1 output.
model = torch.nn.Linear(in_features=x.shape[1], out_features=y.shape[1])
# Mean-squared-error loss for regression.
loss_fn = torch.nn.MSELoss()
# Adam optimizer (adaptive gradient descent); learning rate chosen by hand.
# NOTE: the original comment claimed SGD, but the code uses Adam.
op = torch.optim.Adam(params=model.parameters(), lr=0.1)

# (3) Training loop: 2000 full-batch gradient steps.
loss_list = []
for i in range(2000):
    # Clear gradients accumulated from the previous step.
    op.zero_grad()
    # Forward pass, loss, backward pass, parameter update.
    h = model(x)
    loss = loss_fn(h, y)
    loss.backward()
    op.step()
    # .item() extracts a plain Python float; the old loss.data.numpy()
    # idiom is deprecated and keeps an extra reference to the graph.
    loss_list.append(loss.item())
    # Print the loss every 10 iterations.
    if i % 10 == 0:
        print(i, loss.item())
# (4) Print the final predictions; no_grad() disables autograd tracking
# since this is pure inference.
with torch.no_grad():
    print(model(x))
# (5) Plot the training-loss curve over all iterations.
plt.plot(loss_list)
plt.show()
