import torch

# Toy dataset for y = 2x; the trained weight should converge to 2.0.
x_data = [1.0, 2.0, 3.0]
y_data = [2.0, 4.0, 6.0]

# Learnable weight. torch.tensor(..., requires_grad=True) is the modern
# replacement for torch.Tensor([...]) followed by setting .requires_grad.
w = torch.tensor([1.0], requires_grad=True)

def forward(x):
    """Linear model prediction: returns w * x.

    `*` is overloaded on tensors, so the result is a Tensor (tracked by
    autograd) even when x is a plain Python number.
    """
    return x * w

def loss(x, y):
    """Squared error between the model prediction for x and the target y."""
    error = forward(x) - y
    return error ** 2

# Prediction for x=4 before training (target function is y = 2x, so ideal is 8).
print('Before', 4, forward(4).item())

for epoch in range(100):
    for x, y in zip(x_data, y_data):
        step_loss = loss(x, y)  # forward pass: builds the graph and computes the loss
        step_loss.backward()    # backward pass: accumulates d(loss)/dw into w.grad
        # print('\tgrad:', x, y, w.grad.item())

        # SGD update. torch.no_grad() is the supported way to mutate a leaf
        # tensor in place without autograd tracking the update (the old
        # `w.data -= ...` pattern relies on the deprecated .data attribute).
        with torch.no_grad():
            w -= 0.01 * w.grad
            # Gradients accumulate across backward() calls; reset before the
            # next step so each update uses only the current sample's gradient.
            w.grad.zero_()
    print('progress:', epoch, step_loss.item())
print('After', 4, forward(4).item())



