# Stochastic gradient descent (SGD)
# Gradient-descent training implemented from scratch (linear model y = w * x)
import matplotlib.pyplot as plt

# Training data for the linear model; the true relation is y = 2 * x.
x_data = [1.0,2.0,3.0]
y_data = [2.0,4.0,6.0]

w = 1.0  # Initial guess of the weight (updated in place by the SGD loop below)

# Buffers for plotting the loss curve after training.
loss_list = []
epoch_list = []

def forward(x):
    """Predict y for input x with the linear model using the global weight w."""
    return w * x

# Batch gradient descent variant, kept for reference: averages the squared
# error over the whole dataset instead of updating per sample.
# def cost(xs,ys):  # compute the MSE over all samples
#     cost = 0
#     for x,y in zip(xs,ys):
#         y_pred = forward(x)
#         cost += (y_pred - y) ** 2
#     cost_list.append(cost)
#     return cost / len(xs)

def loss(x, y):
    """Return the squared error for a single training sample (x, y)."""
    residual = forward(x) - y
    return residual ** 2

def gradient(x, y):
    """Analytic gradient of the single-sample squared error w.r.t. w:
    d/dw (x*w - y)^2 = 2*x*(x*w - y).
    """
    error = x * w - y
    return 2 * x * error


# Show the model's prediction for x = 4 before any training (w starts at 1.0).
print("Predict (before training)", 4, forward(4))

for epoch in range(100):  # train for 100 epochs over the dataset
    epoch_list.append(epoch)
    epoch_loss = 0.0  # accumulate per-sample losses to report a mean per epoch
    for x, y in zip(x_data, y_data):
        # Stochastic update: adjust w after every individual sample.
        grad = gradient(x, y)
        w -= 0.01 * grad  # learning rate 0.01
        print("\t part w:", w)
        print("\tgrad:", x, y, grad)
        epoch_loss += loss(x, y)  # loss measured after the weight update
    # Record the epoch-mean loss. (The original appended only the LAST
    # sample's loss, which misrepresents the epoch in the plotted curve and
    # left the variable undefined if the inner loop never ran.)
    mean_loss = epoch_loss / len(x_data)
    loss_list.append(mean_loss)
    print("progress:", epoch, "w=", w, "loss=", mean_loss)

print("Predict (after training)", 4, forward(4))

# Draw the training curve: mean loss per epoch.
plt.plot(epoch_list, loss_list)
plt.ylabel('loss')
plt.xlabel('Epoch')
plt.show()