import tensorflow as tf

# 设置随机参数 w 的初始值
# Trainable parameter w, initialized to 5.0
w = tf.Variable(tf.constant(5, dtype=tf.float32))
# Learning rate for gradient descent
lr = 0.2
# Number of training iterations
# (renamed from `epoch` so the loop variable below does not shadow it)
epochs = 40

# Loss function: loss = (w + 1)^2, minimized at w = -1
for epoch in range(epochs):
    # The GradientTape context records operations on w so that
    # gradients can be computed afterwards.
    with tf.GradientTape() as tape:
        loss = tf.square(w + 1)
    # d(loss)/dw — gradient of the loss with respect to w
    grads = tape.gradient(loss, w)

    # assign_sub performs in-place subtraction, i.e. w -= lr * grads
    w.assign_sub(lr * grads)
    # Convert tensors to Python floats explicitly for printing
    print("After %s epoch, w is %f , loss is %f " % (epoch, w.numpy(), loss.numpy()))
