import tensorflow as tf

# Minimal gradient-descent demo: minimize loss(w) = (w + 1)^2 element-wise.
# Each component of w converges toward the analytic minimum at w = -1.
# NOTE: shape must be the tuple (2,), not (2) — a bare (2) is just the int 2.
w = tf.Variable(tf.constant([5, 10], shape=(2,), dtype=tf.float32))
print(w.numpy())

LR = 0.2      # learning rate for the vanilla SGD update
EPOCHS = 40   # number of gradient-descent steps

for epoch in range(EPOCHS):
    # Record the forward pass so the tape can differentiate loss w.r.t. w.
    with tf.GradientTape() as tape:
        loss = tf.square(w + 1)
    grads = tape.gradient(loss, w)

    # Vanilla SGD step: w <- w - LR * dL/dw (in-place variable update).
    w.assign_sub(LR * grads)
    # `loss` here was computed BEFORE this step's update, so the printed
    # loss lags the printed w by one step. `.numpy()` keeps the output
    # consistent with how w is printed (plain array, no tensor repr noise).
    print(f"after {epoch+1} epoch,w is {w.numpy()},loss is {loss.numpy()}")