# coding:utf-8
# Author : hiicy redldw
# Date : 2019/04/15
import tensorflow as tf
# tf.GradientTape is an opt-in feature: it gives best performance when not tracing.
# Since different operations may occur on each call, every forward-pass operation
# is recorded onto a "tape". To compute gradients the tape is played backwards and
# then discarded; a given tf.GradientTape can compute only one gradient, and any
# subsequent call raises a runtime error (unless created with persistent=True).
# NOTE(review): enable_eager_execution is TF 1.x API — TF 2.x is eager by default.
tf.enable_eager_execution()
# Record loss = w * w on a tape, then replay it backwards:
# d(loss)/dw = 2w = [[2.0]] for w = [[1.0]].
w = tf.Variable([[1.0]])
with tf.GradientTape() as g:
    loss = tf.multiply(w, w)

grad = g.gradient(loss, w)
print(grad)


x = tf.ones((2, 2))

# persistent=True allows gradient() to be called more than once on the same tape.
with tf.GradientTape(persistent=True) as t:
    # x is a plain Tensor (not a Variable), so it must be watched explicitly.
    t.watch(x)
    y = tf.reduce_sum(x)    # y = 4
    z = tf.multiply(y, y)   # z = y^2 = 16
print(tf.reduce_sum(x),'----------')
# Derivative of z with respect to the original input tensor x
dz_dx = t.gradient(z, x)  # dz/dx = (dz/dy)*(dy/dx) = 8 for every element of x
dz_dy = t.gradient(z, y)  # dz/dy = 2y = 8
# Fix: a persistent tape holds onto its recorded operations; drop it explicitly
# once all gradients have been taken (as recommended by the GradientTape docs).
del t
print(dz_dx,'-------')  # gradient has the same shape as x
print(dz_dy,'============')
for i in [0, 1]:
    for j in [0, 1]:
        assert dz_dx[i][j].numpy() == 8.0
x = tf.Variable(1.0)

# Nested tapes: the outer tape records the inner gradient computation itself,
# so the first derivative is differentiable and a second derivative can be taken.
with tf.GradientTape() as t:
    with tf.GradientTape() as t2:
        y = x * x * x
    # Compute the gradient inside the 't' context manager
    # which means the gradient computation is differentiable as well.
    dy_dx = t2.gradient(y, x)  # dy/dx = 3x^2 = 3 at x = 1
d2y_dx2 = t.gradient(dy_dx, x)  # d2y/dx2 = 6x = 6 at x = 1
print(dy_dx)
# Fix: the second derivative was computed but never displayed.
print(d2y_dx2)
