# autograd
from __future__ import print_function
import torch

# A tensor created with requires_grad=True has every operation on it
# recorded by autograd; calling backward() on a scalar result then fills
# .grad with the accumulated gradient w.r.t. that tensor.
x1 = torch.ones(3, 3)  # plain tensor: gradient tracking is off by default
print(x1)

x = torch.ones(2, 2, requires_grad=True)  # tracked leaf tensor
print(x)

y = x + 2  # produced by an op on a tracked tensor, so y carries a grad_fn
print(y)

print("-----grad_fn--------")
print(x.grad_fn)  # None: x is a leaf created directly by the user
print(y.grad_fn)  # AddBackward0: records how y was computed

z = 3 * y * y
out = torch.mean(z)  # scalar: mean over the four elements
print(z)
print(out)

# Backprop from the scalar output; d(out)/dx = 1.5 * (x + 2) = 4.5 everywhere.
out.backward()
print("反向传播：")
print(x.grad)

# requires_grad_(...) toggles gradient tracking on an existing tensor
# in place (the trailing underscore marks an in-place mutation).
print("-----------------")
a = torch.randn(2, 2)
a = (a * 3) / (a - 1)  # derived tensor; tracking was never enabled
print(a)
print(a.requires_grad)  # False

a.requires_grad_(True)  # enable tracking from this point on
b = torch.sum(a * a)    # b now participates in the autograd graph
print(b)

# Two ways to keep autograd from recording:
#   .detach() returns a tensor sharing data but with tracking off;
#   torch.no_grad() disables recording for everything inside its scope.
print("-----------------")
print(x.requires_grad)      # True: x was created with requires_grad=True

y = x.pow(2)
print(y.requires_grad)      # True: result of an op on a tracked tensor

y = x.detach()
print(y.requires_grad)      # False: detached from the graph

with torch.no_grad():
    y = x.pow(2)
    print(y.requires_grad)  # False: nothing recorded inside no_grad()
