import torch
import torch.nn as nn
import torch.nn.functional as F


'''

单个梯度的计算
'''

# x=torch.randn(5,5,requires_grad=True)
# y=torch.randn(5,5,requires_grad=True)

# print('x',x)
# print('y',y)




# loss = nn.MSELoss()(x,y)
# loss.backward()
# print('x的梯度',x.grad)
# print('手动计算的提取',2*(x-y)/25)

'''
x tensor([[ 1.8071, -0.4026,  1.2456, -0.1049, -0.5836],
        [ 2.1934, -0.3836, -0.4395, -0.5803,  1.4962],
        [ 0.6408,  0.3258,  1.1157, -0.5545,  1.2839],
        [ 0.2640, -0.5274, -1.0247, -0.2381, -0.4271],
        [-0.9854,  0.5780,  0.3419, -0.5261,  0.4050]], requires_grad=True)
y tensor([[ 1.8071, -0.4026,  1.2456, -0.1049, -0.5836],
        [ 2.1934, -0.3836, -0.4395, -0.5803,  1.4962],
        [ 0.6408,  0.3258,  1.1157, -0.5545,  1.2839],
        [ 0.2640, -0.5274, -1.0247, -0.2381, -0.4271],
        [-0.9854,  0.5780,  0.3419, -0.5261,  0.4050]], requires_grad=True)
x的梯度 tensor([[ 0.1491, -0.0224,  0.0677,  0.0582,  0.0738],
        [ 0.2213, -0.0017,  0.0278, -0.1634,  0.0967],
        [ 0.0576, -0.0465, -0.0507, -0.0842,  0.0600],
        [ 0.1181, -0.0754, -0.0788, -0.1382, -0.0807],
        [-0.1598,  0.0176,  0.0837, -0.1259,  0.0797]])
手动计算的提取 tensor([[ 0.1491, -0.0224,  0.0677,  0.0582,  0.0738],
        [ 0.2213, -0.0017,  0.0278, -0.1634,  0.0967],
        [ 0.0576, -0.0465, -0.0507, -0.0842,  0.0600],
        [ 0.1181, -0.0754, -0.0788, -0.1382, -0.0807],
        [-0.1598,  0.0176,  0.0837, -0.1259,  0.0797]], grad_fn=<DivBackward0>)


'''


'''
如果梯度不清零的话 ，会出现一个问题，梯度一直会累加
例如  x的梯度  第一次是 -0.1011  第二次就是 -0.2022  因为会保留第一次的梯度 所以每次需要清零

'''


# Demo: gradients accumulate across backward() calls unless explicitly
# cleared. We zero x.grad after each iteration, so both iterations print
# the same gradient (without zeroing, the second print would be doubled).
x = torch.randn(5, 5, requires_grad=True)
y = torch.randn(5, 5, requires_grad=True)

print('x', x)
print('y', y)  # bug fix: was print('y', x) — printed x under the 'y' label
for _ in range(2):
    loss = nn.MSELoss()(x, y)
    loss.backward()
    print('x的梯度', x.grad)
    # MSE is mean((x - y)^2) over 25 elements, so d(loss)/dx = 2*(x - y)/25;
    # this hand-computed value should match x.grad printed above.
    print('手动计算的提取', 2 * (x - y) / 25)
    x.grad.zero_()  # clear accumulated gradient so the next backward() starts fresh
    
'''
梯度不清零的情况下
x tensor([[-0.6722, -0.3389,  0.1352,  0.4232, -0.2228],
        [ 0.6902, -0.0512, -0.6944,  0.7166,  0.5740],
        [-0.9518, -0.6644,  0.3154, -0.4156,  0.0726],
        [-0.8459,  0.1159,  1.8857, -0.2239, -2.5777],
        [ 0.0682, -1.0383,  1.3385, -0.0944,  0.7507]], requires_grad=True)
y tensor([[-0.6722, -0.3389,  0.1352,  0.4232, -0.2228],
        [ 0.6902, -0.0512, -0.6944,  0.7166,  0.5740],
        [-0.9518, -0.6644,  0.3154, -0.4156,  0.0726],
        [-0.8459,  0.1159,  1.8857, -0.2239, -2.5777],
        [ 0.0682, -1.0383,  1.3385, -0.0944,  0.7507]], requires_grad=True)
x的梯度 tensor([[-0.1011,  0.0315, -0.0835,  0.0449,  0.1453],
        [ 0.0412,  0.1652, -0.0883,  0.1045,  0.0394],
        [-0.0556,  0.1741, -0.0991, -0.1435,  0.0262],
        [-0.1589,  0.1047,  0.0795, -0.0553, -0.2979],
        [-0.1750, -0.0488,  0.0351, -0.0640,  0.0398]])
手动计算的提取 tensor([[-0.1011,  0.0315, -0.0835,  0.0449,  0.1453],
        [ 0.0412,  0.1652, -0.0883,  0.1045,  0.0394],
        [-0.0556,  0.1741, -0.0991, -0.1435,  0.0262],
        [-0.1589,  0.1047,  0.0795, -0.0553, -0.2979],
        [-0.1750, -0.0488,  0.0351, -0.0640,  0.0398]], grad_fn=<DivBackward0>)
x的梯度 tensor([[-0.2022,  0.0630, -0.1670,  0.0899,  0.2906],
        [ 0.0824,  0.3305, -0.1767,  0.2090,  0.0789],
        [-0.1112,  0.3482, -0.1981, -0.2869,  0.0524],
        [-0.3178,  0.2094,  0.1590, -0.1106, -0.5957],
        [-0.3501, -0.0975,  0.0703, -0.1281,  0.0796]])
手动计算的提取 tensor([[-0.1011,  0.0315, -0.0835,  0.0449,  0.1453],
        [ 0.0412,  0.1652, -0.0883,  0.1045,  0.0394],
        [-0.0556,  0.1741, -0.0991, -0.1435,  0.0262],
        [-0.1589,  0.1047,  0.0795, -0.0553, -0.2979],
        [-0.1750, -0.0488,  0.0351, -0.0640,  0.0398]], grad_fn=<DivBackward0>)


'''

'''
梯度清零的情况下

x tensor([[-0.7203,  0.0891, -0.8713, -1.1697, -0.8645],
        [ 1.4741, -0.2939,  0.1397,  0.5953, -0.2834],
        [ 0.8024, -0.5344, -0.7923, -1.0188,  2.6049],
        [ 0.1821, -1.4956,  0.1379, -0.8129, -0.3709],
        [-0.5830,  0.5912, -0.4974,  0.7314,  0.5703]], requires_grad=True)
y tensor([[-0.7203,  0.0891, -0.8713, -1.1697, -0.8645],
        [ 1.4741, -0.2939,  0.1397,  0.5953, -0.2834],
        [ 0.8024, -0.5344, -0.7923, -1.0188,  2.6049],
        [ 0.1821, -1.4956,  0.1379, -0.8129, -0.3709],
        [-0.5830,  0.5912, -0.4974,  0.7314,  0.5703]], requires_grad=True)
x的梯度 tensor([[-0.0260, -0.2063, -0.0845, -0.1621, -0.0350],
        [ 0.3085, -0.0123,  0.0610,  0.0061, -0.0414],
        [-0.0602,  0.0288, -0.1636, -0.0766,  0.1328],
        [-0.1243, -0.1305,  0.0350, -0.0814, -0.0493],
        [-0.0012,  0.0709,  0.1134,  0.1930,  0.1061]])
手动计算的提取 tensor([[-0.0260, -0.2063, -0.0845, -0.1621, -0.0350],
        [ 0.3085, -0.0123,  0.0610,  0.0061, -0.0414],
        [-0.0602,  0.0288, -0.1636, -0.0766,  0.1328],
        [-0.1243, -0.1305,  0.0350, -0.0814, -0.0493],
        [-0.0012,  0.0709,  0.1134,  0.1930,  0.1061]], grad_fn=<DivBackward0>)
x的梯度 tensor([[-0.0260, -0.2063, -0.0845, -0.1621, -0.0350],
        [ 0.3085, -0.0123,  0.0610,  0.0061, -0.0414],
        [-0.0602,  0.0288, -0.1636, -0.0766,  0.1328],
        [-0.1243, -0.1305,  0.0350, -0.0814, -0.0493],
        [-0.0012,  0.0709,  0.1134,  0.1930,  0.1061]])
手动计算的提取 tensor([[-0.0260, -0.2063, -0.0845, -0.1621, -0.0350],
        [ 0.3085, -0.0123,  0.0610,  0.0061, -0.0414],
        [-0.0602,  0.0288, -0.1636, -0.0766,  0.1328],
        [-0.1243, -0.1305,  0.0350, -0.0814, -0.0493],
        [-0.0012,  0.0709,  0.1134,  0.1930,  0.1061]], grad_fn=<DivBackward0>)
'''