import torch

# Demonstration of torch.Tensor.detach(): the detached tensor shares
# storage with the original, so an in-place edit is visible through both,
# and autograd detects the mutation when backward() runs.

a = torch.tensor([1, 2, 3.], requires_grad=True)
print(f'a.grad= {a.grad}')  # None: no backward pass has run yet

out = a.sigmoid()
print(f'out= {out}')

# detach() returns a tensor that shares memory with `out` but is cut off
# from the autograd graph (requires_grad=False).
c = out.detach()
print(f'c= {c}')

# Zeroing `c` in place also zeroes `out` (shared storage) and bumps the
# shared version counter autograd uses to detect stale saved tensors.
c.zero_()
print()
print(f'out= {out}')
print(f'c= {c}')

print(f'out.sum= {out.sum()}')

# sigmoid saved its output for the backward pass; that output was modified
# in place, so autograd raises RuntimeError rather than silently computing
# wrong gradients. Catch it so the demo runs to completion and the final
# a.grad print (still None — no gradient was accumulated) is reached.
try:
    out.sum().backward()
except RuntimeError as err:
    print(f'backward failed: {err}')
print(f'a.grad= {a.grad}')

