import functools

import torch
import torch.optim as optim

def on_tensor(f):
    """Decorator: coerce the single argument to a tensor before calling *f*.

    Wraps ``f`` so callers may pass anything ``torch.as_tensor`` accepts
    (Python scalars, lists, numpy arrays, tensors); existing tensors are
    passed through without copying.
    """
    # functools.wraps preserves f's __name__/__doc__/metadata, which the
    # original bare lambda discarded.
    @functools.wraps(f)
    def wrapper(x):
        return f(torch.as_tensor(x))
    return wrapper

@on_tensor
def f(x):
    """Quadratic loss with its minimum at x = 5."""
    residual = x - 5
    return residual * residual

# Demo: take a single SGD step on f starting from x = 0.
x = torch.tensor(0, dtype=torch.double, requires_grad=True)

# Forward pass and gradient: d/dx (x - 5)^2 = 2*(x - 5) = -10 at x = 0.
y = f(x)
y.backward()

# NOTE(review): weight_decay=0.8 is unusually large — presumably intentional
# for this demo; confirm if reused elsewhere.
optimizer = optim.SGD(
    (x,),
    lr=0.01,
    momentum=0.01,
    dampening=0.01,
    weight_decay=0.8,
)
optimizer.step()
