import torch
import torch.nn
import torch.optim as optim


# Demonstration of a single gradient-descent step in PyTorch:
# forward pass through a 1-unit linear layer, sum-of-squared-error loss,
# backward pass, and one Adadelta parameter update.

fc1 = torch.nn.Linear(7, 1)  # maps a length-7 feature vector to one scalar output
optimizer = optim.Adadelta(fc1.parameters(), lr=0.01)

x = torch.tensor([1., 2., 1., 3., 5., 2., 4.])  # single unbatched input, shape (7,)
y = torch.tensor([0.])                          # target, shape (1,)

fc1_out = fc1(x)
y_ = fc1_out  # raw linear output used directly as the prediction (no activation)

optimizer.zero_grad()            # clear any stale gradients before backward
loss = (y - y_).square().sum()   # scalar sum-of-squared-errors loss
loss.backward()                  # populate .grad on fc1's weight and bias
optimizer.step()                 # apply one Adadelta update in place
