import numpy as np
import torch as pt
import matplotlib.pyplot as plt

# Reproducibility: fix both the NumPy RNG (noise) and the torch RNG (layer init).
np.random.seed(777)
pt.manual_seed(777)

ALPHA = 0.001    # SGD learning rate
N_EPOCHS = 1000  # number of full-batch gradient steps

# Synthetic data: y = W*x + B plus Gaussian noise (sigma = 3).
W = 3.5
B = -5.7
x = np.linspace(-3, 7, 11)
M = len(x)
y = W * x + B + np.random.normal(0, 3, size=M)
x = x.reshape((-1, 1))  # features as an (M, 1) column
# pt.tensor(..., dtype=...) is the modern constructor; the legacy
# pt.Tensor(...) casts through the default dtype silently.
xt = pt.tensor(x, dtype=pt.float32)
y = y.reshape((-1, 1))  # targets as an (M, 1) column
yt = pt.tensor(y, dtype=pt.float32)
plt.scatter(x, y)

# model
# A single affine unit (1 input -> 1 output, with bias), trained against
# mean-squared error using plain stochastic gradient descent.
model = pt.nn.Linear(in_features=1, out_features=1, bias=True)
criterion = pt.nn.MSELoss()
optim = pt.optim.SGD(params=model.parameters(), lr=ALPHA)

# train
# Full-batch gradient descent; record the loss at every epoch and print
# roughly 20 evenly spaced progress lines.
loss_history = np.zeros(N_EPOCHS)
GROUP = int(np.ceil(N_EPOCHS / 20))
for step in range(N_EPOCHS):
    optim.zero_grad()  # gradients accumulate by default -- clear them first
    ht = model(xt)               # forward pass: predictions for all samples
    cost = criterion(ht, yt)     # scalar MSE loss
    cost.backward()              # backprop into model.weight / model.bias
    optim.step()                 # SGD parameter update
    # .item() extracts the scalar as a Python float; the legacy `.data`
    # attribute bypasses autograd tracking and is discouraged.
    loss_history[step] = cost.item()
    if step % GROUP == 0:
        print(f'#{step + 1}: cost = {cost.item()}')
# Report the final epoch as well, unless the loop above just printed it.
if step % GROUP != 0:
    print(f'#{step + 1}: cost = {cost.item()}')
print(f'Weights: {model.weight}')
print(f'Bias: {model.bias}')

# hypothesis
# Draw the fitted line through the two extreme x values only -- a straight
# line needs just its endpoints.
xlimr = np.array([x.min(), x.max()])
xlim = xlimr.reshape((-1, 1))  # (2, 1) column, matching the training shape
# Modern constructor with an explicit dtype (legacy pt.Tensor casts silently).
xlimt = pt.tensor(xlim, dtype=pt.float32)
with pt.no_grad():  # inference only -- no autograd graph needed
    ht = model(xlimt)
print(ht.shape)  # sanity check: expect torch.Size([2, 1])
# Convert to NumPy before handing to matplotlib; safe because the tensor
# was produced under no_grad() and carries no graph.
plt.plot(xlim, ht.numpy(), 'r-')

plt.show()
