from matplotlib import pyplot as plt
from mxnet import autograd, nd
import random

# ---- Synthetic data set and learnable parameters ----
# NOTE(review): "ture_w"/"ture_b" are typos for "true_w"/"true_b"; renaming
# would also require updating the print statements at the bottom of the file.
num_in = 2  # number of input features per example
exam = 1000  # number of synthetic examples
ture_w = [2, -3.4]  # ground-truth weights used to generate the labels
ture_b = 4.2  # ground-truth bias used to generate the labels
x = nd.random.normal(shape=(exam, num_in))  # features: (1000, 2), standard normal
y = ture_w[0] * x[:, 0] + ture_w[1] * x[:, 1] + ture_b  # noise-free labels
y += nd.random.normal(scale=0.01, shape=y.shape)  # add small Gaussian label noise
w = nd.random.normal(scale=0.01, shape=(num_in, 1))  # learnable weight, small random init
b = nd.zeros(1)  # learnable bias, zero init
# Allocate gradient buffers so autograd can store dL/dw and dL/db.
w.attach_grad()
b.attach_grad()


def linereg(X, w, b):
    """Linear-regression forward pass: X @ w + b.

    Args:
        X: feature matrix, shape (n, num_in).
        w: weight column vector, shape (num_in, 1).
        b: bias, broadcast-added to every prediction.

    Returns:
        Predictions of shape (n, 1).
    """
    prediction = nd.dot(X, w)
    return prediction + b


# define cost function
def squared_loss(y_pred, y):
    """Halved squared error, elementwise.

    Reshapes the targets to match the predictions so that a (n,) vs (n, 1)
    mismatch cannot silently broadcast into an (n, n) matrix.
    """
    diff = y_pred - y.reshape(y_pred.shape)
    return diff ** 2 / 2


# define algorithm
def sgd(params, lr):
    """One step of mini-batch stochastic gradient descent.

    Updates every parameter in-place (via slice assignment, so the
    autograd-attached arrays keep their identity): p <- p - lr * p.grad.
    """
    for weight in params:
        weight[:] = weight - lr * weight.grad


# read mini-batch data set
def data_iter(size, features, labels):
    """Yield (features, labels) mini-batches in a random order.

    Shuffles the example indices once, then walks them in chunks of
    `size`; the final chunk may be smaller when size does not divide
    the number of examples.
    """
    total = len(features)
    order = list(range(total))
    random.shuffle(order)
    for start in range(0, total, size):
        # Slicing clamps at the end of the list, so the last batch is short.
        batch_idx = nd.array(order[start:start + size])
        yield features.take(batch_idx), labels.take(batch_idx)


def show_plot():
    """Scatter-plot the second feature column against the labels."""
    plt.rcParams['figure.figsize'] = (3.5, 2.5)  # compact default figure
    feature_col = x[:, 1].asnumpy()
    targets = y.asnumpy()
    plt.scatter(feature_col, targets, 1)  # marker size 1
    plt.show()


lr = 0.003  # learning rate
epochs = 3  # number of full passes over the training set
net = linereg
loss = squared_loss
batch_size = 10

# Training loop: for each epoch, sweep the shuffled mini-batches, record
# the forward pass so autograd can compute gradients, then take one SGD
# step per batch.
# BUGFIX: removed the stray `break` at the end of the epoch loop — it
# unconditionally stopped training after the first epoch, making
# `epochs = 3` dead configuration.
for epoch in range(epochs):
    for x1, y1 in data_iter(batch_size, x, y):
        with autograd.record():
            l = loss(net(x1, w, b), y1)
        l.backward()  # gradient of the summed mini-batch loss w.r.t. w and b
        sgd([w, b], lr)
    # Report the mean loss over the whole training set after each epoch.
    train_l = loss(net(x, w, b), y)
    print('epoch %d,loss %f' % (epoch + 1, train_l.mean().asnumpy()))
print(ture_b, b)
print(ture_w, w)
show_plot()
