import numpy as np
from matplotlib import pyplot as plt
from tensorboardX import SummaryWriter

writer = SummaryWriter('./runs/SGDM')

# Gradient descent for a regression problem.
# Preprocessing: standardize every column to zero mean / unit variance.
data = np.loadtxt(fname='./datasets/linear_data.csv', delimiter='\t')
data = (data - data.mean(axis=0)) / data.std(axis=0)

# Split the features from the targets: the last column is the label.
x, y = data[:, :-1], data[:, -1]
M, N = x.shape

# Parameter initialization (zeros; random-normal init also works for this
# convex problem).
w = np.zeros(N)
b = 0

# Predict the regression output for a single input sample.
def predict(x):
    """Linear model output for one sample: w . x + b (reads globals w, b)."""
    return np.dot(w, x) + b

print(f'predict x[0]: {predict(x[0])}, ground truth: {y[0]}')

# Squared-error loss for a single (input, target) pair.
def get_loss(x, y):
    """Squared error of the prediction for sample x against target y."""
    residual = predict(x) - y
    return residual * residual


# Total (summed, not averaged) loss over the data set; averaging would make the
# value small and hide the jitter that SGD causes.
def total_loss():
    """Sum of squared errors over all M training samples (reads globals x, y, M)."""
    return sum(get_loss(x[i], y[i]) for i in range(M))

print(total_loss())

# Numerically estimate the gradient with forward finite differences.
def get_gradient(x, y):
    """Forward-finite-difference estimate of d(loss)/dw and d(loss)/db.

    Perturbs each parameter by eps in turn, measures the loss change on the
    single sample (x, y), and restores the parameter afterwards. Mutates the
    global parameters only transiently (net effect: unchanged, up to float
    round-off of the +=/-= pair, as in any finite-difference probe).
    """
    global b
    eps = 1e-3
    base_loss = get_loss(x, y)
    grad_w = np.empty(N)
    for j in range(N):
        # Probe one weight coordinate at a time.
        w[j] += eps
        grad_w[j] = (get_loss(x, y) - base_loss) / eps
        w[j] -= eps
    # Same one-sided difference for the bias.
    b += eps
    grad_b = (get_loss(x, y) - base_loss) / eps
    b -= eps
    return grad_w, grad_b



# Training hyper-parameters; 'kind' selects which momentum variant(s) to run.
hyper_dict = {
    'epochs': 2500,
    'lr': 1e-3,
    'beta': 0.9,
    'kind': 'both'
}

# Epoch axis plus one loss curve per variant, for plotting.
x_v = list(range(hyper_dict['epochs']))
y_v1 = []
y_v2 = []

# Momentum accumulators, one per parameter.
momentum_w = np.zeros(N)
momentum_b = 0

# Train with SGD + momentum. Two formulations of the same idea:
#   'vanilla': EMA of the gradient, m = beta*m + (1-beta)*g, step = lr*m
#   'modern' : lr folded into the accumulator, m = beta*m + lr*g, step = m
#   'both'   : run both in sequence (parameters and momentum reset between
#              runs) and overlay the two loss curves.
if hyper_dict['kind'] == 'vanilla':
    for epoch in range(hyper_dict['epochs']):
        i = np.random.randint(M)
        # Stochastic gradient on a single random sample.
        gradient_w, gradient_b = get_gradient(x[i], y[i])
        # Exponential moving average of the gradient.
        momentum_w = hyper_dict['beta'] * momentum_w + (1-hyper_dict['beta']) * gradient_w
        momentum_b = hyper_dict['beta'] * momentum_b + (1-hyper_dict['beta']) * gradient_b
        # Parameter update, scaled by the learning rate.
        w -= hyper_dict['lr'] * momentum_w
        b -= hyper_dict['lr'] * momentum_b
        y_v1.append(total_loss())
        if epoch % 100 == 0:
            print(f'total loss at {epoch}epoch: {total_loss()}')
    plt.plot(x_v, y_v1)
    plt.xlabel('epochs')
    plt.ylabel('total loss')
elif hyper_dict['kind'] == 'modern':
    for epoch in range(hyper_dict['epochs']):
        i = np.random.randint(M)
        # Stochastic gradient on a single random sample.
        gradient_w, gradient_b = get_gradient(x[i], y[i])
        # Momentum with the learning rate already folded in.
        momentum_w = hyper_dict['beta'] * momentum_w + hyper_dict['lr'] * gradient_w
        momentum_b = hyper_dict['beta'] * momentum_b + hyper_dict['lr'] * gradient_b
        # Parameter update (no extra lr factor here).
        w -= momentum_w
        b -= momentum_b
        y_v2.append(total_loss())
        if epoch % 100 == 0:
            print(f'total loss at {epoch}epoch: {total_loss()}')
    plt.plot(x_v, y_v2)
    plt.xlabel('epochs')
    plt.ylabel('total loss')
elif hyper_dict['kind'] == 'both':
    # First run: the 'vanilla' EMA formulation.
    for epoch in range(hyper_dict['epochs']):
        i = np.random.randint(M)
        # Stochastic gradient on a single random sample.
        gradient_w, gradient_b = get_gradient(x[i], y[i])
        # Exponential moving average of the gradient.
        momentum_w = hyper_dict['beta'] * momentum_w + (1 - hyper_dict['beta']) * gradient_w
        momentum_b = hyper_dict['beta'] * momentum_b + (1 - hyper_dict['beta']) * gradient_b
        # Parameter update, scaled by the learning rate.
        w -= hyper_dict['lr'] * momentum_w
        b -= hyper_dict['lr'] * momentum_b
        y_v1.append(total_loss())
        if epoch % 500 == 0:
            print(f'total loss at {epoch}epoch: {total_loss()}')
    # Reset momentum and parameters so the second run starts from scratch.
    momentum_w = np.zeros(shape=(N,))
    momentum_b = 0
    w = np.zeros(shape=(N,))
    b = 0
    # Second run: the 'modern' lr-folded formulation.
    for epoch in range(hyper_dict['epochs']):
        i = np.random.randint(M)
        # Stochastic gradient on a single random sample.
        gradient_w, gradient_b = get_gradient(x[i], y[i])
        # Momentum with the learning rate already folded in.
        momentum_w = hyper_dict['beta'] * momentum_w + hyper_dict['lr'] * gradient_w
        momentum_b = hyper_dict['beta'] * momentum_b + hyper_dict['lr'] * gradient_b
        # Parameter update (no extra lr factor here).
        w -= momentum_w
        b -= momentum_b
        y_v2.append(total_loss())
        # NOTE(review): TensorBoard only logs this second ('modern') run.
        writer.add_scalar('total loss', total_loss(), epoch)
        if epoch % 500 == 0:
            print(f'total loss at {epoch}epoch: {total_loss()}')
    plt.plot(x_v, y_v1, c='r')
    plt.plot(x_v, y_v2, c='b')
    plt.legend(['SGD1', 'SGD2'])
    plt.xlabel('epochs')
    plt.ylabel('total loss')
plt.show()