import numpy as np
import matplotlib.pyplot as plt

"""
小批量随机梯度下降实验
我们通过各种方式从原本的梯度样本中抽取一部分，发现无论我们如何抽取，都不会影响最后的收敛。
但是小批量的优势就在于，计算量更少，计算相同的结果，将会提前收敛

讨论一个问题：如果我们不从w入手，而是x特征抽取一部分样本，结果能否相同收敛。
"""

# Parameter grids spanning the loss surface e(w1, w2) = w1**2 + 2 * w2**2.
w1 = np.linspace(-1, 1, 30)
w2 = np.linspace(-0.8, 0.8, 30)
# "Mini-batch" variant: keep only the first 5 samples of each grid.
w1_small = w1[:5]
w2_small = w2[:5]

# Starting points of the two descent trajectories.
# NOTE(review): the full run starts at (-1, -0.8) but the small run at
# (-0.8, -0.8) -- confirm the asymmetry is intentional.
current_w1 = -1
current_w2 = -0.8
current_w1_small = -0.8
current_w2_small = -0.8
# Constant learning rate; try 0.2 / 0.02 / 0.9 to see qualitatively
# different trajectories.
lr = 0.1
num_epochs = 100
# Record each run's trajectory, then take num_epochs noisy gradient steps.
predict_w = []
predict_w_small = []
for epoch in range(num_epochs):
    predict_w.append([current_w1, current_w2])
    predict_w_small.append([current_w1_small, current_w2_small])
    # Analytic gradient of e = w1**2 + 2 * w2**2: (2*w1, 4*w2).
    grad1, grad2 = 2 * current_w1, 4 * current_w2
    grad1_small, grad2_small = 2 * current_w1_small, 4 * current_w2_small
    # Simulated gradient noise: one draw per trajectory, shared by both
    # coordinates.  NOTE(review): np.random.random() is uniform on [0, 1),
    # i.e. not zero-mean, so it biases both runs -- confirm intended.
    noise1 = np.random.random()
    noise2 = np.random.random()
    # SGD update: w <- w - lr * (gradient + noise).
    current_w1 -= lr * (grad1 + noise1)
    current_w2 -= lr * (grad2 + noise1)
    current_w1_small -= lr * (grad1_small + noise2)
    current_w2_small -= lr * (grad2_small + noise2)

predict_w = np.array(predict_w)
predict_w_small = np.array(predict_w_small)
# Contour of the loss surface with each descent trajectory overlaid.
# (A second meshgrid over w1_small/w2_small was previously built here but
# never used; both panels draw the same full-grid contour for comparison.)
x1, x2 = np.meshgrid(w1, w2, indexing='ij')
plt.subplot(121)
plt.contour(x1, x2, x1 ** 2 + 2 * x2 ** 2, colors="#1f77b4")
plt.plot(predict_w[:, 0], predict_w[:, 1], 'ro-')
plt.subplot(122)
plt.contour(x1, x2, x1 ** 2 + 2 * x2 ** 2, colors="#1f77b4")
plt.plot(predict_w_small[:, 0], predict_w_small[:, 1], 'bo-')
plt.show()
