# Hung-yi Lee, Machine Learning course
# Gradient descent in practice: the AdaGrad algorithm

# Import packages
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
# Generate training data: ten (x, y) samples for the linear model
# ydata = b + w * xdata
xdata = [338.,333.,328.,207.,226.,25.,179.,60.,208.,606.]
ydata = [640.,633.,619.,393.,428.,27.,193.,66.,226.,1591.]

# Candidate grids for the bias b and weight w used to draw the loss surface.
blst = np.arange(-200, -100, 1)
wlst = np.arange(-5, 5, 0.1)

# Mean squared error at every (w, b) grid point, via numpy broadcasting.
# z has shape (len(wlst), len(blst)): rows index w, columns index b,
# which is exactly the orientation plt.contourf(blst, wlst, z) expects.
# (The original allocated z as (len(blst), len(wlst)) but filled it
# transposed — it only worked because both grids had 100 points.)
_x = np.asarray(xdata)
_y = np.asarray(ydata)
z = np.mean(
    (_y - blst[None, :, None] - wlst[:, None, None] * _x) ** 2,
    axis=2,
)

# Initial parameter values, base learning rate, and iteration budget.
b, w, eta, iterations = -120, -4, 1, 100000
# Parameter trajectory, starting from the initial point (for plotting).
bhist, whist = [b], [w]
# AdaGrad accumulators: running sums of squared gradients.
acc_b = 0.0
acc_w = 0.0

for _ in range(iterations):
    # Gradients of the total squared error with respect to b and w.
    grad_b = sum(-2.0 * (y - b - w * x) for x, y in zip(xdata, ydata))
    grad_w = sum(-2.0 * (y - b - w * x) * x for x, y in zip(xdata, ydata))
    acc_b += grad_b ** 2
    acc_w += grad_w ** 2
    # AdaGrad update: per-parameter step size eta / sqrt(sum of grad^2).
    b -= eta / np.sqrt(acc_b) * grad_b
    w -= eta / np.sqrt(acc_w) * grad_w
    # Record the new point for the trajectory plot.
    bhist.append(b)
    whist.append(w)

# Plot the loss surface, the optimum, and the AdaGrad trajectory.
plt.contourf(blst, wlst, z, 50, alpha=0.5, cmap='jet')
# Closed-form optimum of the least-squares fit, marked with an 'x'.
plt.plot([-188.4], [2.67], 'x', markersize=12, markeredgewidth=3, color='orange')
# Path taken by the optimizer in (b, w) space.
plt.plot(bhist, whist, 'o-', markersize=3, linewidth=1.5, color='black')
plt.axis([-200, -100, -5, 5])
plt.xlabel(r'$b$', fontsize=16)
plt.ylabel(r'$w$', fontsize=16)
plt.show()


