import numpy as np
import matplotlib.pyplot as plt
from python_ai.ML.lin_regression.xlib import *
from scipy.optimize import curve_fit


def func(x):
    """Ground-truth linear model: y = 2.5 + 0.7 * x (works on scalars and arrays)."""
    intercept = 2.5
    slope = 0.7
    return intercept + slope * x


np.random.seed(1)  # fixed seed so the noisy sample is reproducible
m = 20  # number of training samples

# Inputs 1..m; linspace already returns an ndarray, so no extra np.array() needed.
Xarr = np.linspace(1, m, m)
# Design matrix with a leading bias column of ones: shape (m, 2).
X = np.c_[np.ones((m, 1)), Xarr.reshape(m, 1)]
# Targets: true line plus Gaussian noise (std-1 draws scaled by 2), as an (m, 1) column.
y = (func(Xarr) + np.random.normal(0, 1, m) * 2).reshape(m, 1)

num_iters = 10000
# NOTE(review): "theata" is a typo for "theta" but is kept — the plotting code
# below references this name. Returns: fitted parameters, per-iteration cost
# history, and per-iteration scores (exact semantics defined in xlib).
theata, history, xscores = gradient_descent_algorithm(X, y, num_iters=num_iters)
print(theata)


def func_fit(x, theata):
    """Evaluate the fitted line theata[0] + theata[1] * x at x."""
    intercept = theata[0]
    slope = theata[1]
    return intercept + slope * x


def func4curve_fit(x, a, b):
    """Linear model for scipy.optimize.curve_fit: intercept a plus slope b times x."""
    scaled = b * x
    return a + scaled

# --- Visualisation: 2x2 grid comparing the fitted lines and convergence curves ---
plt.ioff()  # turn off interactive mode so plt.show() blocks at the end
plt.figure(figsize=[12, 8])
pr = 2  # subplot grid rows
pc = 2  # subplot grid columns
plt.rcParams['font.sans-serif'] = ['Simhei']  # use SimHei so the Chinese labels render
plt.rcParams['axes.unicode_minus'] = False  # render minus signs correctly with this font

# Subplot 1: noisy data scatter with three lines — gradient-descent fit (red),
# scipy curve_fit least-squares fit (blue), and the true function (green dashed).
# (Title text is Chinese for "gradient descent algorithm".)
plt.subplot(pr, pc, 1)
plt.title('梯度下降算法')
plt.scatter(Xarr, y)
plt.plot(Xarr, func_fit(Xarr, theata), 'r-', label='梯度下降线性回归')
# curve_fit needs a 1-D target, hence y.ravel(); popv is the covariance (unused).
popt, popv = curve_fit(func4curve_fit, Xarr, y.ravel())
plt.plot(Xarr, func_fit(Xarr, popt), 'b-', label='curve fit')
plt.plot(Xarr, func(Xarr), 'g--', label='real func')
plt.legend()
plt.grid()

# Subplot 2: cost history over all iterations.
plt.subplot(pr, pc, 2)

xx = range(num_iters)
plt.plot(xx, history)
plt.grid()

# Subplot 3: second half of the cost history only —
# late-stage convergence is invisible at full scale.
plt.subplot(pr, pc, 3)
xx = range(num_iters // 2, num_iters)
plt.plot(xx, history[num_iters // 2:])
plt.grid()

# Subplot 4: per-iteration scores from the optimiser.
# NOTE(review): exact meaning of xscores is defined in xlib — confirm there.
plt.subplot(pr, pc, 4)
xx = range(num_iters)
plt.plot(xx, xscores)
plt.grid()

plt.show()
