import numpy as np
import matplotlib.pyplot as plt
from python_ai.ML.lin_regression.xlib import *


# Load the training set: one sample per row, column 0 = feature, column 1 = target.
data = np.loadtxt(r'./data/ex1data1.txt', delimiter=',')
m = data.shape[0]  # number of training samples
print(m)
print(data[:5])

# Column slices with a list index keep the (m, 1) column-vector shape.
x = data[:, [0]]
y = data[:, [1]]
print(x[:5])
print(y[:5])

# Design matrix: prepend a bias column of ones so theata[0] is the intercept.
X = np.hstack([np.ones((m, 1)), x])
print(X[:5])
print(x.shape, y.shape, X.shape)

# Run gradient descent; `alpher` is the learning-rate keyword expected by xlib.
max_iters = 15000
num_iters = max_iters
theata, history, xscores = gradient_descent_algorithm(X, y, alpher=0.01, num_iters=num_iters)

# The algorithm may stop early; the cost history length is the true iteration count.
num_iters = len(history)
if num_iters < max_iters:
    print(f'Converged at {num_iters}th iteration')
else:
    print(f'Not converged after {num_iters} iterations !!!')
print('my implementation')
print(f'THETA = {theata}')
print(f'score = {xscores[-1]}')


def func_fit(x, theata):
    """Evaluate the fitted line at *x*.

    ``theata[0]`` is the intercept and ``theata[1]`` the slope; *x* may be
    a scalar or a NumPy array (broadcasting applies).
    """
    intercept, slope = theata[0], theata[1]
    return intercept + slope * x


# Render a 2x2 summary figure: fit line, full cost curve, late cost curve, scores.
plt.ioff()
plt.figure(figsize=[12, 8])
rows, cols = 2, 2
plt.rcParams['font.sans-serif'] = ['Simhei']  # use SimHei so Chinese labels render
plt.rcParams['axes.unicode_minus'] = False  # keep the minus sign displayable

# Panel 1: scatter of the data with the fitted regression line.
plt.subplot(rows, cols, 1)
plt.title('梯度下降算法')
plt.scatter(x, y)
plt.plot(x, func_fit(x, theata), 'r-')
plt.grid()

# Panel 2: cost history over every iteration.
plt.subplot(rows, cols, 2)
plt.plot(range(num_iters), history)
plt.grid()

# Panel 3: second half of the cost history, to inspect late convergence.
plt.subplot(rows, cols, 3)
half = num_iters // 2
plt.plot(range(half, num_iters), history[half:])
plt.grid()

# Panel 4: score trajectory over every iteration.
plt.subplot(rows, cols, 4)
plt.plot(range(num_iters), xscores)
plt.grid()

plt.show()
