import numpy as np
import matplotlib.pyplot as plt
from scipy.io import loadmat
from scipy.optimize import minimize

# Load exercise 5 dataset.
# Available keys: 'X', 'y', 'Xtest', 'ytest', 'Xval', 'yval'.
data = loadmat('data/ex5data1.mat')

# Training set: prepend a bias column of ones to the features.
X_train, y_train = data['X'], data['y']
X_train = np.hstack((np.ones((X_train.shape[0], 1)), X_train))


# Validation set
# X_val, y_val = data['Xval'], data['yval']
# Test set
# X_test, y_test = data['Xtest'], data['ytest']


# 数据可视化
def ex5_plot(X, y):
    """Scatter-plot the feature column(s) of X (skipping the bias) against y.

    X is expected to carry a leading bias column of ones; only X[:, 1:]
    is drawn on the horizontal axis.
    """
    fig, axes = plt.subplots(figsize=(5, 5))
    axes.scatter(X[:, 1:], y)
    axes.set_xlabel('water level')
    axes.set_ylabel('Dam Water yield')


# 带正则的线性回归
def reg_cost(theta, X, y, lam):
    """Regularized linear-regression cost.

    J(theta) = (||X theta - y||^2 + lam * ||theta[1:]||^2) / (2m).
    The intercept term theta[0] is not penalized. y may be a column
    vector; it is flattened before the residual is formed.
    """
    m = len(X)
    residuals = X @ theta - y.flatten()
    sq_error = residuals @ residuals          # sum of squared residuals
    penalty = lam * (theta[1:] @ theta[1:])   # skip the bias weight
    return (sq_error + penalty) / (2 * m)


# 带正则的线性回归的梯度下降
def reg_gradient(theta, X, y, lam):
    """Gradient of the regularized linear-regression cost.

    grad = (X^T (X theta - y) + lam * theta) / m, except that the
    intercept weight theta[0] receives no regularization term.
    """
    m = len(X)
    residuals = X @ theta - y.flatten()
    data_term = X.T @ residuals
    # Zero out the penalty on the bias weight.
    penalty = np.concatenate(([0.0], lam * theta[1:]))
    return (data_term + penalty) / m


# ex5_plot(X_train, y_train)

# cost_train = reg_cost(X_train, y_train, theta1, lam1)
# print(cost_train)


def train_model(X, y, lam):
    """Fit regularized linear regression and return the optimal theta.

    Starts from an all-ones parameter vector and minimizes reg_cost with
    its analytic gradient (reg_gradient) using the TNC solver.
    """
    initial_theta = np.ones(X.shape[1])
    result = minimize(fun=reg_cost,
                      x0=initial_theta,
                      args=(X, y, lam),
                      method='TNC',
                      jac=reg_gradient)
    return result.x


# Train with lambda = 1 and overlay the fitted line on the training data.
theta_final = train_model(X_train, y_train, 1)

ex5_plot(X_train, y_train)
plt.plot(X_train[:, 1], X_train @ theta_final, c='r')
plt.show()
