'''
@File  : ex5.py
@Author: Jiaojiao
@Date  : 2021/2/5 13:13
@Desc  : 方差和偏差、学习曲线
'''


import numpy as np
import scipy.io as sio
import scipy.optimize as opt
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns


# Load the ex5 .mat file and return flattened train / cv / test splits.
def load_data(path):
    """Read the MATLAB data file at *path* and yield the arrays
    X, y, Xval, yval, Xtest, ytest (in that order), each flattened to 1-D."""
    raw = sio.loadmat(path)
    keys = ('X', 'y', 'Xval', 'yval', 'Xtest', 'ytest')
    return map(np.ravel, (raw[k] for k in keys))

# Unregularized squared-error cost
def cost(theta, X, y):
    """Return J(theta) = ||X @ theta - y||^2 / (2m) as a scalar."""
    residual = X @ theta - y
    return residual.dot(residual) / (2 * X.shape[0])

# Regularized cost: the bias term theta[0] is never penalized
def reg_cost(theta, X, y, l=1):
    """Return cost() plus an L2 penalty l/(2m) * sum(theta[1:]^2)."""
    m = X.shape[0]
    penalty = (l / (2 * m)) * np.sum(theta[1:] ** 2)
    return cost(theta, X, y) + penalty

# Gradient of the unregularized cost with respect to theta
def gradient(theta, X, y):
    """Return dJ/dtheta = X^T (X theta - y) / m as a 1-D array."""
    m = X.shape[0]
    residual = X @ theta - y
    return (X.T @ residual) / m

# Regularized gradient: the bias component carries no penalty
def reg_gradient(theta, X, y, l=1):
    """Return gradient() plus (l/m) * theta with the intercept term zeroed.
    The caller's theta is left unmodified."""
    m = X.shape[0]
    penalty = (l / m) * theta   # fresh array, so theta itself is untouched
    penalty[0] = 0              # never regularize the intercept
    return gradient(theta, X, y) + penalty

# Fit regularized linear regression with scipy's TNC solver
def linear_reg(X, y, l=1):
    """Minimize reg_cost over theta and return the full OptimizeResult
    (the fitted parameter vector is in res.x)."""
    start = np.random.rand(X.shape[1])  # random init; least-squares objective is convex
    return opt.minimize(fun=reg_cost,
                        x0=start,
                        args=(X, y, l),
                        method='TNC',
                        jac=reg_gradient,
                        options={'disp': True})

# Plot training / cross-validation error against training-set size
def plot_learningcurve(X, y, Xval, yval):
    """For each subset size i = 1..m, refit on the first i rows (l=0) and
    record the unregularized training and CV costs, then plot both curves."""
    m = X.shape[0]
    sizes = np.arange(1, m + 1)
    training_cost, cv_cost = [], []
    for i in sizes:
        fit = linear_reg(X[:i, :], y[:i], l=0)
        training_cost.append(reg_cost(fit.x, X[:i, :], y[:i], l=0))
        cv_cost.append(reg_cost(fit.x, Xval, yval, l=0))
    plt.plot(sizes, training_cost, label='training cost')
    plt.plot(sizes, cv_cost, label='cv cost')
    plt.legend(loc=1)
    plt.show()

# Column-wise z-score scaling
def normalize_feature(df):
    """Return (df - mean) / std per column (pandas sample std, ddof=1)."""
    return (df - df.mean()) / df.std()

# Expand a 1-D array into polynomial feature columns f1..f{power}
def poly_features(x, power, as_ndarray=False):
    """Return a DataFrame with columns f1 = x, f2 = x^2, ..., f{power} = x^power.

    If as_ndarray is True, return the underlying ndarray instead.
    BUG FIX: DataFrame.as_matrix() was removed in pandas 1.0; to_numpy()
    is the documented replacement and returns the same array.
    """
    data = {'f{}'.format(i): np.power(x, i) for i in range(1, power + 1)}
    df = pd.DataFrame(data)
    return df.to_numpy() if as_ndarray else df

# Build normalized polynomial features, with a bias column, for each input
def poly_data(*args, power):
    """For every array in args: expand to polynomial features, z-score each
    column, and prepend a column of ones. Returns one ndarray per input."""
    prepared = []
    for x in args:
        scaled = normalize_feature(poly_features(x, power=power)).values
        prepared.append(np.insert(scaled, 0, np.ones(scaled.shape[0]), axis=1))
    return prepared

# Plot training and cross-validation cost as a function of lambda
def plot_ltccv_curve(l_candidate, X_poly, y, Xval_poly, yval):
    """For each candidate lambda, fit on the training set with that lambda and
    record the UNregularized training / CV costs, then plot cost vs lambda.

    NOTE(review): X_poly / Xval_poly are assumed to be 1-D feature vectors;
    they are reshaped to a column and a bias column is prepended here —
    confirm against the caller before passing multi-column data.
    """
    X_pe, Xval_pe = [np.insert(v.reshape(v.shape[0], 1), 0, np.ones(v.shape[0]), axis=1)
                     for v in (X_poly, Xval_poly)]
    training_cost, cv_cost = [], []
    for l in l_candidate:
        # BUG FIX: the candidate lambda must be passed to the fit; previously
        # the default l=1 was used for every point, so the curve never varied.
        res = linear_reg(X_pe, y, l)
        training_cost.append(cost(res.x, X_pe, y))
        cv_cost.append(cost(res.x, Xval_pe, yval))
    plt.plot(l_candidate, training_cost, label='training')
    plt.plot(l_candidate, cv_cost, label='cross validation')
    plt.legend(loc=2)
    plt.xlabel('lambda')
    plt.ylabel('cost')
    plt.xticks(np.array(l_candidate))
    plt.show()


def main():
    """Run the ex5 pipeline: load data, visualize, fit a regularized linear
    model, plot the fit, and draw the learning curve."""
    # Load the train / cross-validation / test splits
    X, y, Xval, yval, Xtest, ytest = load_data('ex5data1.mat')
    for arr in [X, y, Xval, yval, Xtest, ytest]:
        print(arr.shape)
    # Scatter plot of the training set.
    # FIX: modern seaborn requires keyword x=/y= (positional removed in 0.12)
    # and renamed size= to height=.
    df = pd.DataFrame({'water_level': X, 'flow': y})
    sns.lmplot(x='water_level', y='flow', data=df, fit_reg=False, height=7)
    plt.show()
    # Prepend a bias column to each split for linear regression
    X_e, Xval_e, Xtest_e = [np.insert(v.reshape(v.shape[0], 1), 0, np.ones(v.shape[0]), axis=1)
                            for v in (X, Xval, Xtest)]
    res = linear_reg(X_e, y, l=1)
    theta = res.x
    # Plot the fitted line over the training data
    plt.scatter(X, y, label="Training data")
    plt.plot(X, X * theta[1] + theta[0], label="Prediction")
    plt.legend(loc=2)
    plt.show()
    plot_learningcurve(X, y, Xval, yval)
    # # Polynomial regression data (poly_data takes *args, not a list)
    # X_poly, Xval_poly, Xtest_poly = poly_data(X, Xval, Xtest, power=8)
    # # Search for the best lambda
    # l_candidate = [0, 0.001, 0.003, 0.01, 0.03, 0.1, 0.3, 1, 3, 10]
    # plot_ltccv_curve(l_candidate, X_poly, y, Xval_poly, yval)


if __name__ == '__main__':
    # Run only when executed as a script, not when imported as a module.
    main()