# Ridge regression (L2-norm regularization)
# Closed-form approach: construct and solve the normal equations directly

import numpy as np
import matplotlib.pyplot as plt
import sklearn.model_selection as ms
from sklearn.metrics import mean_squared_error

# Alternatively, sklearn's ridge solver could be used:
# from sklearn.linear_model import Ridge    # ridge regression estimator

plt.rcParams['font.sans-serif']=['SimHei'] # render Chinese axis/legend labels
plt.rcParams['axes.unicode_minus']=False # render the minus sign correctly


reg_lambda = 0   # L2 regularization strength; try 0, 0.01, 0.05, 0.1 in turn
ratio = 0.3      # fraction of the data held out as the test set
nums = 101       # number of sample points on [-1, 1]
x_data = np.linspace(-1, 1, nums).reshape(-1, 1)  # inputs as a column vector



num_coeffs = 8   # number of polynomial coefficients (powers x^0 .. x^7)
y_data = x_data ** 2  # ground-truth curve: y = x^2

seed = 2021
np.random.seed(seed)  # fixed seed for reproducible noise and split
y_data += np.random.randn(*x_data.shape) * 0.3  # additive Gaussian noise, sigma = 0.3

print(y_data.shape)
print(x_data.shape)
# Hold out `ratio` of the samples for evaluating the fitted polynomial.
x_train, x_test, y_train, y_test = ms.train_test_split(x_data, y_data, test_size=ratio, random_state=seed)
# print(x_train.shape)
# print(x_test.shape)
plt.figure(0)
plt.scatter(x_train, y_train, label='训练集')  # training points
plt.scatter(x_test, y_test, c='r', label='测试集')  # test points (red)



# ---------------------------------------------------------------- closed-form solve
# Design matrix X = [x^0, x^1, ..., x^(num_coeffs-1)], one power per column,
# built from num_coeffs instead of spelling each power out by hand.
X = np.hstack([x_train ** i for i in range(num_coeffs)])
rows, cols = X.shape
# Ridge normal equations: (X^T X + lambda * I) w = X^T y.
# NOTE(review): this also penalizes the bias column x^0; classic ridge usually
# leaves the intercept unregularized — kept as-is to match the original fit.
XX = X.T.dot(X) + reg_lambda * np.identity(cols)
# Solve the linear system directly; numerically more stable than forming
# the explicit inverse with np.linalg.inv and multiplying.
w = np.linalg.solve(XX, X.T.dot(y_train))   # shape (num_coeffs, 1)
print(w, w.shape)

# #　# #　# #　# #　# #　# #　# #　# #　# #　# #　# #　# #　# #　# #　# #　# #　# #　# #　# #　
def model(X, w):             # polynomial model
    """Evaluate the polynomial sum_i w[i] * X**i at the points in X.

    X : array of input values (any shape broadcastable with w[i]).
    w : coefficient vector; its length determines the number of power
        terms, so the model no longer depends on the global num_coeffs.
    Returns the array of predictions.
    """
    # dtype=float guards against an integer X silently truncating the sum.
    result = np.zeros_like(X, dtype=float)
    for power, coeff in enumerate(w):   # accumulate all len(w) power terms
        result += coeff * np.power(X, power)
    return result

def loss_fn(x, y, w):        # regularized loss
    """Mean squared error plus the L2 (ridge) penalty on the weights."""
    residual = model(x, w) - y                    # prediction error
    mse = np.mean(np.square(residual))            # data-fit term
    penalty = reg_lambda * np.sum(np.square(w))   # L2 regularization term
    return mse + penalty

def loss_fn2(x, y, w):      # mean squared error
    """Plain MSE between the model's predictions on x and the targets y."""
    err = model(x, w) - y
    return np.mean(err ** 2)


# ---------------------------------------------------------------- plotting
xx = np.linspace(-1, 1, nums * 20)   # dense grid for smooth curves
y_pred = model(xx, w)                # fitted polynomial on the grid
loss = loss_fn2(x_test, y_test, w)   # test-set mean squared error
y_true = xx ** 2                     # noise-free ground truth
# Overlay the true curve and the fitted curve on the existing scatter plot.
plt.plot(xx, y_true, c='k', label='true')
plt.plot(xx, y_pred, c='r', label='final')
plt.legend(loc='lower right')
# Raw string so the '\l' of '$\lambda$' is not parsed as an (invalid) escape;
# the runtime text is unchanged.
plt.title(r'测试集结果 $\lambda$ = %.3f, Cost = %.6f' % (reg_lambda, loss))
# The ymax/xmax keyword names were removed from matplotlib; top/right are the
# supported equivalents.
plt.ylim(top=1.7)
plt.xlim(right=1.2)


# # # # # # # # # # # # # # # # # # # # # # # # # #
print(loss_fn2(x_test, y_test, w))


plt.show()