import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression, Lasso, Ridge
from sklearn.metrics import mean_squared_error

# 1. Generate X, y data
# 2. Split into training and test sets with train_test_split
# 3. Fit a linear regression model
# 4. Predict on the test set
# 5. Compute the mean squared error
# 6. Visualize the results

def polynomial(x, degree):
    """Expand a column vector into polynomial features.

    Given ``x`` of shape (n, 1), return an (n, degree) array whose
    columns are x, x**2, ..., x**degree (no bias column).
    """
    powers = [np.power(x, d) for d in range(1, degree + 1)]
    return np.concatenate(powers, axis=1)

plt.rcParams["font.sans-serif"] = ["SimHei"]  # SimHei so the Chinese titles/labels render
plt.rcParams["axes.unicode_minus"] = False    # keep minus signs legible with a CJK font

# 1. Generate X, y data: a noisy sine curve.
# Seed the global RNG so the noise — and therefore the fitted curves and the
# MSE values printed on the plots — is reproducible, matching the fixed
# random_state used for the train/test split below.
np.random.seed(42)
X = np.linspace(-3, 3, 300).reshape(-1, 1)
# X.shape is already (300, 1), so the noise needs no extra reshape.
y = np.sin(X) + np.random.uniform(-0.5, 0.5, X.shape)

# Top row: scatter + fitted curves; bottom row: coefficient bar charts.
# Height 8 (not 4) so the two rows of panels are not cramped.
fig, ax = plt.subplots(2, 3, figsize=(15, 8))
ax[0, 0].plot(X, y, "yo")
ax[0, 1].plot(X, y, "yo")
ax[0, 2].plot(X, y, "yo")

# 2. Split into training and test sets with train_test_split.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# --- Overfitting demo: ordinary least squares on degree-20 polynomial features ---
X_train3 = polynomial(X_train, degree=20)
X_test3 = polynomial(X_test, degree=20)

model = LinearRegression()
model.fit(X_train3, y_train)

# Held-out error, plus the fitted curve drawn over the full input range.
y_pred = model.predict(X_test3)
mse = mean_squared_error(y_test, y_pred)

panel = ax[0, 0]
curve = model.predict(polynomial(X, degree=20))
panel.plot(X, curve, color='r')
panel.text(-3, 1, f'测试集MSE={mse:.4f}')
panel.set_title("线性回归")

# One bar per polynomial coefficient; huge magnitudes are the overfitting signature.
coef = model.coef_.reshape(-1)
ax[1, 0].bar(np.arange(coef.size), coef)

# --- Lasso (L1) regression: shrinks most coefficients toward zero ---
model = Lasso(alpha=0.01)
model.fit(X_train3, y_train)

y_pred = model.predict(X_test3)
mse = mean_squared_error(y_test, y_pred)

panel = ax[0, 1]
curve = model.predict(polynomial(X, degree=20))
panel.plot(X, curve, color='r')
panel.text(-3, 1, f'测试集MSE={mse:.4f}')
panel.set_title("Lasso回归")

# Compare the coefficient magnitudes with the unregularized fit on the left.
coef = model.coef_.reshape(-1)
ax[1, 1].bar(np.arange(coef.size), coef)

# --- Ridge (L2) regression: shrinks coefficients without zeroing them ---
model = Ridge(alpha=1.0)
model.fit(X_train3, y_train)

y_pred = model.predict(X_test3)
mse = mean_squared_error(y_test, y_pred)

panel = ax[0, 2]
curve = model.predict(polynomial(X, degree=20))
panel.plot(X, curve, color='r')
panel.text(-3, 1, f'测试集MSE={mse:.4f}')
panel.set_title("Ridge回归")

# Coefficients stay dense but small compared with the unregularized fit.
coef = model.coef_.reshape(-1)
ax[1, 2].bar(np.arange(coef.size), coef)

plt.show()