import pandas as pd
import numpy as np
from sklearn.linear_model import LinearRegression, Ridge, Lasso
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt

# Data preparation: the Boston housing dataset is reassembled from the raw
# CMU file. The file interleaves each record across two physical rows:
# even rows hold one group of feature columns, odd rows hold two more
# feature values (cols 0-1) plus the regression target (col 2).
data_url = "http://lib.stat.cmu.edu/datasets/boston"
# Raw string for the regex separator — "\s+" in a plain string triggers an
# invalid-escape SyntaxWarning on Python 3.12+.
raw_df = pd.read_csv(data_url, sep=r"\s+", skiprows=22, header=None)
data = np.hstack([raw_df.values[::2, :], raw_df.values[1::2, :2]])  # feature matrix
target = raw_df.values[1::2, 2]  # target vector (col 2 of every odd row)
x, y = data, target

# Hold out 30% of the samples as a test set; fixed seed for reproducibility.
x_train, x_test, y_train, y_test = train_test_split(
    x, y, test_size=0.3, random_state=1
)

# Instantiate the three regressors with their default hyperparameters,
# paired with display names used in the printed reports and plot titles.
lr, rd, ls = LinearRegression(), Ridge(), Lasso()
models = [lr, rd, ls]
names = ['Linear', 'Ridge', 'Lasso']

# Fit each model on the training split and report its score on the test split.
print("基础模型预测准确率：")
for name, model in zip(names, models):
    # sklearn estimators return self from fit(), so the calls chain.
    score = model.fit(x_train, y_train).score(x_test, y_test)
    print(f"{name}模型的预测准确率为：{score:.5f}")

# Sweep regularization strength (alpha) for Ridge and Lasso, recording the
# test-set score at each value. scores[i] is a list parallel to `alphas`.
scores = []
alphas = [0.0001, 0.0005, 0.001, 0.005, 0.01, 0.05, 0.1, 0.5, 1, 5, 10, 50]
for index, model in enumerate(models):
    if index == 0:
        # LinearRegression has no alpha parameter: fit once and repeat the
        # score, instead of refitting an identical model for every alpha.
        base_score = model.fit(x_train, y_train).score(x_test, y_test)
        scores.append([base_score] * len(alphas))
        continue
    model_scores = []
    for alpha in alphas:
        if index == 1:  # Ridge
            model.set_params(alpha=alpha)
        else:  # Lasso: raise max_iter so the solver converges at small alpha
            model.set_params(alpha=alpha, max_iter=100000)
        model.fit(x_train, y_train)
        model_scores.append(model.score(x_test, y_test))
    scores.append(model_scores)

# Plot each model's score-vs-alpha curve and print its best test score.
fig = plt.figure(figsize=(10, 7))
for i, name in enumerate(names):
    plt.subplot(2, 2, i + 1)
    plt.plot(range(len(alphas)), scores[i], 'g-')
    # Label the ticks with the actual alpha values — bare indices 0..11
    # on the x-axis carry no information about the regularization strength.
    plt.xticks(range(len(alphas)), [str(a) for a in alphas],
               rotation=45, fontsize=7)
    plt.xlabel('alpha')
    plt.ylabel('score')
    plt.title(name)
    print(f"{name}模型的最大预测准确率为：{max(scores[i]):.5f}")
plt.tight_layout()  # keep subplot titles and tick labels from overlapping
plt.show()