import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression, SGDRegressor, Ridge, Lasso, ElasticNet
from sklearn.metrics import mean_squared_error
from sklearn.preprocessing import StandardScaler

def createData(a=0.1, n_points=1000, max_theta=8 * np.pi):
    """Generate and plot an Archimedean spiral; return its Cartesian coordinates.

    Args:
        a: Tightness of the spiral — the radial distance grows by ``2*pi*a``
           per full turn (default 0.1, the original hard-coded value).
        n_points: Number of sample points along the curve (default 1000).
        max_theta: Final angle in radians; default ``8*pi`` gives four turns.

    Returns:
        Tuple ``(x, y)`` of 1-D numpy arrays with the spiral's Cartesian
        coordinates.

    Note:
        Calls ``plt.show()``, which blocks until the plot window is closed.
    """
    # Angles from 0 to max_theta; the Archimedean spiral is r = a * theta.
    theta = np.linspace(0, max_theta, n_points)
    r = a * theta

    # Polar -> Cartesian conversion.
    x = r * np.cos(theta)
    y = r * np.sin(theta)

    # Plot the spiral.
    plt.figure(figsize=(6, 6))
    plt.plot(x, y, color='blue', linewidth=2)
    plt.title('海螺图形 - 阿基米德螺线')
    plt.axis('equal')  # equal x/y scales so the spiral looks round
    plt.axis('off')    # hide the axes
    plt.show()
    return x, y

def creatRandom(n_samples=1000, n_features=20, noise=0.1, random_state=42):
    """Generate a reproducible synthetic regression dataset.

    Args:
        n_samples: Number of samples (default 1000).
        n_features: Number of features (default 20).
        noise: Standard deviation of Gaussian noise added to the target.
        random_state: Seed controlling the generated data.

    Returns:
        Tuple ``(X, y)`` — feature matrix of shape ``(n_samples, n_features)``
        and target vector of shape ``(n_samples,)``.
    """
    # make_regression is already deterministic via random_state; seeding the
    # global RNG as well is kept for backward compatibility (it pins any
    # later np.random usage by callers).
    np.random.seed(random_state)

    # 1. Generate the random dataset.
    X, y = make_regression(n_samples=n_samples, n_features=n_features,
                           noise=noise, random_state=random_state)
    return X, y

def main():
    """Train and compare several linear-regression variants on synthetic data."""
    # 1. Generate the dataset.
    # BUG FIX: the original called createData() (a 1-D spiral) followed by
    # exit(), so every line below was dead code; the intended regression
    # dataset call was commented out. creatRandom() returns a proper 2-D
    # feature matrix of shape (n_samples, n_features).
    X, y = creatRandom()

    # 2. Split into training and test sets.
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.2, random_state=42)

    # 3. Standardize features (important for gradient-descent methods).
    # BUG FIX: the original used X_train.reshape(-1, 1), which collapses all
    # features into a single column and destroys the sample/feature layout;
    # StandardScaler must be applied to the 2-D matrix as-is.
    scaler = StandardScaler()
    X_train_scaled = scaler.fit_transform(X_train)
    X_test_scaled = scaler.transform(X_test)

    # 4. Create the models under comparison.
    models = {
        "Normal Equation": LinearRegression(),
        "Gradient Descent": SGDRegressor(max_iter=1000, tol=1e-3, random_state=42),
        "Ridge": Ridge(alpha=1.0),
        "LASSO": Lasso(alpha=0.1),
        "Elastic Net": ElasticNet(alpha=0.1, l1_ratio=0.5)
    }

    # Collected per-model metrics.
    results = []

    # Train each model and evaluate on the held-out test set.
    for name, model in models.items():
        # SGD is scale-sensitive, so it gets the standardized data;
        # the closed-form / regularized solvers use the raw features.
        if name == "Gradient Descent":
            model.fit(X_train_scaled, y_train)
            y_pred = model.predict(X_test_scaled)
        else:
            model.fit(X_train, y_train)
            y_pred = model.predict(X_test)

        mse = mean_squared_error(y_test, y_pred)
        results.append({"Model": name, "MSE": round(mse, 2)})

    # Visualization: true values vs. predictions for each model.
    fig, axes = plt.subplots(2, 3, figsize=(18, 12))
    axes = axes.flatten()

    for i, (name, model) in enumerate(models.items()):
        if name == "Gradient Descent":
            y_pred = model.predict(X_test_scaled)
        else:
            y_pred = model.predict(X_test)

        axes[i].scatter(y_test, y_pred, alpha=0.6, color='b')
        # Diagonal = perfect prediction.
        axes[i].plot([y.min(), y.max()], [y.min(), y.max()], 'r--')
        axes[i].set_title(f'{name}\nMSE: {results[i]["MSE"]}')
        axes[i].set_xlabel("True Values")
        axes[i].set_ylabel("Predictions")

    # Remove the unused sixth subplot (only five models).
    fig.delaxes(axes[-1])

    plt.tight_layout()
    plt.show()

    # 5. Build and print the results table.
    results_df = pd.DataFrame(results)
    print("模型比较结果：")
    print(results_df.to_string(index=False))

    # Optionally print each model's learned coefficients.
    print("\n模型系数：")
    for name, model in models.items():
        if hasattr(model, 'coef_'):
            print(f"{name} 系数: {np.round(model.coef_, 2)}")
        else:
            print(f"{name} 系数: 无法访问")


if __name__ == '__main__':
    main()