# day6多项式拟合
import numpy as np
import matplotlib.pyplot as plt
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression

def polynomial_regression_demo(degree=2):
    """Fit and compare linear vs polynomial regression on a tiny toy dataset.

    Prints both models' coefficients and R² scores, then plots the raw
    points with the linear fit (dashed) and a smooth polynomial curve.

    Args:
        degree: Polynomial degree used for the feature expansion.

    Returns:
        Tuple ``(linear_model, poly_model)`` of the two fitted regressors,
        so callers can inspect them without re-fitting. (Previously the
        function returned None; ignoring the return value still works.)
    """
    # Configure matplotlib so the CJK labels/titles below render instead of
    # empty boxes. NOTE(review): font availability is platform-dependent —
    # confirm at least one of these fonts exists on the target machine.
    plt.rcParams["font.sans-serif"] = ["SimHei", "Microsoft YaHei", "Arial Unicode MS"]
    plt.rcParams["axes.unicode_minus"] = False  # keep '-' rendering with CJK fonts

    # Toy data: y = 2x exactly, so the linear baseline is already a perfect fit.
    X = np.array([[1], [2], [3], [4], [5]])
    y = np.array([2, 4, 6, 8, 10])

    # Plain linear regression as the comparison baseline.
    linear_model = LinearRegression()
    linear_model.fit(X, y)
    y_pred_linear = linear_model.predict(X)

    # Polynomial feature expansion. include_bias=False avoids a redundant
    # all-ones column: LinearRegression fits its own intercept, and the
    # duplicate bias column made the first printed coefficient meaningless.
    poly = PolynomialFeatures(degree=degree, include_bias=False)
    X_poly = poly.fit_transform(X)

    # Polynomial regression = linear regression on the expanded features.
    poly_model = LinearRegression()
    poly_model.fit(X_poly, y)

    # Report parameters and goodness of fit (R²) for both models.
    print(f"线性回归系数: {linear_model.coef_}, 截距: {linear_model.intercept_}")
    print(f"{degree}阶多项式回归系数: {poly_model.coef_}, 截距: {poly_model.intercept_}")
    print(f"线性回归R²分数: {linear_model.score(X, y):.3f}")
    print(f"多项式回归R²分数: {poly_model.score(X_poly, y):.3f}")

    # Visualization: scatter the raw points and overlay both fits.
    plt.figure(figsize=(8, 5))
    plt.scatter(X, y, color='blue', label='原始数据')
    plt.plot(X, y_pred_linear, color='green', linestyle='--', label='线性回归拟合')

    # Dense evaluation grid so the polynomial curve looks smooth rather
    # than piecewise-linear between the 5 training points.
    X_fit = np.linspace(X.min(), X.max(), 100).reshape(-1, 1)
    X_fit_poly = poly.transform(X_fit)
    y_fit_poly = poly_model.predict(X_fit_poly)
    plt.plot(X_fit, y_fit_poly, color='red', label=f'{degree}阶多项式回归拟合')

    plt.title(f'多项式回归 (degree={degree}) 演示')
    plt.xlabel('X')
    plt.ylabel('y')
    plt.legend()
    plt.grid(True)
    plt.tight_layout()
    plt.show()

    return linear_model, poly_model

if __name__ == "__main__":
    # Run the demo with a quadratic fit when executed as a script.
    polynomial_regression_demo(2)