﻿import numpy as np
import matplotlib.pyplot as plt
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression

# Reproducible synthetic data for the polynomial-regression demo.
np.random.seed(42)

# m samples of x drawn uniformly from [-3, 3), with quadratic target
# y = 0.5*x^2 + x plus standard-normal noise.
m = 100
X = 6 * np.random.rand(m, 1) - 3
y = 0.5 * X**2 + X + np.random.randn(m, 1)

# Expand the single feature into [x, x^2]; no bias column is added here
# because LinearRegression fits its own intercept term.
poly_features = PolynomialFeatures(degree=2, include_bias=False)
X_poly = poly_features.fit_transform(X)  # fit learns the feature combos, transform builds them

# Sanity check: first sample before and after expansion.
print(X[0])
print(X[0] ** 2)
print(X_poly[0])

# Ordinary least squares on the expanded features; fit() returns the estimator,
# so construction and fitting can be chained.
lin_reg = LinearRegression().fit(X_poly, y)
print(lin_reg.coef_)       # e.g. [[0.93366893 0.56456263]]
print(lin_reg.intercept_)  # e.g. [-0.21865419]
# Recovered model: y ~= 0.5646*x^2 + 0.9337*x - 0.2187 (true curve: 0.5*x^2 + x)

# Evaluate the fitted model on an evenly spaced grid spanning the training range.
X_new = np.linspace(-3, 3, 100).reshape(-1, 1)
# poly_features was already fitted above, so only transform is needed here.
y_new = lin_reg.predict(poly_features.transform(X_new))

# Scatter the training data and overlay the fitted quadratic curve.
plt.plot(X, y, 'b.')
plt.plot(X_new, y_new, 'r--', linewidth=2, label='Prediction')
plt.xlabel("x_1")
plt.ylabel("y")
plt.axis([-3, 3, -5, 10])
plt.legend()
plt.show()