import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures

def runplt():
    """Open a fresh figure pre-configured for the pizza charts and return pyplot.

    Returns:
        The ``matplotlib.pyplot`` module, ready for further plotting calls.
    """
    plt.figure()
    # Fix the viewport and turn the grid on before any data is drawn.
    plt.grid(True)
    plt.axis([0, 25, 0, 25])
    # Chart title and axis labels.
    plt.title('Pizza Inch and Price')
    plt.xlabel('Inch')
    plt.ylabel('Price')
    return plt

# Training and test sets: pizza diameter in inches (X) vs. price (y).
# sklearn expects 2-D feature arrays, hence the one-element inner lists.
X_train = [[d] for d in (6, 8, 10, 14, 18)]
y_train = [[p] for p in (7, 9, 13, 17.5, 18)]
X_test = [[d] for d in (7, 9, 11, 15)]
y_test = [[p] for p in (8, 12, 15, 18)]

# Visualize the raw training data as a scatter plot.
plt = runplt()
plt.scatter(X_train, y_train, s=40)  # s=40: marker size
plt.show()

# Linear regression: fit price ~ diameter on the training set.
xx = np.linspace(0, 26, 5)  # sample x positions spanning the plot range
regressor = LinearRegression()
regressor.fit(X_train, y_train)
# Predict along xx; reshape to a column vector since sklearn wants 2-D input.
yy = regressor.predict(xx.reshape(-1, 1))

# Plot the fitted line on top of the training points.
plt = runplt()
# Fixed legend typo: 'orginal' -> 'original'.
plt.scatter(X_train, y_train, s=40, label='original')
plt.plot(xx, yy, 'g-', label='linear equation')
plt.legend(loc='upper left')
plt.show()

# Quadratic regression.
# Expand each diameter x into polynomial features [1, x, x^2].
quadratic_featurizer = PolynomialFeatures(degree=2)
X_train_quadratic = quadratic_featurizer.fit_transform(X_train)

# LinearRegression.fit returns the estimator itself, so fitting can be chained.
regressor_quadratic = LinearRegression().fit(X_train_quadratic, y_train)
# Plot the quadratic fit together with the linear fit and the training points.
# Use a dense grid: the original 5-point linspace rendered the parabola as
# visibly straight segments instead of a smooth curve.
xx = np.linspace(0, 26, 100)
xx_quadratic = quadratic_featurizer.transform(xx.reshape(-1, 1))
# Re-evaluate the linear model on the same dense grid so both curves align.
yy = regressor.predict(xx.reshape(-1, 1))
plt = runplt()
# Fixed legend typo: 'orginal' -> 'original'.
plt.scatter(X_train, y_train, s=40, label='original')
plt.plot(xx, yy, 'g-', label='linear equation')
plt.plot(xx, regressor_quadratic.predict(xx_quadratic), 'r--', label="quadratic equation")
plt.legend(loc='upper left')
plt.show()

# R-squared of both models on the held-out test set (higher is better).
# Transform the test features with the ALREADY-FITTED featurizer (transform,
# not fit_transform) so train and test share the same feature mapping.
X_test_quadratic = quadratic_featurizer.transform(X_test)
# Fixed typos in the printed labels: 'liner' -> 'linear', 'r-quared' -> 'r-squared'.
print('linear equation r-squared', regressor.score(X_test, y_test))
print('quadratic equation r-squared', regressor_quadratic.score(X_test_quadratic, y_test))