from sklearn.tree import ExtraTreeRegressor
from sklearn.ensemble import BaggingRegressor
from sklearn import ensemble
from sklearn import neighbors
from sklearn import svm
from sklearn import linear_model
from sklearn import tree
import numpy as np
import matplotlib.pyplot as plt

########### 3. Candidate regressors ##########
# One instance of each regressor family to be compared on the same data.
# All hyper-parameters are left at their defaults except the ensemble sizes.

# 3.1 Decision-tree regression
model_DecisionTreeRegressor = tree.DecisionTreeRegressor()

# 3.2 Ordinary least-squares linear regression
model_LinearRegression = linear_model.LinearRegression()

# 3.3 Support-vector regression (RBF kernel by default)
model_SVR = svm.SVR()

# 3.4 K-nearest-neighbours regression
model_KNeighborsRegressor = neighbors.KNeighborsRegressor()

# 3.5 Random forest: ensemble of 20 trees
model_RandomForestRegressor = ensemble.RandomForestRegressor(n_estimators=20)

# 3.6 AdaBoost: ensemble of 50 weak learners
model_AdaBoostRegressor = ensemble.AdaBoostRegressor(n_estimators=50)

# 3.7 Gradient-boosted regression trees: ensemble of 100 trees
model_GradientBoostingRegressor = ensemble.GradientBoostingRegressor(n_estimators=100)

# 3.8 Bagging regressor (default base estimator)
model_BaggingRegressor = BaggingRegressor()

# 3.9 Extremely-randomized (extra) tree regression
model_ExtraTreeRegressor = ExtraTreeRegressor()

########### 1. Synthetic data generation ##########


def f(x1, x2):
    """Ground-truth target surface.

    Computes ``0.5*sin(x1) + 0.5*cos(x2) + 3 + 0.1*x1`` element-wise;
    accepts scalars or NumPy arrays (broadcast by NumPy's ufuncs).
    """
    return 0.5 * np.sin(x1) + 0.5 * np.cos(x2) + 3 + 0.1 * x1


def load_data():
    """Build synthetic train/test sets for the regressor comparison.

    Returns:
        tuple: ``(data_train, data_test)`` — float arrays of shape
        ``(500, 3)`` and ``(100, 3)``; columns are ``[x1, x2, y]``.
        Training targets carry uniform noise in ``[-0.5, 0.5)``;
        test targets are noise-free evaluations of ``f``.
    """
    x1_train = np.linspace(0, 50, 500)
    x2_train = np.linspace(-10, 10, 500)
    # BUG FIX: the original added np.random.random(1) — a length-1 ndarray —
    # so each row mixed scalars with an array; modern NumPy rejects such
    # ragged input to np.array (ValueError: inhomogeneous shape).
    # np.random.random() draws a plain float with the same distribution.
    data_train = np.array([[x1, x2, f(x1, x2) + (np.random.random() - 0.5)]
                           for x1, x2 in zip(x1_train, x2_train)])
    # Test points: the same grids with a small random jitter, exact targets.
    x1_test = np.linspace(0, 50, 100) + 0.5 * np.random.random(100)
    x2_test = np.linspace(-10, 10, 100) + 0.02 * np.random.random(100)
    data_test = np.array([[x1, x2, f(x1, x2)]
                          for x1, x2 in zip(x1_test, x2_test)])
    return data_train, data_test


# Materialize the datasets once. Columns 0-1 are the features (x1, x2);
# column 2 is the target y (noisy in train, noise-free in test).
train, test = load_data()
x_train = train[:, :2]
y_train = train[:, 2]
x_test = test[:, :2]
y_test = test[:, 2]


########### 2. Fit, score and plot every model ##########
# The original repeated the same fit/score/plot stanza nine times; a
# data-driven loop over (model, label) pairs removes the duplication.
_MODELS = [
    (model_DecisionTreeRegressor, 'model_DecisionTreeRegressor'),
    (model_LinearRegression, 'model_LinearRegression'),
    (model_SVR, 'model_SVR'),
    (model_KNeighborsRegressor, 'model_KNeighborsRegressor'),
    (model_RandomForestRegressor, 'model_RandomForestRegressor'),
    (model_AdaBoostRegressor, 'model_AdaBoostRegressor'),
    (model_GradientBoostingRegressor, 'model_GradientBoostingRegressor'),
    (model_BaggingRegressor, 'model_BaggingRegressor'),
    (model_ExtraTreeRegressor, 'model_ExtraTreeRegressor'),
]

# BUG FIX: the original asked for a 100x100-INCH canvas (figsize is in
# inches), which is unusable on screen; 15x15 keeps the 3x3 grid readable.
plt.figure(1, figsize=(15, 15))
for _pos, (_model, _name) in enumerate(_MODELS, start=1):
    plt.subplot(3, 3, _pos)
    _model.fit(x_train, y_train)
    score = _model.score(x_test, y_test)  # R^2 on the noise-free test set
    result = _model.predict(x_test)
    plt.plot(np.arange(len(result)), y_test, 'go-', label='true value')
    plt.plot(np.arange(len(result)), result, 'ro-', label='predict value')
    plt.title('score: %f' % score)
    plt.text(-0.5, 7, _name, fontsize=10)
    # BUG FIX: the plots set label= but plt.legend() was never called,
    # so the 'true value' / 'predict value' labels never appeared.
    plt.legend(loc='best', fontsize=8)

plt.show()

