# coding=utf-8
import numpy as np
import matplotlib.pyplot as plt

from matplotlib.colors import ListedColormap
from sklearn.datasets import make_moons  # , make_circles, make_swiss_roll

from sklearn.linear_model import LogisticRegressionCV
from sklearn.neighbors import KNeighborsClassifier
from sklearn.ensemble import RandomForestClassifier, GradientBoostingClassifier
from xgboost.sklearn import XGBClassifier
from sklearn.naive_bayes import BernoulliNB, GaussianNB
from sklearn.svm import SVC


# Build a random 2-D toy dataset (two interleaving half-moons) once at
# module level; every classifier below is fitted and plotted on it.
np.random.seed(0)
X, y = make_moons(n_samples=200, noise=0.20)
# X, y = make_swiss_roll(200, noise=0.20)
plt.rcParams.update({'figure.autolayout': True})
# Quick-look scatter of the raw points:
# plt.scatter(X[:, 0], X[:, 1], s=40, c=y, cmap=plt.cm.Spectral)
# plt.show()


def plot_decision_boundary(pred_func, clf_in, h=0.01):
    """Plot a classifier's decision boundary next to its confidence map.

    Uses the module-level dataset ``X``/``y``.  The left panel shows the
    hard class predictions returned by ``pred_func`` over a dense mesh;
    the right panel shows the decision function (or the positive-class
    probability) of ``clf_in`` over the same mesh.

    :param pred_func: callable mapping an (n, 2) array to class labels.
    :param clf_in: fitted classifier exposing ``decision_function`` or
        ``predict_proba``.
    :param h: mesh step size (default 0.01, the previously hard-coded value).
    :return: the created matplotlib Figure (previously None; callers may
        ignore it).
    """
    # Plot extent: data range plus a small margin on every side.
    x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5
    y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5

    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
    # Flattened mesh points, computed once and reused for both panels
    # (the original rebuilt this array twice).
    grid = np.c_[xx.ravel(), yy.ravel()]

    fig = plt.figure(figsize=(16, 8))
    ax_left = fig.add_subplot(121)
    ax_right = fig.add_subplot(122)
    cm_bright = ListedColormap(['#FF6666', '#6666FF'])

    # Left panel: hard predictions over the mesh, with the data on top.
    Z = pred_func(grid).reshape(xx.shape)
    ax_left.contourf(xx, yy, Z, cmap=plt.cm.RdBu)
    ax_left.scatter(X[:, 0], X[:, 1], c=y, cmap=cm_bright, edgecolors='k')

    # Right panel: confidence surface.  Prefer decision_function when the
    # classifier has one; otherwise fall back to predict_proba of class 1.
    if hasattr(clf_in, "decision_function"):
        Z = clf_in.decision_function(grid)
    else:
        Z = clf_in.predict_proba(grid)[:, 1]
    Z = Z.reshape(xx.shape)
    ax_right.contourf(xx, yy, Z, cmap=plt.cm.RdBu)
    ax_right.scatter(X[:, 0], X[:, 1], c=y, cmap=cm_bright, alpha=1, edgecolors='k')
    return fig





# (model, title) pairs: each classifier is fitted on the moons data and
# visualised with plot_decision_boundary in the loop below.
model_list = [[KNeighborsClassifier(), "KNeighborsClassifier default=5"],
              # label fixed: the original said "default=3", but 3 is an
              # explicit override of the default (which is 5)
              [KNeighborsClassifier(n_neighbors=3), "KNeighborsClassifier n_neighbors=3"],
              [GradientBoostingClassifier(), 'GradientBoostingClassifier'],
              [XGBClassifier(), "XGBClassifier"],
              [RandomForestClassifier(), "RandomForestClassifier"],
              [BernoulliNB(), "BernoulliNB"],
              [LogisticRegressionCV(), "Logistic Regression"],
              [GaussianNB(), "GaussianNB"],
              [SVC(), "SVC"]]

# Alternative sweeps over a single hyper-parameter:
# model_list = [[KNeighborsClassifier(n_neighbors=i), 'KNeighborsClassifier n_neighbors=' + str(i)] for i in range(1, 20)]
# model_list = [[SVC(degree=i), 'SVC degree=' + str(i)] for i in range(1, 20)]


# Fit each model on the toy data and draw its decision boundary.
for clf, title in model_list:  # tuple unpacking instead of list_tmp[0]/[1]
    clf.fit(X, y)
    # clf.predict is already a callable over (n, 2) arrays — the original
    # wrapped it in a redundant lambda.
    plot_decision_boundary(clf.predict, clf)
    plt.title(title)
    plt.show()


# https://blog.csdn.net/luanpeng825485697/article/details/78967139


# from sklearn.naive_bayes import MultinomialNB
# clf = MultinomialNB()
# clf.fit(X, y)  # ValueError: Input X must be non-negative
# plot_decision_boundary(lambda x: clf.predict(x), clf)
# plt.title("MultinomialNB ")
# plt.show()




"""
/user/proj_scp/backup/sellin/regression/model_selection_by_sum/-8/2019-02-run-base/20180115_rm_outlier_base_20190202_2019-02-02_173527
/user/proj_scp/backup/sellin/regression/model_selection_by_sum/-8/2019-02-run-base/20180212_rm_outlier_base_20190202_2019-02-02_173527
/user/proj_scp/backup/sellin/regression/model_selection_by_sum/-8/2019-02-run-base/20180312_rm_outlier_base_20190202_2019-02-02_173527
/user/proj_scp/backup/sellin/regression/model_selection_by_sum/-8/2019-02-run-base/20180409_rm_outlier_base_20190202_2019-02-02_173527
/user/proj_scp/backup/sellin/regression/model_selection_by_sum/-8/2019-02-run-base/20180514_rm_outlier_base_20190202_2019-02-02_173527
/user/proj_scp/backup/sellin/regression/model_selection_by_sum/-8/2019-02-run-base/20180611_rm_outlier_base_20190202_2019-02-12_114232
/user/proj_scp/backup/sellin/regression/model_selection_by_sum/-8/2019-02-run-base/20180716_rm_outlier_base_20190202_2019-02-02_173527
/user/proj_scp/backup/sellin/regression/model_selection_by_sum/-8/2019-02-run-base/20180813_rm_outlier_base_20190202_2019-02-02_173527
/user/proj_scp/backup/sellin/regression/model_selection_by_sum/-8/2019-02-run-base/20180910_rm_outlier_base_20190202_2019-02-02_173527
/user/proj_scp/backup/sellin/regression/model_selection_by_sum/-8/2019-02-run-base/20181015_remove_outlier_2019-01-17_122924
/user/proj_scp/backup/sellin/regression/model_selection_by_sum/-8/2019-02-run-base/20181112_rm_outliter_2019-02-19_115908

20180115_1_0_stacking_20190203090858
20180212_0_0_stacking_20190204011505
20180312_0_0_stacking_20190204161144
20180409_0_0_stacking_20190205074251
20180514_3_0_stacking_20190206001831
20180611_0_0_stacking_20190213042343
20180716_0_0_stacking_20190206171645
20180813_1_0_stacking_20190207104526
20180910_0_0_stacking_20190208054215
20181015_0_2_stacking_20190117110334
20181112_0_0_stacking_20190220210822

"""