import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt

from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.datasets import make_moons,make_circles,make_classification
from sklearn.linear_model import LogisticRegressionCV
from sklearn.neighbors import KNeighborsClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.svm import SVC
from sklearn.ensemble import RandomForestClassifier,AdaBoostClassifier,GradientBoostingClassifier

# Font configuration so CJK (Chinese) text in titles/labels renders
# correctly, and so the minus sign is not replaced by a broken glyph.
mpl.rcParams['font.sans-serif'] = ['simHei']
mpl.rcParams['axes.unicode_minus'] = False

# Synthetic 2-feature binary problem; uniform jitter is added so the
# classes are close to, but not perfectly, linearly separable.
X, Y = make_classification(
    n_features=2,
    n_informative=2,
    n_redundant=0,
    n_clusters_per_class=1,
    random_state=1,
)
rng = np.random.RandomState(2)
X += 2 * rng.uniform(size=X.shape)
linearly_separable = (X, Y)

# Three benchmark datasets: interleaving half-moons, concentric circles,
# and the jittered (nearly) linearly separable problem built above.
datasets = [
    make_moons(noise=0.3, random_state=0),
    make_circles(noise=0.2, random_state=1, factor=0.4),
    linearly_separable,
]

# Models under comparison; names[k] is the plot title for classifiers[k],
# so the two lists must stay index-aligned.
names = ['Nearest Neighbors', 'Logistic', 'Decision Tree', 'Random Forest',
         'AdaBoost', 'GBDT', 'svm']
classifiers = [
    KNeighborsClassifier(3),
    LogisticRegressionCV(),
    DecisionTreeClassifier(max_depth=5),
    RandomForestClassifier(max_depth=5, n_estimators=10, max_features=1),
    AdaBoostClassifier(n_estimators=10, learning_rate=1.5),
    GradientBoostingClassifier(n_estimators=10, learning_rate=1.5),
    SVC(C=1, kernel='rbf'),
]

# Plotting: one row per dataset. The first column shows the raw
# (standardized) data; each remaining column shows one classifier's
# decision surface with its test accuracy.
figure = plt.figure(figsize=(27, 9), facecolor='w')
h = .02  # mesh step size for the decision-surface grid

# Colormaps are loop-invariant — build them once instead of per dataset.
cm = plt.cm.RdBu
cm_bright = mpl.colors.ListedColormap(['r', 'b', 'y'])

n_cols = len(classifiers) + 1
for row, ds in enumerate(datasets):
    X, Y = ds
    # Standardize features so distance-based models (KNN, SVM) are not
    # dominated by the axis with the larger scale.
    X = StandardScaler().fit_transform(X)
    # Fixed seed: without random_state the split — and hence the scores
    # and the saved figure — changed on every run.
    X_train, X_test, Y_train, Y_test = train_test_split(
        X, Y, test_size=.4, random_state=42)

    # Evaluation mesh covering the data range with a half-unit margin.
    x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5
    y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                         np.arange(y_min, y_max, h))

    # Subplot index derived from the loop position instead of a fragile
    # manually-threaded global counter.
    i = row * n_cols + 1

    # First column: the input data itself (train solid, test translucent).
    ax = plt.subplot(len(datasets), n_cols, i)
    ax.scatter(X_train[:, 0], X_train[:, 1], c=Y_train, cmap=cm_bright)
    ax.scatter(X_test[:, 0], X_test[:, 1], c=Y_test, cmap=cm_bright, alpha=0.6)
    ax.set_xlim(xx.min(), xx.max())
    ax.set_ylim(yy.min(), yy.max())
    ax.set_xticks(())
    ax.set_yticks(())
    i += 1

    # Remaining columns: one decision surface per classifier.
    for name, clf in zip(names, classifiers):
        ax = plt.subplot(len(datasets), n_cols, i)
        clf.fit(X_train, Y_train)
        score = clf.score(X_test, Y_test)

        # np.c_ stacks the flattened grid coordinates column-wise into
        # an (n_points, 2) array of mesh sample locations.
        grid = np.c_[xx.ravel(), yy.ravel()]
        # Prefer the raw decision function when the model exposes one;
        # otherwise fall back to the positive-class probability.
        if hasattr(clf, 'decision_function'):
            Z = clf.decision_function(grid)
        else:
            Z = clf.predict_proba(grid)[:, 1]

        Z = Z.reshape(xx.shape)
        ax.contourf(xx, yy, Z, cmap=cm, alpha=.8)
        ax.scatter(X_train[:, 0], X_train[:, 1], c=Y_train, cmap=cm_bright)
        ax.scatter(X_test[:, 0], X_test[:, 1], c=Y_test, cmap=cm_bright,
                   alpha=0.6)

        ax.set_xlim(xx.min(), xx.max())
        ax.set_ylim(yy.min(), yy.max())
        ax.set_xticks(())
        ax.set_yticks(())
        ax.set_title(name)
        # Test accuracy in the lower-right corner, leading zero stripped.
        ax.text(xx.max() - .3, yy.min() + .3, ('%.2f' % score).lstrip('0'),
                size=25, horizontalalignment='right')
        i += 1

# Tighten horizontal margins and write the figure to disk.
# NOTE(review): assumes ../img/ already exists — savefig does not create it.
figure.subplots_adjust(left=.02, right=.98)
plt.savefig('../img/cs.png')
        
    
                                    