import numpy as np
import sklearn.datasets as datasets
import matplotlib.pyplot as plt
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.svm import SVC
from sklearn.ensemble import VotingClassifier
from sklearn.model_selection import train_test_split

# Generate a 2-class "two moons" toy dataset: 500 samples, Gaussian noise,
# seeded so the data is reproducible across runs.
x, y = datasets.make_moons(500, noise=0.3, random_state=42)

# Visualize the two classes before modeling.
plt.scatter(x[y == 0, 0], x[y == 0, 1])
plt.scatter(x[y == 1, 0], x[y == 1, 1])
plt.show()

# Seed the split as well: make_moons above is already seeded, but without
# random_state here the train/test partition (and every score printed
# below) would change on each run.
x_train, x_test, y_train, y_test = train_test_split(x, y, random_state=42)


# Hard voting: each base estimator casts one vote for a class label and
# the majority label wins.
hard_members = [
    ('logic_reg', LogisticRegression(solver='lbfgs')),
    ('svm', SVC(gamma='auto')),
    ('des_tree', DecisionTreeClassifier()),
]
vot_claz = VotingClassifier(estimators=hard_members, voting='hard')

vot_claz.fit(x_train, y_train)
print(vot_claz.score(x_test, y_test))

# Soft voting: averages the members' predicted class probabilities, so
# SVC must be configured with probability=True to expose predict_proba.
vot_claz2 = VotingClassifier(
    estimators=[
        ('logic_reg', LogisticRegression(solver='lbfgs')),
        ('svm', SVC(gamma='auto', probability=True)),
        ('des_tree', DecisionTreeClassifier()),
    ],
    voting='soft',
)

vot_claz2.fit(x_train, y_train)
print(vot_claz2.score(x_test, y_test))


########################### Bagging (Random Patches) ##############################
# 500 decision trees, each trained on a bootstrap sample of 100 rows AND a
# random (bootstrapped) subset of 1 feature. The out-of-bag samples act as
# a free validation set, so oob_score_ estimates accuracy without a split.
from sklearn.ensemble import BaggingClassifier

bagging_claz = BaggingClassifier(
    DecisionTreeClassifier(),
    n_estimators=500,
    max_samples=100,
    bootstrap=True,
    oob_score=True,
    n_jobs=-1,
    max_features=1,
    bootstrap_features=True,
)
bagging_claz.fit(x, y)
print(bagging_claz.oob_score_)

from sklearn.ensemble import RandomForestClassifier

# Random forest: bagged decision trees with extra per-split feature
# randomness; trees are regularized by capping them at 16 leaf nodes.
# Out-of-bag samples again provide a free accuracy estimate.
rf_clf = RandomForestClassifier(
    n_estimators=500,
    n_jobs=-1,
    oob_score=True,
    random_state=666,
    max_leaf_nodes=16,
)
rf_clf.fit(x, y)
print(rf_clf.oob_score_)


#### Boosting: ensemble algorithms that strengthen weak learners sequentially
from sklearn.ensemble import AdaBoostClassifier
from sklearn.ensemble import GradientBoostingClassifier