import numpy as np
import matplotlib.pyplot as plt

from sklearn import datasets
from sklearn.tree import DecisionTreeClassifier as DTC
from sklearn.metrics import zero_one_loss
from sklearn.metrics import accuracy_score
from sklearn.ensemble import AdaBoostClassifier as ABC

# Boosting hyperparameters shared by both AdaBoost variants fitted below.
n_estimators = 400
learning_rate = 1.

# Hastie et al. (ESL, 10.2) synthetic binary problem: 10 standard-normal
# features, labels in {-1, +1}.  random_state fixed for reproducibility.
X, y = datasets.make_hastie_10_2(n_samples=12000, random_state=1)

# First 2000 samples train, remaining 10000 test (the split used in the
# classic boosting experiment).
X_train, y_train = X[:2000], y[:2000]
X_test, y_test = X[2000:], y[2000:]

# Baseline 1: a decision stump (depth-1 tree) -- the weak learner that
# AdaBoost will boost.  Report its raw test error rate.
dt_stump = DTC(max_depth=1, min_samples_leaf=1)
dt_stump.fit(X_train, y_train)
dt_stump_err = 1.0 - dt_stump.score(X_test, y_test)
print("dt_stump_err = %s" % dt_stump_err)

# Baseline 2: a deeper (depth-9) unboosted decision tree for comparison.
dt = DTC(max_depth=9, min_samples_leaf=1)
dt.fit(X_train, y_train)
dt_err = 1.0 - dt.score(X_test, y_test)
print("dt_err = %s" % dt_err)

# Boost the stump with both AdaBoost flavours: discrete SAMME (weighted
# class-label votes) and real SAMME.R (class-probability votes).
ada_discrete = ABC(base_estimator=dt_stump,
                   learning_rate=learning_rate,
                   n_estimators=n_estimators,
                   algorithm="SAMME")
ada_real = ABC(base_estimator=dt_stump,
               learning_rate=learning_rate,
               n_estimators=n_estimators,
               algorithm="SAMME.R")

# Fit the discrete variant first, then the real one (same order as before).
ada_discrete.fit(X_train, y_train)
ada_real.fit(X_train, y_train)

def _staged_errors(clf, X_eval, y_eval):
    """Return the zero-one loss of *clf* on (X_eval, y_eval) after each
    boosting stage, as a length-n_estimators array."""
    errs = np.zeros((n_estimators,))
    for stage, pred in enumerate(clf.staged_predict(X_eval)):
        errs[stage] = zero_one_loss(y_eval, pred)
    return errs


fig = plt.figure()
ax = fig.add_subplot(111)

# Horizontal reference lines for the two single-tree baselines.
ax.plot([1, n_estimators], [dt_stump_err] * 2, 'k-',
        label='Decision Stump Error')
# BUG FIX: this line plots dt_err (the depth-9 tree), so it needs its own
# legend label -- it previously duplicated 'Decision Stump Error'.
ax.plot([1, n_estimators], [dt_err] * 2, 'k--',
        label='Decision Tree Error')

# Per-stage train/test error curves for both boosting variants.
ada_discrete_err = _staged_errors(ada_discrete, X_test, y_test)
ada_discrete_train_err = _staged_errors(ada_discrete, X_train, y_train)
ada_real_err = _staged_errors(ada_real, X_test, y_test)
ada_real_train_err = _staged_errors(ada_real, X_train, y_train)

print("err len is %s, est num is %s" % (len(ada_real_train_err), n_estimators))

# x-axis: boosting stage number, 1..n_estimators (hoisted loop invariant).
stages = np.arange(n_estimators) + 1
ax.plot(stages, ada_discrete_err,
        label='Discrete AdaBoost Test Error', color='red')
ax.plot(stages, ada_discrete_train_err,
        label='Discrete AdaBoost Train Error', color='blue')
ax.plot(stages, ada_real_err,
        label='Real AdaBoost Test Error', color='orange')
ax.plot(stages, ada_real_train_err,
        label='Real AdaBoost Train Error', color='green')

ax.set_ylim((0.0, 0.5))
ax.set_xlabel('n_estimators')
ax.set_ylabel('error rate')

leg = ax.legend(loc='upper right', fancybox=True)
leg.get_frame().set_alpha(0.7)

plt.show()
