#!/usr/bin/python
import numpy as np
import xgboost as xgb
from xgboost import Booster
import matplotlib.pyplot as plt

# Load the mushroom (agaricus) dataset in LibSVM text format.
# NOTE(review): paths are relative — the script assumes it is run from the
# demo directory so that ../data/ resolves; confirm against repo layout.
dtrain = xgb.DMatrix('../data/agaricus.txt.train')
print(dtrain)

dtest = xgb.DMatrix('../data/agaricus.txt.test')
print(dtest)

# Evaluation sets whose metrics are reported after each boosting round.
watchlist = [(dtest, 'eval'), (dtrain, 'train')]
###
# advanced: start from an initial base prediction
#
print('start running example to start from an initial prediction')
# Specify parameters via dict; definitions are the same as the C++ version.
param = {'max_depth': 2, 'eta': 1, 'silent': 1, 'objective': 'binary:logistic'}
# Train xgboost for 1 round.
bst = xgb.train(param, dtrain, 1, watchlist)
# Note: we need the margin value instead of the transformed prediction in
# set_base_margin. Predicting with output_margin=True always yields the raw
# margin values before the logistic transformation.
ptrain = bst.predict(dtrain, output_margin=True)
ptest = bst.predict(dtest, output_margin=True)
print(ptrain)
print(ptest)
# Visualize the per-example margins from the first round.
plt.plot(ptrain)
plt.plot(ptest)
plt.show()


# Use the round-1 margins as the starting point for the next training run.
dtrain.set_base_margin(ptrain)
dtest.set_base_margin(ptest)

print('this is result of running from initial prediction')
bst = xgb.train(param, dtrain, 1, watchlist)

# Sanity check: xgb.train returns a Booster handle.
print(bst)
print(type(bst))
assert isinstance(bst, Booster)
