# -*- coding: utf-8 -*-
# @Date  : 2021/11/28 10:39
# @Author: Wan Fangming
# @FileName: train.py


import xgboost as xgb

# Load the demo mushroom dataset (LibSVM text format); DMatrix parses the
# labels from the files automatically.
xgb_train = xgb.DMatrix('../data/agaricus.txt.train')
xgb_test = xgb.DMatrix('../data/agaricus.txt.test')

# Training hyper-parameters.
params = {
    'objective': 'binary:logistic',  # binary classification; outputs sigmoid-transformed probabilities
    'booster': 'gbtree',             # tree-based booster
    'eta': 1.0,                      # learning rate (shrinkage applied per boosting round)
    'gamma': 1.0,                    # minimum loss reduction required to make a further split
    'min_child_weight': 1,           # minimum sum of instance weights needed in a leaf;
                                     # a split is rejected if it would drop below this
    'max_depth': 3                   # maximum depth of each tree
}

num_round = 2  # number of boosting rounds
# Datasets evaluated and printed after each round.
watchlist = [(xgb_train, 'train'), (xgb_test, 'test')]

model = xgb.train(params, xgb_train, num_round, watchlist)

# Persist the trained model to disk.
model.save_model('../data/0002.model')

# Reload the persisted model and predict on the test set.
bst = xgb.Booster()
bst.load_model('../data/0002.model')
pred = bst.predict(xgb_test)

# Fix: the predictions were previously computed and silently discarded.
# Threshold the predicted probabilities at 0.5 and report the test error rate.
labels = xgb_test.get_label()
error = sum(
    1 for p, y in zip(pred, labels) if int(p > 0.5) != int(y)
) / len(labels)
print('test error rate: %.4f' % error)