# -*- coding: utf-8 -*-
"""
Created on Fri Feb 19 15:57:07 2021

@author: Administrator
"""

import xgboost as xgb
import pandas as pd
import numpy as np
import matplotlib.pylab as plt




if __name__ == '__main__':
    # Load the pre-split modelling and hold-out sets; in both files the
    # first column is the binary label and the remaining columns are features.
    model_set = pd.read_csv('model_set.csv')
    test_set = pd.read_csv('test_set.csv')

    X_train, y_train = model_set.iloc[:, 1:], model_set.iloc[:, 0]
    X_test, y_test = test_set.iloc[:, 1:], test_set.iloc[:, 0]
    dtrain = xgb.DMatrix(X_train, y_train)
    dtest = xgb.DMatrix(X_test, y_test)

    # Pick the number of trees by cross-validating three candidate settings.
    # FIX: the objective key is 'objective' -- the original used 'obj', which
    # xgboost ignores, so training silently fell back to the default objective.
    # (The deprecated 'silent' key is dropped; 'nfold' belongs to xgb.cv, not
    # the booster params -- see the cv calls below.)
    param1 = {
        'objective': 'binary:logistic',  # non-default
        'eval_metric': 'auc',
        'subsample': 1,
        'max_depth': 9,
        'eta': 0.2,
        'gamma': 0,
        'lambda': 1,
        'alpha': 0,
        'colsample_bytree': 1,
        'colsample_bylevel': 1,
        'colsample_bynode': 1,
    }

    # Tune parameters one at a time.
    param2 = {
        'objective': 'binary:logistic',  # non-default
        'eval_metric': 'auc',
        'eta': 0.18,
        'max_depth': 9,
        'gamma': 0,
        'colsample_bytree': 0.8,
    }
    param3 = {
        'objective': 'binary:logistic',  # non-default
        'eval_metric': 'auc',
        'max_depth': 9,
        'eta': 0.22,
        'gamma': 0,
        'colsample_bytree': 0.6,
    }

    num_round = 400
    # FIX: pass nfold=5 explicitly -- burying "nfold":5 inside param1 had no
    # effect, so every run actually used the default 3-fold CV.
    cvresult1 = xgb.cv(param1, dtrain, num_round, nfold=5)
    cvresult2 = xgb.cv(param2, dtrain, num_round, nfold=5)
    cvresult3 = xgb.cv(param3, dtrain, num_round, nfold=5)

    # Plot train/test AUC against the boosting round for all three settings.
    rounds = range(1, num_round + 1)  # was hardcoded range(1, 401)
    fig, ax = plt.subplots(1, figsize=(15, 10))
    ax.grid()
    ax.plot(rounds, cvresult1.iloc[:, 0], c="red", label="train,original")
    ax.plot(rounds, cvresult1.iloc[:, 2], c="orange", linestyle='-.', label="test,original")

    ax.plot(rounds, cvresult2.iloc[:, 0], c="blue", label="train,2")
    ax.plot(rounds, cvresult2.iloc[:, 2], c="green", linestyle='-.', label="test,2")

    ax.plot(rounds, cvresult3.iloc[:, 0], c="gray", label="train,3")
    ax.plot(rounds, cvresult3.iloc[:, 2], c="pink", linestyle='-.', label="test,3")
    ax.legend(fontsize="xx-large")
    plt.show()

    #### Build the final model with the chosen parameters.
    xgb_model = xgb.train(param1, dtrain, num_boost_round=num_round)
    y_score = xgb_model.predict(dtest)      # predicted probabilities
    y_pred = np.where(y_score < .5, 0, 1)   # hard labels at a 0.5 cutoff

    # Evaluation.
    # FIX: ROC-AUC must be computed from the probability scores; the original
    # thresholded first, which collapses the ROC curve to a single point.
    from sklearn import metrics
    print(metrics.roc_auc_score(y_test, y_score))
    # FIX: print the confusion matrix -- the original computed it and
    # discarded the result.
    print(metrics.confusion_matrix(y_test, y_pred))