# SHAP model analysis for the tuned LightGBM classifier
import numpy as np
import pandas as pd
import os
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.preprocessing import scale, StandardScaler
from sklearn.model_selection import train_test_split, GridSearchCV, cross_val_score
from sklearn.metrics import confusion_matrix, accuracy_score, mean_squared_error, r2_score, roc_auc_score, roc_curve, classification_report
from sklearn.linear_model import LogisticRegression
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
from sklearn.neural_network import MLPClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import GradientBoostingClassifier
from lightgbm import LGBMClassifier
from sklearn.model_selection import KFold

from sklearn.metrics import f1_score,precision_score,recall_score,roc_auc_score,accuracy_score,roc_curve
import matplotlib.pyplot as plt
from xgboost.sklearn import XGBClassifier
import lightgbm as lgb
import shap

# Load the engineered feature set; 'Outcome' is the binary target label.
data = pd.read_csv('../featureEngineering/featuredData.csv')
y = data['Outcome']
X = data.drop(columns=['Outcome'])

# Hold out 20% of the rows for evaluation; fixed seed for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=1234
)

# Tuned LightGBM classifier. NOTE: the original passed gamma=0.35, but
# `gamma` is an XGBoost parameter with no LightGBM equivalent — LightGBM
# ignores it (newer versions emit an "Unknown parameter" warning), so it
# had no effect on training and is removed here. `min_child_weight` is a
# valid LightGBM alias (min_sum_hessian_in_leaf) and is kept.
gbm = LGBMClassifier(
    learning_rate=0.01,
    max_depth=5,
    n_estimators=500,
    num_leaves=50,
    min_child_weight=12,
)
gbm.fit(X_train, y_train)


# Evaluate the tuned model on the held-out test split.
plgbm_tuned_y_pre = gbm.predict(X_test)
plgbm_tuned_y_proba = gbm.predict_proba(X_test)

# Threshold-based metrics use the hard predictions; AUC uses the
# positive-class probability column.
plgbm_tuned_accuracy_score = accuracy_score(y_test, plgbm_tuned_y_pre)
plgbm_tuned_preci_score = precision_score(y_test, plgbm_tuned_y_pre)
plgbm_tuned_recall_score = recall_score(y_test, plgbm_tuned_y_pre)
plgbm_tuned_f1_score = f1_score(y_test, plgbm_tuned_y_pre)
plgbm_tuned_auc = roc_auc_score(y_test, plgbm_tuned_y_proba[:, 1])

scores = (
    plgbm_tuned_accuracy_score,
    plgbm_tuned_preci_score,
    plgbm_tuned_recall_score,
    plgbm_tuned_f1_score,
    plgbm_tuned_auc,
)
print('plgbm_tuned_accuracy_score: %f,plgbm_tuned_preci_score: %f,plgbm_tuned_recall_score: %f,plgbm_tuned_f1_score: %f,plgbm_tuned_auc: %f'
      % scores)

# SHAP attributions over the full dataset.
shap_values = shap.TreeExplainer(gbm).shap_values(X)

# Version-safety fix: older shap releases return a per-class list
# [class0, class1] for binary classifiers, while newer ones return a
# single (n_samples, n_features) array. The original `shap_values[1]`
# would silently select *row 1* under the new API instead of the
# positive class, so normalize before indexing.
pos_shap = shap_values[1] if isinstance(shap_values, list) else shap_values

# Global feature-importance overview (bar/beeswarm depending on input shape),
# then the positive-class beeswarm.
shap.summary_plot(shap_values, X)
shap.summary_plot(pos_shap, X)

# Per-feature dependence plots for the positive class.
for feature in (
    "Insulin",
    "Glucose",
    "Age",
    "DiabetesPedigreeFunction",
    "SkinThickness",
    "Pregnancies",
    "BloodPressure",
):
    shap.dependence_plot(feature, pos_shap, X)