# import os
# import tensorflow as tf
# from sklearn import datasets
# import numpy as np
# from tensorflow import keras
# import shap


import xgboost
import shap

# Enable SHAP's JS visualization backend (needed for interactive plots in notebooks).
shap.initjs()

# Train an XGBoost model on the iris dataset.
# NOTE(review): iris is a 3-class classification task, but XGBRegressor fits the
# integer class labels as a continuous target — presumably intentional for this
# demo; XGBClassifier would be the conventional choice. TODO confirm.
X, y = shap.datasets.iris()
model = xgboost.XGBRegressor().fit(X, y)

# Explain the model's predictions using SHAP.
# (Same syntax works for LightGBM, CatBoost, scikit-learn, transformers, Spark, etc.)
explainer = shap.Explainer(model)
shap_values = explainer(X)

# Visualize the first prediction's explanation.
# BUG FIX: shap.image_plot expects image-shaped (H x W x C) pixel data and fails
# on a tabular DataFrame like iris; the tabular waterfall plot is the right call
# here (it was already present, commented out, in the original).
shap.plots.waterfall(shap_values[0])


# model_path = "iris.h5"


# model = keras.models.load_model(model_path)  # load the model

# X,y = shap.datasets.iris()

# explainer = shap.Explainer(model)
# # to_explain = X[[39,41]]
# shap_values = explainer(X)

# # shap.plots.waterfall(shap_values[0])
# # e = shap.GradientExplainer(
# #     (model.layers[7].input, model.layers[-1].output),
# #     map2layer(X, 7),
# #     local_smoothing=0 # std dev of smoothing noise
# # )

# shap.image_plot(shap_values, X)


# import xgboost
# import shap
# shap.initjs()

# # train an XGBoost model
# X, y = shap.datasets.boston()
# model = xgboost.XGBRegressor().fit(X, y)

# # explain the model's predictions using SHAP
# # (same syntax works for LightGBM, CatBoost, scikit-learn, transformers, Spark, etc.)
# explainer = shap.Explainer(model)
# shap_values = explainer(X)

# # visualize the first prediction's explanation
# shap.plots.waterfall(shap_values[0])

'''
model_path = "models/" + source + "-" + str(pre_time) + "-" + ioh_time + "-" + str(ob_win) + ".h5"
#model_path = "models/tongji-1.0-1-5.h5"

model = rfg.create_model_2(dynamic.shape[1:], ob_win)

model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy', pm.AUC])

dynamic_dim = dynamic.reshape(dynamic.shape[0], dynamic.shape[1], dynamic.shape[2], 1)

history = model.fit([dynamic_dim, dynamic], label, epochs=200, batch_size=1024,class_weight=cw,
                    validation_split=0.3, verbose=2,
                    callbacks=[keras.callbacks.EarlyStopping(monitor='val_loss', min_delta=0, patience=10, verbose=0, mode='min'),
                               keras.callbacks.ModelCheckpoint(model_path, monitor='val_loss', save_best_only=True, mode='min', verbose=0)])

if(str(flag)=="BT"):
    model_path = "/home/mount/chy/models_all/" + source + "+BT-" + str(pre_time) + "-" + ioh_time + "-" + str(ob_win) + ".h5"
    f = "test_result+BT.txt"  # output file name
    whether = "True"
if not os.path.exists(model_path):
    print("model is not exist...")
    os._exit(0)
model = keras.models.load_model(model_path, custom_objects={"AUC": pm.AUC})

'''
