library(caret)
# Reproducible, outcome-stratified 70/30 train/test split on `sad`.
set.seed(123)
# Use TRUE/FALSE, not T/F: T and F are ordinary variables that can be reassigned.
index <- createDataPartition(mimic_sad_aki_ml$sad, p = 0.7, list = FALSE)
train <- mimic_sad_aki_ml[index, ]
test <- mimic_sad_aki_ml[-index, ]

# Convert the outcome and the binary covariates in the TRAINING set to
# factors so classification models treat them as categorical.  A single
# lapply replaces nine copy-pasted assignments.
train_factor_cols <- c("sad", "vent", "crrt", "seda", "ami", "ckd",
                       "copd", "dm", "stroke")
train[train_factor_cols] <- lapply(
  train[train_factor_cols],
  function(col) as.factor(as.character(col))
)
str(train)

# Convert the same columns in the TEST set to factors, mirroring the
# training-set preprocessing so factor levels line up at prediction time.
test_factor_cols <- c("sad", "vent", "crrt", "seda", "ami", "ckd",
                      "copd", "dm", "stroke")
test[test_factor_cols] <- lapply(
  test[test_factor_cols],
  function(col) as.factor(as.character(col))
)
str(test)


# Logistic regression (LR) model ----------------------------------------
lm_model <- glm(sad ~ ., family = binomial(link = "logit"), data = train)
summary(lm_model)
# Predicted probability of class "1" on the test set, dichotomised at 0.5.
lm_pred <- predict(lm_model, test, type = "response")
threshold <- 0.5
predictions_binary <- as.integer(lm_pred > threshold)
confusionMatrix(as.factor(predictions_binary), as.factor(test$sad))
# Keep the hard class predictions under a model-specific name.
LR_pred <- predictions_binary

# SVM model --------------------------------------------------------------
library(e1071)
svm_model <- svm(sad ~ ., data = train, probability = TRUE)
svm_pred <- predict(svm_model, test, probability = TRUE)
a <- data.frame(svm_pred)
ab <- as.factor(as.character(a$svm_pred))
svm_sad <- as.factor(test$sad)
# Select the probability column by class NAME: e1071 orders the
# "probabilities" columns by the order the classes appeared in the
# training data, so the positional index [, 2] may silently pick the
# probability of class "0" instead of class "1".
svm_pred_prob <- attr(svm_pred, "probabilities")[, "1"]
confusionMatrix(data = ab, reference = svm_sad)
# Restore numeric 0/1 coding of the outcome for the models that follow.
train$sad <- as.numeric(as.character(train$sad))
test$sad <- as.numeric(as.character(test$sad))
# NOTE(review): as.numeric() on a factor yields level codes (1/2), not the
# 0/1 labels; acceptable downstream where only the ordering matters (ROC).
SVM_pred <- as.numeric(a$svm_pred)


#SVM model new code
#library(e1071)
#svm_model <- svm(sad ~ .,data = train, method="C-classification",
#                 kernel="radial")#训练SVM模型
#svm_pred <- predict(svm_model,test)#使用验证集进行预测
#计算预测的准确性
#confusion_matrix <- table(Predicted = svm_pred, Actual = test[["sad"]])
#print(confusion_matrix)
#安装并加载pROC包进行ROC分析
#library(pROC)
#计算ROC曲线
#roc_result <- roc(test[["sad"]],as.numeric(svm_pred))
#plot(roc_result)
#auc(roc_result)#计算AUC值

# XGBoost model -----------------------------------------------------------
library(xgboost)
# Column 1 is the outcome `sad`; everything else is used as a predictor.
train_matrix <- xgb.DMatrix(data.matrix(train[, -1]), label = train$sad)
test_matrix <- xgb.DMatrix(data.matrix(test[, -1]), label = test$sad)
params <- list(
  objective = "binary:logistic",
  eval_metric = "logloss",
  max_depth = 3,
  eta = 0.1,
  gamma = 0.5,
  colsample_bytree = 1,
  min_child_weight = 1,
  subsample = 0.5
)
# Track log-loss on both sets so early stopping can watch the validation fold.
watchlist <- list(train = train_matrix, val = test_matrix)
xgb_model <- xgb.train(
  params = params,
  data = train_matrix,
  nrounds = 125,
  watchlist = watchlist,
  early_stopping_rounds = 10,
  print_every_n = 10,
  maximize = FALSE
)
# Probabilities on the test set, dichotomised at 0.5 for the confusion matrix.
xgb_pred_prob <- predict(xgb_model, test_matrix)
xgb_pred <- as.integer(xgb_pred_prob > 0.5)
xgb_pred_factor <- factor(xgb_pred, levels = c(0, 1))
test_sad_factor <- factor(test$sad, levels = c(0, 1))
confusionMatrix(data = xgb_pred_factor, reference = test_sad_factor)

# Random forest (RF) model ------------------------------------------------
library(randomForest)
# randomForest does classification only when the response is a factor.
train$sad <- as.factor(train$sad)
test$sad <- as.factor(test$sad)
rf_model <- randomForest(sad ~ ., data = train, ntree = 500, mtry = 6)
# Hard class predictions on the held-out set.
rf_pred <- predict(rf_model, newdata = test)
confusionMatrix(data = rf_pred, reference = test$sad)

# Decision tree (DT) model ------------------------------------------------
library(rpart)
dt_model <- rpart(sad ~ ., data = train, method = "class")
# Select the class-"1" probability column by name rather than position,
# so the code does not depend on the internal ordering of factor levels.
dt_pred_prob <- predict(dt_model, newdata = test, type = "prob")[, "1"]
dt_pred <- as.integer(dt_pred_prob > 0.5)
confusionMatrix(factor(dt_pred, levels = c("0", "1")), test$sad)

# Naive Bayes (NB) model --------------------------------------------------
library(e1071)
nb_model <- naiveBayes(sad ~ ., data = train)
# type = "raw" returns per-class posterior probabilities; pick the "1"
# column by name instead of a positional index for robustness.
nb_pred_prob <- predict(nb_model, newdata = test, type = "raw")[, "1"]
nb_pred <- as.integer(nb_pred_prob > 0.5)
confusionMatrix(factor(nb_pred, levels = c("0", "1")), test$sad)


# K-nearest-neighbours (KNN) model ----------------------------------------
library(kknn)
knn_model <- kknn(sad ~ ., train, test, k = 10, distance = 2,
                  kernel = "rectangular")
# Probability of class "1", extracted by column name and coerced to a
# plain numeric vector in one step.
knn_pred_prob <- as.numeric(
  predict(knn_model, newdata = test, type = "prob")[, "1"]
)
threshold <- 0.5
knn_pred <- as.integer(knn_pred_prob > threshold)
confusionMatrix(factor(knn_pred, levels = c("0", "1")), test$sad)

# ROC curves (plotROC) ----------------------------------------------------
library(plotROC)
# Build a data frame of the true outcome and each model's predicted
# PROBABILITY.  The original used hard class labels for SVM/RF/KNN
# (SVM_pred, rf_pred, knn_pred), which yield degenerate single-threshold
# ROC curves; probabilities give the full curve.
test_sad <- test$sad
rf_pred_prob <- predict(rf_model, newdata = test, type = "prob")[, "1"]
ML_ROC <- data.frame(test_sad, lm_pred, svm_pred_prob, xgb_pred_prob,
                     rf_pred_prob, dt_pred_prob, nb_pred_prob,
                     knn_pred_prob)
ML_ROC$test_sad <- as.numeric(as.character(ML_ROC$test_sad))
str(ML_ROC)
# melt_roc() reshapes wide -> long: one (truth D, marker M, name) triple
# per model, which is what geom_roc() expects.
long_df <- melt_roc(ML_ROC, "test_sad",
                    c("lm_pred", "svm_pred_prob", "xgb_pred_prob",
                      "rf_pred_prob", "dt_pred_prob", "nb_pred_prob",
                      "knn_pred_prob"))
roc_plot <- ggplot(long_df, aes(d = D, m = M, color = name)) +
  geom_roc(n.cuts = 0) +
  theme_bw() +
  style_roc() +
  xlab("1 - Specificity") +
  ylab("Sensitivity") +
  ggtitle("ROC Curve Comparison") +
  guides(color = guide_legend(title = "Model", title.position = "top",
                              direction = "vertical", order = 1))
print(roc_plot)
# Place the legend inside the panel at the bottom-right.  Note:
# "bottom right" is not a valid legend.position keyword, and the
# original scale_color_manual() named values ("Model 1"/"Model 2") that
# matched none of the seven series, which would unmap every curve.
roc_plot <- roc_plot +
  theme(legend.position = c(0.95, 0.05),
        legend.justification = c(1, 0))
print(roc_plot)

# ROC curves (pROC) -------------------------------------------------------
library(pROC)
library(ggplot2)
# Assemble truth, probabilities, and hard predictions for every model.
# This frame is reused by the calibration and DCA sections below.
ML_ROC <- data.frame(test_sad,
                     lm_pred, LR_pred,
                     svm_pred, svm_pred_prob,
                     xgb_pred_prob, xgb_pred,
                     rf_pred,
                     dt_pred_prob, dt_pred,
                     nb_pred_prob, nb_pred,
                     knn_pred, knn_pred_prob
                     )
ML_ROC$test_sad <- as.numeric(as.character(ML_ROC$test_sad))
ML_ROC$svm_pred <- as.numeric(as.character(ML_ROC$svm_pred))
ML_ROC$rf_pred <- as.numeric(as.character(ML_ROC$rf_pred))
# rf_pred holds hard class labels, which give a degenerate single-point
# ROC; add the random-forest class-"1" probability and use that instead.
ML_ROC$rf_pred_prob <- predict(rf_model, newdata = test, type = "prob")[, "1"]
str(ML_ROC)
# One roc object per model (response first, predictor second).
LR_ROC <- roc(ML_ROC$test_sad, ML_ROC$lm_pred); LR_ROC
SVM_ROC <- roc(ML_ROC$test_sad, ML_ROC$svm_pred_prob); SVM_ROC
XGB_ROC <- roc(ML_ROC$test_sad, ML_ROC$xgb_pred_prob); XGB_ROC
RF_ROC <- roc(ML_ROC$test_sad, ML_ROC$rf_pred_prob); RF_ROC
DT_ROC <- roc(ML_ROC$test_sad, ML_ROC$dt_pred_prob); DT_ROC
NB_ROC <- roc(ML_ROC$test_sad, ML_ROC$nb_pred_prob); NB_ROC
KNN_ROC <- roc(ML_ROC$test_sad, ML_ROC$knn_pred_prob); KNN_ROC

# Print each model's AUC, then the 95% confidence interval of each AUC.
auc(LR_ROC)
auc(SVM_ROC)
auc(XGB_ROC)
auc(RF_ROC)
auc(DT_ROC)
auc(NB_ROC)
auc(KNN_ROC)
ci.auc(LR_ROC)
ci.auc(SVM_ROC)
ci.auc(XGB_ROC)
ci.auc(RF_ROC)
ci.auc(DT_ROC)
ci.auc(NB_ROC)
ci.auc(KNN_ROC)

# Optionally save the figure to a PNG file (uncomment to enable)
#png("ML ROC curves.png",width = 800,height = 800)

# Draw the first ROC curve (logistic regression)
plot.roc(LR_ROC,
        max.auc.polygon=F,# do not fill the whole plotting region
        smooth=F,# draw the raw, unsmoothed curve
        main="Comparison of different ML models of ROC curves",# plot title
        col = "red",# curve colour
        legacy.axes=T,# x axis runs 0 -> 1, labelled 1 - specificity
        lwd=2,
        print.auc=TRUE,print.auc.x=0.2,print.auc.y=0.8
        )
# Overlay the remaining curves on the same axes (add = T); each prints
# its AUC at a successively lower y position so the labels stack.
plot.roc(SVM_ROC,
         add = T,
         col = "orange",
         smooth = F,
         lwd=2,
         print.auc=TRUE,print.auc.x=0.2,print.auc.y=0.75)
plot.roc(XGB_ROC,
         add = T,
         col = "black",
         smooth = F,
         lwd=2,
         print.auc=TRUE,print.auc.x=0.2,print.auc.y=0.70)
plot.roc(RF_ROC,
         add = T,
         col = "pink",
         smooth = F,
         lwd=2,
         print.auc=TRUE,print.auc.x=0.2,print.auc.y=0.65)
plot.roc(DT_ROC,
         add = T,
         col = "skyblue",
         smooth = F,
         lwd=2,
         print.auc=TRUE,print.auc.x=0.2,print.auc.y=0.60)
plot.roc(NB_ROC,
         add = T,
         col = "blue",
         smooth = F,
         lwd=2,
         print.auc=TRUE,print.auc.x=0.2,print.auc.y=0.55)
plot.roc(KNN_ROC,
         add = T,
         col = "purple",
         smooth = F,
         lwd=2,
         print.auc=TRUE,print.auc.x=0.2,print.auc.y=0.50)
# Add a legend; entry order and colours must match the plotting order above.
legend("bottomright",
       legend = c("逻辑回归模型（LR）","支持向量机模型（SVM）","极限梯度提升模型（XGBoost）","随机森林模型（RF）","决策树模型（DT）","朴素贝叶斯模型（NB）","K最近邻模型（KNN）"),
       col = c("red","orange","black","pink","skyblue","blue","purple"),
       lwd = 1,
       cex=0.7
       )

# Calibration curves -----------------------------------------------------
# Drawn with the rms package: refit a logistic calibration model on each
# model's predicted probability, then bootstrap-correct the curve.
library(rms)
# lrm() needs a categorical response; recode the outcome as a factor.
ML_ROC$test_sad <- as.factor(as.character(ML_ROC$test_sad))
fit_XGB <- lrm(test_sad~xgb_pred_prob,data = ML_ROC,x=TRUE,y=TRUE)
fit_SVM <- lrm(test_sad~svm_pred_prob,data = ML_ROC,x=TRUE,y=TRUE)
fit_LR <- lrm(test_sad~lm_pred,data = ML_ROC,x=TRUE,y=TRUE)
# Bootstrap overfitting-corrected calibration (B = 1000 resamples each).
cal_XGB <- calibrate(fit_XGB,method="boot",B=1000)
cal_SVM <- calibrate(fit_SVM,method="boot",B=1000)
cal_LR <- calibrate(fit_LR,method="boot",B=1000)
plot(1,type="n",# draw an empty canvas to overlay the curves on
     xlim=c(0,1),# x axis range 0-1
     ylim=c(0,1),# y axis range 0-1
     xaxs="i",# axes start exactly at the origin
     yaxs="i",# axes start exactly at the origin
     xlab="Predicted Probability",# x axis label
     ylab="Observed Probability",# y axis label
     legend=FALSE,# NOTE(review): not a plot() argument; passed via "..."
     subtitle=FALSE,# NOTE(review): likewise — probably harmless, verify
     # relative font sizes for ticks and axis labels
     cex=1.5,
     cex.axis=1.5,
     cex.lab=1.5)
# Reference diagonal: perfect calibration.
abline(0,1,col="grey",lty=2,lwd=1)
# Bias-corrected calibration curve for each model.
lines(cal_LR[,c("predy","calibrated.corrected")],lty=1,lwd=1,col="red")
lines(cal_XGB[,c("predy","calibrated.corrected")],lty=1,lwd=1,col="skyblue")
lines(cal_SVM[,c("predy","calibrated.corrected")],lty=1,lwd=1,col="darkgreen")
legend(0.01,0.95,
       c("逻辑回归模型",
         "XGBoost模型",
         "SVM模型"),
       lty=c(1,1,1),
       lwd=c(1,1,1),
       col = c("red","skyblue","darkgreen"),
       bty = "n",# no box around the legend
       cex=1.5)
# Alternative one-call plots per model (unused):
#plot(cal_XGB,col="black")
#plot(cal_SVM,col="orange")
#plot(cal_LR,col="red")

# Calibration curve with ggplot2 (alternative, currently unused) ---------
library(ggplot2)
ML_cali_cur <- ML_ROC
# BUG FIX: ML_ROC$test_sad was converted to a factor for the rms section
# above.  Inside aggregate(), cbind() coerces a factor to its level CODES
# (1/2), which would shift every "observed probability" up by one.
# Recode to numeric 0/1 before averaging.
ML_cali_cur$test_sad <- as.numeric(as.character(ML_cali_cur$test_sad))
# Bin predicted probabilities into deciles.
quantiles <- quantile(ML_cali_cur$lm_pred, probs = seq(0, 1, by = 0.1))
ML_cali_cur$quantile <- cut(ML_cali_cur$lm_pred, breaks = quantiles,
                            include.lowest = TRUE)
# Mean predicted probability and observed event rate within each decile.
averages <- aggregate(cbind(test_sad, lm_pred) ~ quantile, ML_cali_cur, mean)
# Calibration plot: points per decile, linear fit, and the identity line.
ggplot(averages, aes(x = lm_pred, y = test_sad)) +
  geom_point() +
  geom_smooth(method = "lm", se = TRUE) +
  geom_abline(slope = 1, intercept = 0, linetype = "dashed") +
  labs(x = "Predicted Probability", y = "Observed Probability")


# Decision-curve analysis (rmda) ------------------------------------------
library(rmda)
# Make sure the outcome and the predicted probabilities are plain numerics.
ML_DCA_cur <- ML_ROC
ML_DCA_cur$test_sad <- as.numeric(as.character(ML_DCA_cur$test_sad))
ML_DCA_cur$lm_pred <- as.numeric(as.character(ML_DCA_cur$lm_pred))
ML_DCA_cur$xgb_pred_prob <- as.numeric(as.character(ML_DCA_cur$xgb_pred_prob))
ML_DCA_cur$svm_pred_prob <- as.numeric(as.character(ML_DCA_cur$svm_pred_prob))
# One decision curve per model.  Use the SAME threshold grid for all three
# so the curves are directly comparable (the original mixed 0.01 and 0.1).
dca_thresholds <- seq(0, 1, by = 0.01)
DCA_LR <- decision_curve(test_sad ~ lm_pred,
                         family = binomial(link = 'logit'),
                         thresholds = dca_thresholds,
                         confidence.intervals = 0.95,
                         study.design = 'cohort',
                         data = ML_DCA_cur)
DCA_XGB <- decision_curve(test_sad ~ xgb_pred_prob,
                          family = binomial(link = 'logit'),
                          thresholds = dca_thresholds,
                          confidence.intervals = 0.95,
                          study.design = 'cohort',
                          data = ML_DCA_cur)
DCA_SVM <- decision_curve(test_sad ~ svm_pred_prob,
                          family = binomial(link = 'logit'),
                          thresholds = dca_thresholds,
                          confidence.intervals = 0.95,
                          study.design = 'cohort',
                          data = ML_DCA_cur)
plot_decision_curve(list(DCA_LR, DCA_XGB, DCA_SVM),
                    curve.names = c('逻辑回归模型','XGBoost模型','SVM模型'),
                    xlim = c(0, 1),
                    cost.benefit.axis = TRUE,
                    col = c("red", "skyblue", "darkgreen"),
                    confidence.intervals = FALSE,
                    standardize = FALSE)
# Inspect the underlying net-benefit table if needed:
#DCA_LR$derived.data

# Decision-curve analysis (dcurves) ---------------------------------------
library(dcurves)
# Make sure the outcome and the predicted probabilities are plain numerics.
ML_DCA_cur <- ML_ROC
ML_DCA_cur$test_sad <- as.numeric(as.character(ML_DCA_cur$test_sad))
ML_DCA_cur$lm_pred <- as.numeric(as.character(ML_DCA_cur$lm_pred))
ML_DCA_cur$xgb_pred_prob <- as.numeric(as.character(ML_DCA_cur$xgb_pred_prob))
ML_DCA_cur$svm_pred_prob <- as.numeric(as.character(ML_DCA_cur$svm_pred_prob))
# BUG FIX: the magrittr pipe `%>%` was used here, but no package exporting
# it is attached at this point in the script (tidyverse is only loaded
# further down), so the pipeline would fail.  Use a named intermediate.
dca_fit <- dcurves::dca(
  formula = test_sad ~ lm_pred + xgb_pred_prob + svm_pred_prob,
  label = list(lm_pred = "逻辑回归模型",
               xgb_pred_prob = "XGBoost模型",
               svm_pred_prob = "SVM模型"),
  data = ML_DCA_cur
)
plot(dca_fit, smooth = TRUE) +
  ggplot2::labs(x = "Threshold Probability") +
  theme_bw()


## SHAP-based model explanation (preferred approach) ----------------------
library(shapviz)
library(SHAPforxgboost)
library(tidyverse)
library(caret)
# Recode the factor columns back to numeric 0/1 so the feature matrix
# matches the one xgb_model was trained on (data.matrix of train[, -1]).
shap_num_cols <- c("sad", "vent", "crrt", "seda", "ami", "ckd",
                   "copd", "dm", "stroke")
train[shap_num_cols] <- lapply(
  train[shap_num_cols],
  function(col) as.numeric(as.character(col))
)
# Predictors only: drop the outcome column (column 1, `sad`).  This is
# equivalent to the original hard-coded train[2:27] when train has 27
# columns, but does not break if the column count changes.
shap_values <- shapviz::shapviz(xgb_model, X_pred = as.matrix(train[, -1]))
sv_importance(shap_values, kind = "bar", show_numbers = TRUE) + theme_bw()
sv_importance(shap_values, kind = "beeswarm", show_numbers = TRUE) + theme_bw()


# SHAP explanation via SHAPforxgboost -------------------------------------
# Refit a compact XGBoost model whose SHAP values shap.values() can read.
library(shapviz)
library(SHAPforxgboost)
library(tidyverse)
library(caret)
library(pROC)
library(tibble)
library(ROCit)
# Recode the factor columns back to numeric 0/1 for the feature matrix.
refit_num_cols <- c("sad", "vent", "crrt", "seda", "ami", "ckd",
                    "copd", "dm", "stroke")
train[refit_num_cols] <- lapply(
  train[refit_num_cols],
  function(col) as.numeric(as.character(col))
)
# Use `<-` for assignment (not `=`) per R convention.
model_xgboost <- xgboost(
  data = as.matrix(train[, 2:ncol(train)]),  # predictors only
  label = train$sad,
  max_depth = 3,
  eta = 1,
  nthread = 2,
  nrounds = 10,
  objective = "binary:logistic"
)
# The feature matrix must exclude the outcome column (column 1, `sad`);
# train[, -1] replaces the hard-coded 2:27 range.
shap_values <- shap.values(xgb_model = model_xgboost,
                           X_train = as.matrix(train[, -1]))
shap_values$mean_shap_score


# SHAP dependence (partial-dependence style) plots ------------------------
library(xgboost)
library(pdp)
library(ggplot2)
library(shapviz)
# SHAP values for the training predictors; train[, -1] drops the outcome
# column (equivalent to the original hard-coded train[2:27] for a
# 27-column frame).
shap_data <- shapviz(xgb_model, X_pred = as.matrix(train[, -1]))
# One dependence plot per feature of interest.  A loop replaces fifteen
# copy-pasted calls; print() is required so the ggplot objects render
# when produced inside a loop rather than at top level.
pdp_features <- c("icustay_d", "vent", "gcs", "aniongap", "temperature",
                  "age", "Na", "inr", "platelet", "hospstay_d", "glu",
                  "resp_rate", "Mg", "bun", "dbp")
for (feat in pdp_features) {
  print(sv_dependence(shap_data, v = feat, alpha = 0.5, size = 1.5,
                      color_var = NULL, color = "#3b528b"))
}
