library(tidyverse)
library(corrplot)

# Load data ----
# Combine the MASS training and test Pima sets into a single data frame.
# BUG FIX: the original only loaded Pima.te, so Pima.tr on the next line was
# undefined unless MASS happened to be attached already.
data(Pima.tr, Pima.te, package = "MASS")
pima <- rbind(Pima.tr, Pima.te)
str(pima)

# Long format for faceted plotting. pivot_longer() supersedes gather();
# names_to yields a character column, matching gather(factor_key = FALSE).
pima.melt <- pivot_longer(pima, cols = 1:7,
                          names_to = "variable", values_to = "value")
str(pima.melt)

# One boxplot of each raw predictor by diabetes status, faceted per variable.
ggplot(pima.melt) +
  aes(x = type, y = value) +
  geom_boxplot() +
  facet_wrap(vars(variable), ncol = 2)

# Scale ----
# Standardize the 7 numeric predictors; `type` (column 8) is the outcome and
# is re-attached unscaled.
pima.scale <- data.frame(scale(pima[, -8]))
str(pima.scale)
pima.scale$type <- pima$type

# pivot_longer() supersedes gather(); selecting -type by name is more robust
# than the positional -8 used before.
pima.scale.melt <- pivot_longer(pima.scale, cols = -type,
                                names_to = "variable", values_to = "value")
str(pima.scale.melt)

# Same faceted boxplots, now on a comparable (standardized) scale.
ggplot(pima.scale.melt, aes(x = type, y = value)) +
  geom_boxplot() +
  facet_wrap(~ variable, ncol = 2)

# Display the correlation among the scaled predictors (type excluded)
predictor_cor <- cor(pima.scale[, -8])
corrplot.mixed(predictor_cor)

# Class balance of the outcome
table(pima.scale$type)

# KNN ----
library(mlr3)
# BUG FIX: "classif.kknn" is registered by mlr3learners; without attaching it,
# lrn("classif.kknn") fails. (Requires the kknn package to be installed.)
library(mlr3learners)

# Classification task: predict `type` from the scaled predictors.
knn.task <- TaskClassif$new(id = "pima.knn", backend = pima.scale, target = "type")
knn.lrn <- lrn("classif.kknn")
cv <- rsmp("cv", folds = 10)

# 10-fold cross-validated performance of the untuned learner.
rr <- resample(knn.task, knn.lrn, cv, store_models = TRUE)
rr$prediction()
rr$aggregate()

# Current hyperparameter values (empty until tuned) and learner summary.
as.data.table(knn.lrn$param_set$values)
knn.lrn

# Optimizing ----
library(paradox)
library(mlr3tuning)

# Search space via the current paradox sugar; the old
# ParamSet$new(list(ParamInt$new(...), ...)) constructors are deprecated.
tune_ps <- ps(
  k = p_int(lower = 1, upper = 20),
  distance = p_dbl(lower = 1, upper = 2),
  kernel = p_fct(levels = c("triangular", "epanechnikov"))
)

measure <- msr("classif.ce")
# Budget: stop after 100 evaluations. trm() replaces the defunct term().
# NOTE(review): the name `eval20` suggests a 20-eval budget but n_evals is
# 100 — name kept so downstream code still works; confirm intended budget.
eval20 <- trm("evals", n_evals = 100)


inst <- TuningInstance$new(
  task = knn.task,
  learner = knn.lrn,
  resampling = cv,
  measures = measure,
  param_set = tune_ps,
  terminator = eval20
)

# Grid search: up to `resolution` points per numeric parameter.
tuner <- tnr("grid_search", resolution = 20)

# $optimize() replaces the removed $tune() method.
result <- tuner$optimize(inst)

# Apply the best configuration to the learner.
# `result_learner_param_vals` replaces the old `inst$result$params`, which
# no longer exists in the current mlr3tuning API.
knn.lrn$param_set$values <- inst$result_learner_param_vals
knn.lrn

# Inspect available prediction types, then request class probabilities.
knn.lrn$predict_types
knn.lrn$predict_type <- "prob"

# Fit on the full task and predict on it; mlr3 learners return themselves
# (invisibly) from $train(), so the calls chain.
pred <- knn.lrn$train(knn.task)$predict(knn.task)
pred$confusion

# Visualize the predictions and the ROC curve.
library(mlr3viz)
autoplot(pred)
autoplot(pred, type = "roc")
