package com.feidee.fd.sml.algorithm.component.ml.classification

import com.feidee.fd.sml.algorithm.component.ml.MLParam
import org.apache.spark.ml.PipelineStage
import org.apache.spark.ml.classification.GBTClassifier

/**
  * @Author songhaicheng
  * @Date 2018/08/17
  * @Email: haicheng_song@sui.com
  */
/**
  * Parameter holder for the GBDT (gradient-boosted trees) classification component.
  *
  * Extends [[MLParam]] with GBT-specific hyper-parameters; `verify()` validates the
  * ranges and `toMap` serializes everything for downstream logging/persistence.
  */
case class GBDTParam(
                      override val input_pt: String,
                      override val output_pt: String,
                      override val hive_table: String,
                      override val flow_time: String,
                      override val featuresCol: String,
                      override var labelCol: String,
                      override var predictionCol: String,
                      override val modelPath: String,
                      override val metrics: Array[String],
                      // Raw prediction output column, default "rawPrediction"
                      rawPredictionCol: String,
                      // Predicted probability output column, default "probability"
                      probabilityCol: String,
                      // Number of boosting iterations; must be > 0 to take effect,
                      // 0 (or unset) means "use the algorithm default" (see setUp)
                      numIterations: Int,
                      // Maximum tree depth, >= 0, default 5
                      maxDepth: Int,
                      // Maximum number of bins for discretizing features, >= 2, default 32
                      maxBins: Int,
                      // Checkpoint interval: -1 disables checkpointing, otherwise >= 1; default 10
                      checkpointInterval: Int,
                      // Maximum memory in MB; larger values allow more node splits per pass, >= 0, default 256
                      maxMemoryInMB: Int,
                      // Minimum number of instances each tree node must hold, >= 1, default 1
                      minInstancesPerNode: Int,
                      // Random seed, default 123456
                      seed: Long,
                      // Subsampling rate: fraction of the data used to build each tree, (0, 1], default 1.0
                      subsamplingRate: Double,
                      // Minimum information gain required for a split, >= 0.0, default 0.0
                      minInfoGain: Double,
                      // Whether to cache node ids; speeds up training of deep trees, default false
                      cacheNodeIds: Boolean,
                      // Per-class probability thresholds for multi-class decisions;
                      // length must equal the number of classes (empty = unset)
                      thresholds: Array[Double]
                    ) extends MLParam {

  /**
    * Zero-arg constructor supplying the documented default for every parameter.
    * @return a GBDTParam populated with default values
    */
  def this() = this(null, null, null, null, "features", "label", "prediction", null, new Array[String](0),
    "rawPrediction", "probability",
    0, 5, 32, 10, 256, 1, 123456, 1.0, 0.0,
    false, new Array[Double](0))

  /**
    * Validates parameter ranges on top of the base-class checks.
    * @throws IllegalArgumentException if any parameter is out of range
    */
  override def verify(): Unit = {
    super.verify()
    require(numIterations >= 0, "param numIterations can't be negative")
    require(maxDepth >= 0, "param maxDepth can't be negative")
    require(maxBins >= 2, "param maxBins must be not less than 2")
    // BUGFIX: message previously referred to "numTrees" instead of minInstancesPerNode
    require(minInstancesPerNode >= 1, "param minInstancesPerNode must be not less than 1")
    require(checkpointInterval == -1 || checkpointInterval >= 1, "param checkpointInterval must be" +
      " equals to -1 or not less than 1")
    require(maxMemoryInMB >= 0, "param maxMemoryInMB can't be negative")
    require(minInfoGain >= 0, "param minInfoGain can't be negative")
    require(subsamplingRate > 0 && subsamplingRate <= 1, "param subsamplingRate's range is (0, 1]")
  }

  /**
    * Serializes all parameters (base-class fields plus GBT-specific ones) into a map.
    * @return parameter name -> value map
    */
  override def toMap: Map[String, Any] = {
    var map = super.toMap
    map += ("rawPredictionCol" -> rawPredictionCol)
    map += ("probabilityCol" -> probabilityCol)
    map += ("numIterations" -> numIterations)
    map += ("maxDepth" -> maxDepth)
    map += ("maxBins" -> maxBins)
    map += ("checkpointInterval" -> checkpointInterval)
    map += ("maxMemoryInMB" -> maxMemoryInMB)
    map += ("minInstancesPerNode" -> minInstancesPerNode)
    map += ("seed" -> seed)
    map += ("subsamplingRate" -> subsamplingRate)
    map += ("minInfoGain" -> minInfoGain)
    map += ("cacheNodeIds" -> cacheNodeIds)
    map += ("thresholds" -> thresholds)
    map
  }

}


class GBDTComponent extends AbstractClassificationComponent[GBDTParam] {

  /**
    * Builds a Spark [[GBTClassifier]] pipeline stage configured from the given parameters.
    *
    * Optional settings (iteration count, class thresholds) are applied only when the
    * parameter carries a meaningful value; otherwise the estimator's defaults stand.
    *
    * @param param validated GBDT parameters
    * @return the configured classifier as a PipelineStage
    */
  override def setUp(param: GBDTParam): PipelineStage = {
    val classifier = new GBTClassifier()

    // Column wiring
    classifier
      .setFeaturesCol(param.featuresCol)
      .setLabelCol(param.labelCol)
      .setPredictionCol(param.predictionCol)
      .setRawPredictionCol(param.rawPredictionCol)
      .setProbabilityCol(param.probabilityCol)

    // Tree / training hyper-parameters
    classifier
      .setMaxDepth(param.maxDepth)
      .setMaxBins(param.maxBins)
      .setMinInstancesPerNode(param.minInstancesPerNode)
      .setMinInfoGain(param.minInfoGain)
      .setSubsamplingRate(param.subsamplingRate)
      .setSeed(param.seed)
      .setMaxMemoryInMB(param.maxMemoryInMB)
      .setCheckpointInterval(param.checkpointInterval)
      .setCacheNodeIds(param.cacheNodeIds)

    // 0 means "keep the estimator's default iteration count"
    if (param.numIterations > 0) {
      classifier.setMaxIter(param.numIterations)
    }
    // Empty array means "thresholds unset"
    if (param.thresholds.length > 0) {
      classifier.setThresholds(param.thresholds)
    }
    classifier
  }

}

object GBDTComponent {

  /**
    * Instantiates a component and runs it with the serialized parameter string.
    * @param paramStr JSON-like parameter payload consumed by the component
    */
  def apply(paramStr: String): Unit = {
    val component = new GBDTComponent()
    component(paramStr)
  }

  /**
    * CLI entry point; the first argument is the parameter string.
    */
  def main(args: Array[String]): Unit = GBDTComponent(args(0))

}
