package com.feidee.fd.sml.algorithm.component.ml.regression

import com.feidee.fd.sml.algorithm.component.ml.MLParam
import org.apache.spark.ml.PipelineStage
import org.apache.spark.ml.regression.GBTRegressor

/**
  * @Author: dongguosheng
  * @Date: 2019/3/20 14:08
  * @Review songhaicheng
  * @Email: guosheng_dong@sui.com
  */
case class GBDTRegressionParam(
                                override val input_pt: String,
                                override val output_pt: String,
                                override val hive_table: String,
                                override val flow_time: String,
                                override val featuresCol: String,
                                override var labelCol: String,
                                override var predictionCol: String,
                                override val modelPath: String,
                                override val metrics: Array[String],
                                // Impurity measure; only "variance" is supported for regression
                                impurity: String,
                                // Loss function; one of [squared, absolute], default "squared"
                                lossType: String,
                                // Maximum tree depth, >= 0, default 5
                                maxDepth: Int,
                                // Maximum number of bins, >= 2, default 32
                                maxBins: Int,
                                // Minimum number of instances per tree node, >= 1, default 1
                                minInstancesPerNode: Int,
                                // Minimum information gain for a split, >= 0.0, default 0.0
                                minInfoGain: Double,
                                // Maximum memory (MB) for histogram aggregation; larger values let more
                                // node splits be processed at once, >= 0, default 256
                                maxMemoryInMB: Int,
                                // Whether to cache node ids; caching speeds up training of deep trees,
                                // default false
                                cacheNodeIds: Boolean,
                                // Checkpoint interval; -1 disables checkpointing, otherwise >= 1, default 10
                                checkpointInterval: Int,
                                // Subsampling rate, range (0, 1], default 1.0
                                subsamplingRate: Double,
                                // Random seed, default 123456L
                                seed: Long,
                                // Maximum number of iterations (trees), >= 0, default 20
                                maxIter: Int,
                                // Learning rate (step size) for each optimization iteration, default 0.1
                                stepSize: Double
                              ) extends MLParam {

  /** Zero-arg constructor supplying the defaults documented on each parameter. */
  def this() = this(null, null, null, null, "features", "label", "prediction", null, new Array[String](0),
    "variance", "squared", 5, 32, 1, 0.0, 256, false, 10, 1.0, 123456L, 20, 0.1)

  /**
    * Validates the GBT-specific parameters on top of the common checks performed
    * by `MLParam.verify()`. Fails fast with an IllegalArgumentException so that a
    * bad configuration is rejected before any training starts.
    */
  override def verify(): Unit = {
    super.verify()
    val impurities = Array("variance")
    val lossTypes = Array("squared", "absolute")
    require(impurities.contains(impurity.toLowerCase), s"param impurity accepts ${impurities.mkString("[", ",", "]")}," +
      s" but has $impurity")
    require(lossTypes.contains(lossType.toLowerCase), s"param lossType accepts ${lossTypes.mkString("[", ",", "]")}," +
      s" but has $lossType")
    require(maxDepth >= 0, "param maxDepth can't be negative")
    require(maxBins >= 2, "param maxBins must be not less than 2")
    require(minInstancesPerNode >= 1, "param minInstancesPerNode must be not less than 1")
    require(minInfoGain >= 0.0, "param minInfoGain can't be negative")
    require(maxMemoryInMB >= 0, "param maxMemoryInMB can't be negative")
    require(maxIter >= 0, "param maxIter can't be negative")
    // Spark allows -1 to disable checkpointing; any other value must be >= 1.
    // (The original condition rejected -1, contradicting its own message.)
    require(checkpointInterval == -1 || checkpointInterval >= 1, "param checkpointInterval must be" +
      " equals to -1 or not less than 1")
    require(subsamplingRate > 0 && subsamplingRate <= 1, "param subsamplingRate's range is (0, 1]")
  }

  /**
    * Flattens every GBT parameter into the map returned by `MLParam.toMap`.
    * Includes maxMemoryInMB (missing before) and adds stepSize exactly once
    * (it was previously inserted twice).
    */
  override def toMap: Map[String, Any] = {
    var map = super.toMap
    map += ("impurity" -> impurity)
    map += ("lossType" -> lossType)
    map += ("maxDepth" -> maxDepth)
    map += ("maxBins" -> maxBins)
    map += ("minInstancesPerNode" -> minInstancesPerNode)
    map += ("minInfoGain" -> minInfoGain)
    map += ("maxMemoryInMB" -> maxMemoryInMB)
    map += ("cacheNodeIds" -> cacheNodeIds)
    map += ("seed" -> seed)
    map += ("subsamplingRate" -> subsamplingRate)
    map += ("checkpointInterval" -> checkpointInterval)
    map += ("maxIter" -> maxIter)
    map += ("stepSize" -> stepSize)
    map
  }
}

/**
  * Regression component wrapping Spark ML's [[GBTRegressor]].
  * Translates a validated [[GBDTRegressionParam]] into a configured pipeline stage.
  */
class GBDTRegressionComponent extends AbstractRegressionComponent[GBDTRegressionParam] {

  /**
    * Builds the GBT estimator stage from the given parameters.
    * Each setter mirrors one field of [[GBDTRegressionParam]]; no value is derived
    * or defaulted here — validation has already happened in `param.verify()`.
    */
  override def setUp(param: GBDTRegressionParam): PipelineStage = {
    val regressor = new GBTRegressor()
    // Column wiring.
    regressor
      .setFeaturesCol(param.featuresCol)
      .setLabelCol(param.labelCol)
      .setPredictionCol(param.predictionCol)
    // Tree-construction parameters.
    regressor
      .setImpurity(param.impurity)
      .setMaxDepth(param.maxDepth)
      .setMaxBins(param.maxBins)
      .setMinInstancesPerNode(param.minInstancesPerNode)
      .setMinInfoGain(param.minInfoGain)
      .setMaxMemoryInMB(param.maxMemoryInMB)
      .setCacheNodeIds(param.cacheNodeIds)
      .setCheckpointInterval(param.checkpointInterval)
    // Boosting parameters.
    regressor
      .setLossType(param.lossType)
      .setStepSize(param.stepSize)
      .setSubsamplingRate(param.subsamplingRate)
      .setMaxIter(param.maxIter)
      .setSeed(param.seed)
  }

}

object GBDTRegressionComponent {

  /** Runs a freshly constructed component against the serialized parameter string. */
  def apply(paramStr: String): Unit = new GBDTRegressionComponent()(paramStr)

  /** Entry point: `args(0)` carries the serialized parameter string. */
  def main(args: Array[String]): Unit = GBDTRegressionComponent(args(0))
}