package com.feidee.fd.sml.algorithm.component.feature

import org.apache.spark.ml.PipelineStage
import org.apache.spark.ml.feature.VectorIndexer
import org.apache.spark.sql.DataFrame

/**
  * @Author tangjinyuan
  * @Date 2019/03/21 16:33
  * @Description VectorIndexerEncoder  特征组件 帮助索引Vectors的数据集中的分类特征。
  * @Reviewer dongguosheng
  */
/**
  * Parameter holder for [[VectorIndexEncoder]].
  *
  * Inherits the common feature-component parameters (paths, hive table,
  * flow time, input/output columns, preserved columns, model path) from
  * `FeatureParam` and adds the VectorIndexer-specific threshold.
  *
  * @param maxCategories threshold used to decide whether a vector slot is
  *                      categorical: a feature with at most this many distinct
  *                      values is treated as categorical, otherwise as
  *                      continuous. Must be >= 2; defaults to 20.
  */
case class VectorIndexEncoderParam(
                                    override val input_pt: String,
                                    override val output_pt: String,
                                    override val hive_table: String,
                                    override val flow_time: String,
                                    override val inputCol: String,
                                    override val outputCol: String,
                                    override val preserveCols: String,
                                    override val modelPath: String,
                                    maxCategories: Int
                                  ) extends FeatureParam {

  // Zero-arg constructor supplying the component defaults.
  def this() = this(null, null, null, null, "input", "features", null, null, 20)

  /** Validates the inherited parameters, then the categorical threshold. */
  override def verify(): Unit = {
    super.verify()
    require(maxCategories >= 2, "maxCategories must be greater or equal to 2!")
  }

  /** Serializes all parameters, extending the base map with `maxCategories`. */
  override def toMap: Map[String, Any] =
    super.toMap + ("maxCategories" -> maxCategories)
}


/**
  * Feature component wrapping Spark ML's [[VectorIndexer]], which indexes
  * categorical features inside a `Vector` column.
  */
class VectorIndexEncoder extends AbstractFeatureEncoder[VectorIndexEncoderParam] {

  /**
    * Builds the pipeline stages for this component: a single configured
    * [[VectorIndexer]]. The `data` argument is not inspected here; fitting
    * happens later in the pipeline.
    */
  override def setUp(param: VectorIndexEncoderParam, data: DataFrame): Array[PipelineStage] =
    Array(
      new VectorIndexer()
        .setMaxCategories(param.maxCategories)
        .setInputCol(param.inputCol)
        .setOutputCol(param.outputCol)
    )
}

/** Companion providing the string-parameter entry points for this component. */
object VectorIndexEncoder {

  /** Runs a fresh encoder instance against the serialized parameter string. */
  def apply(paramStr: String): Unit = {
    val encoder = new VectorIndexEncoder()
    encoder(paramStr)
  }

  /** CLI entry point; expects the parameter string as the first argument. */
  def main(args: Array[String]): Unit = VectorIndexEncoder(args(0))
}


