package com.feidee.fd.sml.algorithm.component.feature
import org.apache.spark.ml.PipelineStage
import org.apache.spark.ml.feature.DictMapper
import org.apache.spark.sql.DataFrame

/**
  * @Author: xiongjun
  * @Date: 2019/8/27 14:52
  */
/**
  * Parameter holder for [[DictMapperEncoder]].
  *
  * Extends the common [[FeatureParam]] fields (input/output paths, hive table,
  * flow time, column names, model path) with the mapper-specific `regex` used
  * by `DictMapper` to tokenize/split the input column.
  *
  * @param regex regular expression consumed by `DictMapper`; must be non-empty
  */
case class DictMapperEncoderParam(override val input_pt: String,
                                  override val output_pt: String,
                                  override val hive_table: String,
                                  override val flow_time: String,
                                  override val inputCol: String,
                                  override val outputCol: String,
                                  override val preserveCols: String,
                                  override val modelPath: String,
                                  regex: String
                                 ) extends FeatureParam {
  // Zero-arg constructor required for reflective JSON/parameter deserialization.
  def this() = this(null, null, null, null, "input", "features", null, null, null)

  /** Validates common fields, then requires a non-empty `regex`. */
  override def verify(): Unit = {
    super.verify()
    // Fixed garbled error message ("can't for null" -> "can't be null").
    require(tool.isNotNull(regex), "regex can't be null or ''")
  }

  /** Serializes all fields, including `regex`, into a flat map. */
  override def toMap: Map[String, Any] =
    // Immutable expression instead of var + in-place += mutation.
    super.toMap + ("regex" -> regex)
}

/**
  * Feature encoder that wires a single `DictMapper` stage into the
  * generic [[AbstractFeatureEncoder]] pipeline flow.
  */
class DictMapperEncoder extends AbstractFeatureEncoder[DictMapperEncoderParam] {

  /**
    * Builds the pipeline stages for this encoder: one `DictMapper`
    * configured from the supplied parameters.
    *
    * @param param validated encoder parameters (regex, input/output columns)
    * @param data  input DataFrame (unused here; stage needs no data to configure)
    * @return a single-element array containing the configured `DictMapper`
    */
  override def setUp(param: DictMapperEncoderParam, data: DataFrame): Array[PipelineStage] = {
    val stage = new DictMapper()
    stage.setRegex(param.regex)
    stage.setInputCol(param.inputCol)
    stage.setOutputCol(param.outputCol)
    Array(stage)
  }
}
/**
  * Companion object: command-line entry point for running the
  * [[DictMapperEncoder]] as a standalone job.
  */
object DictMapperEncoder {

  /**
    * Instantiates an encoder and runs it with the serialized parameter string.
    *
    * @param paramStr serialized [[DictMapperEncoderParam]] (typically JSON)
    */
  def apply(paramStr: String): Unit = {
    val encoder = new DictMapperEncoder()
    encoder(paramStr)
  }

  /** Job entry point; expects the parameter string as the first argument. */
  def main(args: Array[String]): Unit = DictMapperEncoder(args(0))
}
