package com.feidee.fd.sml.algorithm.component.feature

import org.apache.spark.ml.PipelineStage
import org.apache.spark.ml.feature.PolynomialExpansion
import org.apache.spark.sql.DataFrame

/**
  * @Author songhaicheng
  * @Date 2019/3/20 13:47
  * @Description
  * @Reviewer dongguosheng
  */
/**
  * Parameters for [[PolynomialExpander]].
  *
  * Inherits the common feature-component parameters (input/output paths, hive
  * table, flow time, input/output columns, preserved columns, model path) from
  * [[FeatureParam]] and adds the polynomial expansion degree.
  */
case class PolynomialExpanderParam(
                                    override val input_pt: String,
                                    override val output_pt: String,
                                    override val hive_table: String,
                                    override val flow_time: String,
                                    override val inputCol: String,
                                    override val outputCol: String,
                                    override val preserveCols: String,
                                    override val modelPath: String,
                                    // Degree of the polynomial expansion; must be >= 1
                                    // (a degree of 1 performs no expansion). Defaults to 2.
                                    degree: Int
                                  ) extends FeatureParam {

  // No-arg constructor for default instantiation: default column names
  // "input"/"features" and degree = 2; remaining fields unset (null).
  def this() = this(null, null, null, null, "input", "features", null, null, 2)

  /** Validates inherited parameters, then fails fast if `degree` is below 1. */
  override def verify(): Unit = {
    super.verify()
    require(degree >= 1, "多项式次数必须大于或等于 1")
  }

  /** Serializes all parameters — including `degree` — into a map. */
  override def toMap: Map[String, Any] =
    // Immutable map addition instead of a local var with reassignment.
    super.toMap + ("degree" -> degree)

}


/**
  * Feature encoder that expands a vector input column into its polynomial
  * feature space via Spark ML's [[PolynomialExpansion]] transformer.
  */
class PolynomialExpander extends AbstractFeatureEncoder[PolynomialExpanderParam] {

  /**
    * Builds the pipeline stages for this encoder.
    *
    * @param param component parameters (input/output columns and expansion degree)
    * @param data  input data frame; not consulted here — the base contract
    *              supplies it, but the stage is configured from `param` alone
    * @return a single-stage array performing the polynomial expansion
    */
  override def setUp(param: PolynomialExpanderParam, data: DataFrame): Array[PipelineStage] = {
    val expansion = new PolynomialExpansion()
    expansion.setInputCol(param.inputCol)
    expansion.setOutputCol(param.outputCol)
    expansion.setDegree(param.degree)
    Array(expansion)
  }

}

/**
  * Companion object: construction helper and CLI entry point for the
  * polynomial-expansion component.
  */
object PolynomialExpander {

  /** Runs a fresh encoder instance against the serialized parameter string. */
  def apply(paramStr: String): Unit = {
    val encoder = new PolynomialExpander()
    encoder(paramStr)
  }

  /** CLI entry point; `args(0)` is the serialized parameter string. */
  def main(args: Array[String]): Unit = {
    PolynomialExpander.apply(args(0))
  }

}
