package com.feidee.fd.sml.algorithm.component.feature

import org.apache.spark.ml.PipelineStage
import org.apache.spark.ml.feature.DCT
import org.apache.spark.sql.DataFrame

/**
  * @Author songhaicheng
  * @Date 2019/3/25 19:02
  * @Description
  * @Reviewer dongguosheng
  */
/**
  * Parameter bundle for [[DCTEncoder]]. Inherits the common feature-encoder
  * fields from FeatureParam and adds the DCT-specific `inverse` flag.
  */
case class DCTEncoderParam(
                            override val input_pt: String,
                            override val output_pt: String,
                            override val hive_table: String,
                            override val flow_time: String,
                            override val inputCol: String,
                            override val outputCol: String,
                            override val preserveCols: String,
                            override val modelPath: String,
                            // whether to apply the inverse DCT; defaults to false
                            inverse: Boolean
                          ) extends FeatureParam {

  // No-arg constructor with sensible defaults ("input" -> "features", inverse off).
  def this() = this(null, null, null, null, "input", "features", null, null, false)

  /** Serializes all parameters to a map, appending the `inverse` flag to the base map. */
  override def toMap: Map[String, Any] = super.toMap + ("inverse" -> inverse)
}


class DCTEncoder extends AbstractFeatureEncoder[DCTEncoderParam] {

  /**
    * Builds the pipeline stages for this encoder: a single Spark ML [[DCT]]
    * transformer configured with the input/output columns and inverse flag
    * from `param`. The `data` frame is not inspected here.
    */
  override def setUp(param: DCTEncoderParam, data: DataFrame): Array[PipelineStage] = {
    val stage = new DCT()
    stage.setInputCol(param.inputCol)
    stage.setOutputCol(param.outputCol)
    stage.setInverse(param.inverse)
    Array(stage)
  }

}

object DCTEncoder {
  /** Runs a [[DCTEncoder]] against the serialized parameter string. */
  def apply(paramStr: String): Unit = {
    new DCTEncoder()(paramStr)
  }

  /**
    * CLI entry point; expects the serialized parameter string as args(0).
    */
  def main(args: Array[String]): Unit = {
    // Fail fast with a usage message instead of a bare ArrayIndexOutOfBoundsException
    // when the job is launched without arguments.
    require(args.nonEmpty, "Usage: DCTEncoder <paramStr> — expected the parameter string as the first argument")
    DCTEncoder(args(0))
  }
}
