package com.feidee.fd.sml.algorithm.component.feature

import org.apache.spark.ml.PipelineStage
import org.apache.spark.ml.feature.MaxAbsScaler
import org.apache.spark.sql.DataFrame

/**
  * Parameter holder for [[MaxAbsScaleEncoder]].
  *
  * All fields override members declared on the parent `FeatureParam`
  * (defined elsewhere in the project):
  *  - `input_pt` / `output_pt`: input and output data paths.
  *  - `hive_table` / `flow_time`: Hive table name and flow timestamp
  *    — NOTE(review): exact semantics live in `FeatureParam`; confirm there.
  *  - `inputCol` / `outputCol`: column names fed to / produced by the scaler.
  *  - `preserveCols`: columns to carry through unchanged — presumably a
  *    delimited list; verify against `FeatureParam` usage.
  *  - `modelPath`: where the fitted pipeline model is persisted.
  *
  * @author YongChen
  * @date 2019/3/22 3:18
  * @description
  * @reviewer dongguosheng
  */
case class MaxAbsScaleEncoderParam(
                                    override val input_pt: String,
                                    override val output_pt: String,
                                    override val hive_table: String,
                                    override val flow_time: String,
                                    override val inputCol: String,
                                    override val outputCol: String,
                                    override val preserveCols: String,
                                    override val modelPath: String
                                  ) extends FeatureParam {

  // No-arg constructor (likely needed for reflective instantiation /
  // JSON deserialization — TODO confirm). Defaults: all paths null,
  // inputCol = "input", outputCol = "features".
  def this() = this(null, null, null, null, "input", "features", null, null)

}


/**
  * Feature encoder that rescales a vector column into [-1, 1] by dividing
  * each feature by its maximum absolute value (Spark ML `MaxAbsScaler`).
  */
class MaxAbsScaleEncoder extends AbstractFeatureEncoder[MaxAbsScaleEncoderParam] {

  /**
    * Builds the pipeline stages for this encoder.
    *
    * @param param encoder parameters; only `inputCol` and `outputCol` are read here
    * @param data  input data frame (unused when constructing the stage)
    * @return a single-element array holding the configured scaler stage
    */
  override def setUp(param: MaxAbsScaleEncoderParam, data: DataFrame): Array[PipelineStage] = {
    val scaler = new MaxAbsScaler()
    scaler.setInputCol(param.inputCol)
    scaler.setOutputCol(param.outputCol)
    Array(scaler)
  }

}

/** Companion object providing the command-line entry point. */
object MaxAbsScaleEncoder {

  /**
    * Instantiates the encoder and runs it against the given serialized
    * parameter string (execution is handled by the inherited `apply`
    * of `AbstractFeatureEncoder` — defined elsewhere in the project).
    */
  def apply(paramStr: String): Unit = new MaxAbsScaleEncoder()(paramStr)

  /** Entry point; expects the parameter string as the first argument. */
  def main(args: Array[String]): Unit = MaxAbsScaleEncoder(args(0))

}
