package com.feidee.fd.sml.algorithm.component.feature

import org.apache.spark.ml.PipelineStage
import org.apache.spark.ml.feature.{Calculator => Cal}
import org.apache.spark.sql.DataFrame

/**
  * @author songhaicheng
  * @date 2019/12/25 16:37
  * @description Calculator feature component: builds a pipeline stage that
  *              applies an arithmetic operator with a constant factor to a
  *              column, controlled by CalculatorParam.
  * @reviewer
  */
/**
  * Parameters for the [[Calculator]] feature component.
  *
  * Besides the common feature parameters inherited from FeatureParam, it
  * carries the constant factor operand and the arithmetic operator that the
  * underlying pipeline stage applies to the input column.
  */
case class CalculatorParam(
                            override val input_pt: String,
                            override val output_pt: String,
                            override val hive_table: String,
                            override val flow_time: String,
                            override val inputCol: String,
                            override val outputCol: String,
                            override val preserveCols: String,
                            override val modelPath: String,
                            // constant factor operand
                            factor: String,
                            // arithmetic operator to apply
                            operator: String
                          ) extends FeatureParam {

  // Zero-arg constructor with "input"/"output" as default column names.
  // NOTE(review): presumably required for reflective parameter binding —
  // confirm against the param-parsing framework.
  def this() = this(null, null, null, null, "input", "output", null, null, null, null)

  /** Validates the inherited params, then requires factor and operator to be set. */
  override def verify(): Unit = {
    super.verify()
    require(tool.isNotNull(factor), "param factor can't be null")
    require(tool.isNotNull(operator), "param operator can't be null")
  }

  /**
    * Serializes all parameters, including the component-specific ones.
    * Immutable construction replaces the previous mutable `var map` / `+=`.
    */
  override def toMap: Map[String, Any] =
    super.toMap ++ Map("factor" -> factor, "operator" -> operator)
}

/**
  * Feature encoder that assembles a single Calculator pipeline stage from the
  * supplied parameters (input/output columns plus factor and operator).
  */
class Calculator extends AbstractFeatureEncoder[CalculatorParam] {

  /** Builds the pipeline stages; the input DataFrame is not inspected here. */
  override def setUp(param: CalculatorParam, data: DataFrame): Array[PipelineStage] = {
    val stage: PipelineStage = new Cal()
      .setInputCol(param.inputCol)
      .setOutputCol(param.outputCol)
      .setFactor(param.factor)
      .setOperator(param.operator)
    Array(stage)
  }
}


/**
  * Companion entry point for running the Calculator component standalone.
  */
object Calculator {

  /** Runs a fresh encoder instance against the serialized parameter string. */
  def apply(paramStr: String): Unit = new Calculator()(paramStr)

  /** CLI entry point; the first argument is the serialized parameter string. */
  def main(args: Array[String]): Unit = Calculator(args(0))
}