package com.feidee.fd.sml.algorithm.component.feature

import org.apache.spark.ml.PipelineStage
import org.apache.spark.ml.feature.{CountVectorizer, IDF}
import org.apache.spark.sql.DataFrame

/**
  * @author songhaicheng (haicheng_song@sui.com)
  * @since 2018/08/30
  */
/**
  * Parameter holder for [[TfIdfEncoder]].
  *
  * Inherits the common I/O and column parameters from `FeatureParam`
  * and adds the two TF-IDF-specific knobs.
  *
  * @param numFeatures maximum vocabulary size to keep, must be > 0
  *                    (algorithm default: 2^18)
  * @param minDocFreq  minimum number of documents a term must appear in,
  *                    must be >= 0 (algorithm default: 0)
  */
case class TfIdfEncoderParam(
                              override val input_pt: String,
                              override val output_pt: String,
                              override val hive_table: String,
                              override val flow_time: String,
                              override val inputCol: String,
                              override val outputCol: String,
                              override val preserveCols: String,
                              override val modelPath: String,
                              // Vocabulary size to keep, > 0 (algorithm default: 2^18).
                              numFeatures: Int,
                              // Minimum document frequency, >= 0 (algorithm default: 0).
                              minDocFreq: Int
                            ) extends FeatureParam {

  /** Zero-arg constructor used for reflective/default instantiation. */
  def this() = this(null, null, null, null, "input", "features", null, null, 1 << 18, 0)

  /**
    * Validates the TF-IDF parameters on top of the base checks.
    *
    * @throws IllegalArgumentException if numFeatures <= 0 or minDocFreq < 0
    */
  override def verify(): Unit = {
    super.verify()
    require(numFeatures > 0, "param numFeatures must be greater than 0")
    require(minDocFreq >= 0, "param minDocFreq can't be negative")
  }

  /** Serializes all parameters, including the TF-IDF-specific ones, to a map. */
  override def toMap: Map[String, Any] =
    super.toMap ++ Map(
      "numFeatures" -> numFeatures,
      "minDocFreq" -> minDocFreq
    )
}


/**
  * TF-IDF feature encoder.
  *
  * Builds a two-stage Spark ML pipeline: a [[CountVectorizer]] that turns the
  * token array in `param.inputCol` into a term-frequency vector (vocabulary
  * capped at `param.numFeatures`), followed by an [[IDF]] stage that rescales
  * the counts and writes the result to `param.outputCol`.
  */
class TfIdfEncoder extends AbstractFeatureEncoder[TfIdfEncoderParam] {

  override def setUp(param: TfIdfEncoderParam, data: DataFrame): Array[PipelineStage] = {
    val cols = data.schema.fieldNames

    // Term-frequency stage. The intermediate TF column name is de-duplicated
    // against the input DataFrame's existing columns to avoid a collision.
    val vectorizer = new CountVectorizer()
      .setInputCol(param.inputCol)
      .setOutputCol(tool.renameDuplicatedColName(s"${param.inputCol}_tf", cols))
      .setVocabSize(param.numFeatures)

    // Inverse-document-frequency stage. NOTE(review): minDocFreq is applied at
    // the IDF stage (terms below the threshold get weight 0) rather than via
    // CountVectorizer.setMinDF, which would drop them from the vocabulary —
    // confirm this is the intended semantics.
    val idf = new IDF()
      .setInputCol(vectorizer.getOutputCol)
      .setOutputCol(param.outputCol)
      .setMinDocFreq(param.minDocFreq)

    Array(vectorizer, idf)
  }

}

/** Companion entry point for running [[TfIdfEncoder]] standalone. */
object TfIdfEncoder {

  /** Instantiates an encoder and executes it with the serialized parameter string. */
  def apply(paramStr: String): Unit = {
    new TfIdfEncoder()(paramStr)
  }

  /**
    * CLI entry point; expects the serialized parameter string as the first argument.
    *
    * @throws IllegalArgumentException if no argument is supplied
    */
  def main(args: Array[String]): Unit = {
    // Fail fast with a usage hint instead of an ArrayIndexOutOfBoundsException.
    require(args.nonEmpty, "usage: TfIdfEncoder <paramStr>")
    TfIdfEncoder(args(0))
  }

}
