package com.feidee.fdspark.transformer

import org.apache.spark.ml.{Transformer, UnaryTransformer}
import org.apache.spark.ml.param.{Param, ParamMap}
import org.apache.spark.ml.util.{DefaultParamsReadable, DefaultParamsWritable, Identifiable}
import org.apache.spark.sql.{Column, DataFrame, Dataset, functions}
import org.apache.spark.sql.types._
import org.apache.spark.sql.functions.udf

/**
  * @Author xiongjun
  * @Date 2019/6/24 14:40
  * @Description Exceptional-value handler: copies `inputCol` into `outputCol`,
  *              replacing values judged "exceptional" with the configured
  *              `solution` replacement. When `defineExceptional` is unset,
  *              null cells are treated as the exceptional values and filled
  *              via `DataFrame.na.fill`; otherwise each cell is compared to
  *              `defineExceptional` using `operator`.
  * @Reviewer
  */
class ExceptionalValuesHandler(override val uid: String)
  extends Transformer with DefaultParamsWritable {

  def this() = this(Identifiable.randomUID("exceptional_val"))

  // The value considered exceptional. Optional: when unset, nulls are the
  // exceptional values (see transform).
  final val defineExceptional = new Param[String](this, "defineExceptional",
    "define what exceptional value is it")
  // Comparison operator applied against defineExceptional: ==, <, >, <=, >=.
  // Only == is meaningful for string columns.
  final val operator = new Param[String](this, "operator", "operational character")
  final val inputCol = new Param[String](this, "inputCol", "input column name")
  final val outputCol = new Param[String](this, "outputCol", "output column name")
  // Replacement written wherever a value is judged exceptional.
  final val solution = new Param[String](this, "solution",
    "solution for completing exceptional values")
  // Declared type of the input column: "string" / "int" / "double"
  // (constants in InputColDataType / FieldInfo — the two sets share values).
  final val inputColDataType = new Param[String](this, "excepDataType", "exception value data type")

  def setInputColDataType(value: String): this.type = set(inputColDataType, value)
  def getInputColDataType: String = $(inputColDataType)
  def setInputCol(value: String): this.type = set(inputCol, value)
  def getInputCol: String = $(inputCol)
  def setOutputCol(value: String): this.type = set(outputCol, value)
  def getOutputCol: String = $(outputCol)
  def setOperator(value: String): this.type = set(operator, value)
  def getOperator: String = $(operator)
  def setDefineExceptional(value: String): this.type = set(defineExceptional, value)

  /**
    * Returns the configured exceptional value, or null when the param was
    * never set (in which case null cells are treated as the exceptional
    * values by transform).
    */
  def getDefineExceptional: String =
    if (isDefined(defineExceptional)) $(defineExceptional) else null

  def setSolution(value: String): this.type = set(solution, value)

  def getSolution: String = $(solution)

  // Defaults: fill with the empty string, compare with equality.
  setDefault(solution, "")
  setDefault(operator, "==")
  setDefault(inputCol, "inputCol")
  setDefault(outputCol, "outputCol")

  override def transform(dataset: Dataset[_]): DataFrame = {
    var result: Dataset[_] = dataset
    if (!isDefined(defineExceptional)) {
      // No explicit exceptional value: nulls are the exceptional values.
      // Copy/cast the input column and fill nulls with the solution.
      $(inputColDataType) match {
        case InputColDataType.String =>
          result = result.withColumn($(outputCol), result($(inputCol)))
            .na.fill($(solution), Array($(outputCol)))
        case InputColDataType.Int =>
          result = result.withColumn($(outputCol), result($(inputCol)).cast(IntegerType))
            .na.fill($(solution).toInt, Array($(outputCol)))
        case InputColDataType.Double =>
          result = result.withColumn($(outputCol), result($(inputCol)).cast(DoubleType))
            .na.fill($(solution).toDouble, Array($(outputCol)))
        case other =>
          // Fail fast instead of an opaque MatchError.
          throw new IllegalArgumentException(
            s"inputColDataType need string/int/double, got: $other")
      }
    } else {
      // Hoist param values into locals so the UDF closures capture plain
      // strings/numbers rather than the whole transformer (avoids
      // serializing `this` to the executors and re-parsing per row).
      val op = $(operator)
      val exceptional = getDefineExceptional
      val replacement = getSolution
      val func = $(inputColDataType) match {
        case InputColDataType.String =>
          udf { value: String => if (value == exceptional) replacement else value }
        case InputColDataType.Int =>
          val exceptionalInt = exceptional.toInt
          val replacementInt = replacement.toInt
          udf { value: String =>
            // NOTE(review): assumes the cell is a non-null numeric string;
            // a null cell would NPE here — confirm upstream guarantees.
            val v = value.toInt
            op match {
              case "==" => if (v == exceptionalInt) replacementInt else v
              case "<"  => if (v < exceptionalInt) replacementInt else v
              case ">"  => if (v > exceptionalInt) replacementInt else v
              case "<=" => if (v <= exceptionalInt) replacementInt else v
              case ">=" => if (v >= exceptionalInt) replacementInt else v
              case _ =>
                throw new IllegalArgumentException(s"unsupported operator: $op")
            }
          }
        case InputColDataType.Double =>
          val exceptionalDouble = exceptional.toDouble
          val replacementDouble = replacement.toDouble
          udf { value: String =>
            val v = value.toDouble
            op match {
              case "==" => if (v == exceptionalDouble) replacementDouble else v
              case "<"  => if (v < exceptionalDouble) replacementDouble else v
              case ">"  => if (v > exceptionalDouble) replacementDouble else v
              case "<=" => if (v <= exceptionalDouble) replacementDouble else v
              case ">=" => if (v >= exceptionalDouble) replacementDouble else v
              case _ =>
                throw new IllegalArgumentException(s"unsupported operator: $op")
            }
          }
        case other =>
          throw new IllegalArgumentException(
            s"inputColDataType need string/int/double, got: $other")
      }
      result = result.withColumn($(outputCol), func(result.col($(inputCol))))
    }
    result.toDF()
  }

  override def copy(extra: ParamMap): Transformer = defaultCopy(extra)

  override def transformSchema(schema: StructType): StructType = {
    require(schema.fieldNames.contains($(inputCol)), s"inputCol ${$(inputCol)} doesn't exist")
    // Bug fix: return the schema WITH the output column appended. The
    // previous version discarded validateAndAddSchema's result and returned
    // the unmodified input schema, so downstream stages never saw outputCol.
    validateAndAddSchema(schema, $(outputCol))
  }

  /**
    * Validates that `colName` is free and returns `schema` with the new
    * output field appended, typed according to inputColDataType.
    */
  protected def validateAndAddSchema(schema: StructType, colName: String): StructType = {
    require(!schema.fieldNames.contains(colName), s"column $colName already existed")
    $(inputColDataType) match {
      case FieldInfo.INT =>
        schema.add(StructField(colName, IntegerType))
      case FieldInfo.DOUBLE =>
        schema.add(StructField(colName, DoubleType))
      case FieldInfo.STRING | FieldInfo.SEQUENCE_INT | FieldInfo.SEQUENCE | FieldInfo.SEQUENCE_FLOAT =>
        schema.add(StructField(colName, StringType))
      case _ =>
        throw new Exception("inputColDataType need string/int/double")
    }
  }
}

object ExceptionalValuesHandler extends DefaultParamsReadable[ExceptionalValuesHandler] {}

/**
  * Legal values for the `inputColDataType` param of
  * [[ExceptionalValuesHandler]]: the declared type of the column to clean.
  */
object InputColDataType {
  final val Double = "double"
  final val Int = "int"
  final val String = "string"
}
/**
  * Field-type name constants used when mapping a declared column type to a
  * Spark SQL DataType (see validateAndAddSchema). The scalar names overlap
  * with [[InputColDataType]] by design.
  */
object FieldInfo {
  final val ARRAY = "array"
  final val DOUBLE = "double"
  final val INT = "int"
  final val SEQUENCE = "sequence"
  final val SEQUENCE_FLOAT = "sequence_float"
  final val SEQUENCE_INT = "sequence_int"
  final val STRING = "string"
}
