package org.apache.spark.ml.feature

import org.apache.spark.SparkException
import org.apache.spark.ml.Transformer
import org.apache.spark.ml.linalg.{Vector, VectorUDT, Vectors}
import org.apache.spark.ml.param.shared.{HasInputCol, HasOutputCol}
import org.apache.spark.ml.param.{Param, ParamMap, ParamValidators}
import org.apache.spark.ml.util.{DefaultParamsReadable, DefaultParamsWritable, Identifiable}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.{DoubleType, NumericType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Dataset}

/**
  * Column arithmetic: applies an arithmetic operator between a column and a constant factor.
  *
  * @author songhaicheng
  * @date 2019/12/24 17:14
  * @reviewer
  */
/**
 * Transformer that applies an element-wise arithmetic operation (`+`, `-`, `*`, `/`)
 * between the input column and a constant factor.
 *
 * The input column may be any numeric type or an ML [[Vector]]. For numeric input the
 * result column is always `DoubleType` (the UDF computes in `Double`); for vector input
 * the result is a dense vector with the operation applied to every element.
 */
class Calculator(override val uid: String) extends Transformer with HasInputCol with HasOutputCol with DefaultParamsWritable {

  def this() = this(Identifiable.randomUID("calculator"))

  /** Sets the input column name. */
  def setInputCol(value: String): this.type = set(inputCol, value)

  /** Sets the output column name; must not already exist in the schema. */
  def setOutputCol(value: String): this.type = set(outputCol, value)

  /**
   * Constant factor combined with the input column. Stored as a string but must parse
   * as a `Double` before [[transform]] is called (the default `""` does not parse).
   */
  final val factor: Param[String] = new Param[String](this, "factor", "用来与 input 列进行操作的因子参数")

  def getFactor: String = $(factor)

  def setFactor(value: String): this.type = set(factor, value)

  /** Arithmetic operator to apply; one of [[Calculator.supportedOperators]] (`+ - * /`). */
  final val operator: Param[String] = new Param[String](this, "operator", "操作符，支持加减乘除",
    ParamValidators.inArray(Calculator.supportedOperators))

  def getOperator: String = $(operator)

  def setOperator(value: String): this.type = set(operator, value)

  /** Records whether the input column was numeric or vector; set automatically by [[transformSchema]]. */
  final val inputDataType = new Param[String](this, "inputDataType", "input col's dataType, will be set automatically")

  def getInputDataType: String = $(inputDataType)

  private[feature] def setInputDataType(value: String): this.type = set(inputDataType, value)

  // NOTE: the default factor "" cannot be parsed by toDouble — callers must call
  // setFactor with a numeric string before transform, or a NumberFormatException is thrown.
  setDefault(factor -> "", operator -> Calculator.OP_PLUS)


  override def transform(dataset: Dataset[_]): DataFrame = {
    transformSchema(dataset.schema, logging = true)
    // Throws NumberFormatException if factor was never set to a parseable number.
    val fac = $(factor).toDouble

    // Resolve the scalar operation once and share it between the numeric and vector
    // UDFs, instead of duplicating the operator dispatch for each input kind.
    val op: Double => Double = $(operator) match {
      case Calculator.OP_PLUS  => _ + fac
      case Calculator.OP_MINUS => _ - fac
      case Calculator.OP_MUL   => _ * fac
      case Calculator.OP_DIV   => _ / fac // fac == 0 follows Double semantics (Infinity/NaN)
    }
    val numOp = udf(op)
    val vecOp = udf { vec: Vector => Vectors.dense(vec.toArray.map(op)) }

    dataset.schema($(inputCol)).dataType match {
      case _: NumericType =>
        dataset.withColumn($(outputCol), numOp(dataset.col($(inputCol))))
      case _: VectorUDT =>
        dataset.withColumn($(outputCol), vecOp(dataset.col($(inputCol))))
    }
  }

  override def copy(extra: ParamMap): Calculator = defaultCopy(extra)

  override def transformSchema(schema: StructType): StructType = {
    val inputField = schema($(inputCol))
    // Validate the input column type, record it in inputDataType, and derive the output
    // type. The numeric UDF computes in Double, so a numeric input always produces a
    // DoubleType output (the previous behavior of echoing the input type was wrong for
    // e.g. IntegerType columns); a vector input produces a vector output.
    val outputDataType = inputField.dataType match {
      case _: NumericType =>
        setInputDataType(InputDataType.NUMBER)
        DoubleType
      case dt: VectorUDT =>
        setInputDataType(InputDataType.VECTOR)
        dt
      case _ => throw new SparkException("inputCol has to be Numeric or Vector")
    }
    require(!schema.fieldNames.contains($(outputCol)),
      s"Output column ${$(outputCol)} already exists.")
    // Nulls in a nullable input pass through the UDF as nulls, so mirror the input's nullability.
    val outputFields = schema.fields :+ StructField($(outputCol), outputDataType, nullable = inputField.nullable)
    StructType(outputFields)
  }

}

/**
 * Companion object for [[Calculator]]: shared operator / factor-type constants and
 * MLReadable support for loading persisted instances.
 */
object Calculator extends DefaultParamsReadable[Calculator] {

  /** Factor kinds (distinguishing a column-valued factor from a constant one). */
  private[feature] val COLUMN_FACTOR: String = "column"
  private[feature] val CONSTANT_FACTOR: String = "constant"
  private[feature] val supportedFactorTypes: Array[String] = Array(COLUMN_FACTOR, CONSTANT_FACTOR)

  /** Arithmetic operators accepted by the `operator` param. */
  private[feature] val OP_PLUS: String = "+"
  private[feature] val OP_MINUS: String = "-"
  private[feature] val OP_MUL: String = "*"
  private[feature] val OP_DIV: String = "/"
  private[feature] val supportedOperators: Array[String] = Array(OP_PLUS, OP_MINUS, OP_MUL, OP_DIV)

  /** Loads a saved [[Calculator]]; overridden only to narrow the return type for Java callers. */
  override def load(path: String): Calculator = super.load(path)
}

/** String tags recorded in [[Calculator]]'s `inputDataType` param. */
object InputDataType {
  // Input column is a scalar numeric type.
  final val NUMBER = "number"
  // Input column is an ML Vector.
  final val VECTOR = "vector"
}
