package com.feidee.fdspark.transformer

import java.text.SimpleDateFormat
import java.util.Calendar

import org.apache.spark.SparkException
import org.apache.spark.ml.Transformer
import org.apache.spark.ml.attribute.NominalAttribute
import org.apache.spark.ml.param.{Param, ParamMap}
import org.apache.spark.ml.util.{DefaultParamsReadable, DefaultParamsWritable, Identifiable}
import org.apache.spark.sql.functions.udf
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.{DataFrame, Dataset}

/**
  * @Author songhaicheng
  * @Date 2019/6/18 13:19
  * @Description Parses date/time components (hour of day, day of week, day of
  *              month, month of year) out of a target column's values according
  *              to a configurable date format.
  * @Reviewer
  */
class DateParser(override val uid: String) extends Transformer with DefaultParamsWritable {

  def this() = this(Identifiable.randomUID("parse_date"))

  /** Input column holding the datetime string to parse. Default: "input". */
  final val inputCol = new Param[String](this, "inputCol", "column who holds datetime info")

  def setInputCol(value: String): this.type = set(inputCol, value)

  def getInputCol: String = $(inputCol)

  /** `SimpleDateFormat` pattern used to parse the input column. Default: "yyyy-MM-dd HH:mm:ss". */
  final val format = new Param[String](this, "format", "datetime format")

  def setFormat(value: String): this.type = set(format, value)

  def getFormat: String = $(format)

  /** Output column receiving the hour of day (0-23) as a string. */
  final val hourOfDayCol = new Param[String](this, "hourOfDayCol", "column who saves hourOfDay")

  def setHourOfDayCol(value: String): this.type = set(hourOfDayCol, value)

  def getHourOfDayCol: String = $(hourOfDayCol)

  /** Output column receiving the day of week (1 = Monday .. 7 = Sunday) as a string. */
  final val dayOfWeekCol = new Param[String](this, "dayOfWeekCol", "column who saves dayOfWeek")

  def setDayOfWeekCol(value: String): this.type = set(dayOfWeekCol, value)

  def getDayOfWeekCol: String = $(dayOfWeekCol)

  /** Output column receiving the day of month (1-31) as a string. */
  final val dayOfMonthCol = new Param[String](this, "dayOfMonthCol", "column who saves dayOfMonth")

  def setDayOfMonthCol(value: String): this.type = set(dayOfMonthCol, value)

  def getDayOfMonthCol: String = $(dayOfMonthCol)

  /** Output column receiving the month of year (1-12) as a string. */
  final val monthOfYearCol = new Param[String](this, "monthOfYearCol", "column who saves monthOfYear")

  // NOTE(review): these two accessors lack the "Col" suffix used by the sibling
  // params; kept as-is for source compatibility with existing callers.
  def setMonthOfYear(value: String): this.type = set(monthOfYearCol, value)

  def getMonthOfYear: String = $(monthOfYearCol)

  setDefault(inputCol, "input")
  // Default datetime pattern: yyyy-MM-dd HH:mm:ss
  setDefault(format, "yyyy-MM-dd HH:mm:ss")

  /**
    * Appends the configured date-part columns to the dataset. Each output column
    * is produced by a UDF that parses the input string with [[getFormat]] and
    * extracts one `Calendar` field, rendered as a string.
    *
    * @param dataset input data; must contain [[getInputCol]] (checked by
    *                [[transformSchema]])
    * @return the dataset with one extra string column per configured date part
    */
  override def transform(dataset: Dataset[_]): DataFrame = {

    transformSchema(dataset.schema)
    var result = dataset

    // SimpleDateFormat and Calendar are mutable and not thread-safe. This relies
    // on each Spark task deserializing its own copy of the UDF closure, so the
    // instances are never shared between threads — NOTE(review): confirm.
    val dateFormat = new SimpleDateFormat($(format))
    val cal = Calendar.getInstance

    // Builds a UDF extracting the given Calendar field from the input string.
    val parseDate = (signal: Int) => udf { str: String => {
      try {
        cal.setTime(dateFormat.parse(str))
      } catch {
        // Chain the original parse error as the cause instead of dropping it.
        case e: Exception => throw new SparkException(s"用 $getFormat 解析 $str 出错", e)
      }
      signal match {
        case Calendar.MONTH =>
          // Calendar.MONTH is 0-based; shift to the conventional 1-12 range.
          (cal.get(signal) + 1).toString
        case Calendar.DAY_OF_MONTH =>
          cal.get(signal).toString
        case Calendar.DAY_OF_WEEK =>
          // Calendar weeks start on Sunday (SUNDAY = 1). Rolling one day back
          // remaps the value to ISO order: Monday = 1 .. Sunday = 7. `cal` was
          // freshly set above, so mutating it here (instead of re-parsing the
          // string into a second Calendar per row) is safe and cheaper.
          cal.roll(Calendar.DAY_OF_WEEK, -1)
          cal.get(Calendar.DAY_OF_WEEK).toString
        case Calendar.HOUR_OF_DAY =>
          cal.get(signal).toString
        case _ =>
          // SparkException for consistency with the parse-failure path above.
          throw new SparkException("match failure,please check date format")
      }
    }
    }
    if (isDefined(hourOfDayCol)) {
      result = result.withColumn($(hourOfDayCol), parseDate(Calendar.HOUR_OF_DAY)(dataset($(inputCol))))
    }
    if (isDefined(dayOfWeekCol)) {
      result = result.withColumn($(dayOfWeekCol), parseDate(Calendar.DAY_OF_WEEK)(dataset($(inputCol))))
    }
    if (isDefined(dayOfMonthCol)) {
      result = result.withColumn($(dayOfMonthCol), parseDate(Calendar.DAY_OF_MONTH)(dataset($(inputCol))))
    }
    if (isDefined(monthOfYearCol)) {
      result = result.withColumn($(monthOfYearCol), parseDate(Calendar.MONTH)(dataset($(inputCol))))
    }
    result.toDF
  }

  override def copy(extra: ParamMap): Transformer = defaultCopy(extra)

  /**
    * Validates that [[inputCol]] exists and computes the output schema.
    *
    * If no output column is configured at all, all four are enabled with
    * default names of the form `<inputCol>_<part>` (side effect: sets the
    * corresponding params on this instance).
    */
  override def transformSchema(schema: StructType): StructType = {
    val inputColName = $(inputCol)
    require(schema.fieldNames.contains(inputColName), s"inputCol $inputColName doesn't exist")
    // When no output columns are requested, emit all of them under default names.
    if (!isDefined(dayOfWeekCol) && !isDefined(dayOfMonthCol) && !isDefined(monthOfYearCol) && !isDefined(hourOfDayCol)) {
      setDayOfWeekCol(s"${getInputCol}_dayOfWeek")
      setDayOfMonthCol(s"${getInputCol}_dayOfMonth")
      setMonthOfYear(s"${getInputCol}_monthOfYear")
      setHourOfDayCol(s"${getInputCol}_hourOfDay")
    }
    validateAndAddSchema(schema)
  }

  /** Appends a nominal string field for each configured output column,
    * requiring that none of them collide with an existing field. */
  protected def validateAndAddSchema(schema: StructType): StructType = {
    val inputFields = schema.fields
    var outputFields = inputFields
    if (isDefined(hourOfDayCol)) {
      // Fixed: interpolate the column name (getter), not the Param object itself.
      require(!schema.fieldNames.contains(getHourOfDayCol), s"column $getHourOfDayCol already existed")
      val attr = NominalAttribute.defaultAttr.withName(getHourOfDayCol)
      outputFields = outputFields :+ attr.toStructField()
    }
    if (isDefined(dayOfWeekCol)) {
      require(!schema.fieldNames.contains(getDayOfWeekCol), s"column $getDayOfWeekCol already existed")
      val attr = NominalAttribute.defaultAttr.withName(getDayOfWeekCol)
      outputFields = outputFields :+ attr.toStructField()
    }
    if (isDefined(dayOfMonthCol)) {
      require(!schema.fieldNames.contains(getDayOfMonthCol), s"column $getDayOfMonthCol already existed")
      val attr = NominalAttribute.defaultAttr.withName(getDayOfMonthCol)
      outputFields = outputFields :+ attr.toStructField()
    }
    if (isDefined(monthOfYearCol)) {
      require(!schema.fieldNames.contains(getMonthOfYear), s"column $getMonthOfYear already existed")
      val attr = NominalAttribute.defaultAttr.withName(getMonthOfYear)
      outputFields = outputFields :+ attr.toStructField()
    }
    StructType(outputFields)
  }
}

object DateParser extends DefaultParamsReadable[DateParser] {
  // Inherits `load(path)` from DefaultParamsReadable so persisted DateParser
  // instances can be read back (counterpart of the class's DefaultParamsWritable).
}
