package com.zeta.edw.venus

import org.apache.spark.sql.{DataFrame, SaveMode}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.{DateType, StructType, TimestampType}
import org.json4s.DefaultFormats
import org.json4s.jackson.JsonMethods.parse

// NOTE(review): a trait extending App is unusual (App is meant for entry-point
// objects and has initialization-order pitfalls); presumably the concrete job
// object mixes this in — confirm before changing the hierarchy.
trait ExtractData extends App {

  // json4s formats, historically used to parse incremental-extract state read
  // from zookeeper ({"last_value": ..., "format": ...}); kept for compatibility.
  private implicit val formats: DefaultFormats.type = org.json4s.DefaultFormats

  /**
    * Write the extracted DataFrame into the target Hive table, partitioned by
    * a derived `dt` (yyyyMMdd) column computed from the configured partition
    * column.
    *
    * The partition column is located case-insensitively; if it is not already
    * a timestamp it is parsed with the configured datetime format. Schema
    * differences between the source and the existing Hive table are resolved
    * via [[schemaEvolution]] before appending.
    *
    * Note: an earlier revision shifted `dt` by shift hours (>= 08:00 belongs
    * to the current day) and derived a `shift_name` column; that logic was
    * retired and `dt` is now a plain calendar date.
    *
    * @param config job configuration (must be an [[ExtractDataConfig]])
    * @param df     the freshly extracted source data
    * @throws IllegalArgumentException if the partition column is absent
    */
  override def process(config: Config, df: DataFrame): Unit = {
    val appConf = config.asInstanceOf[ExtractDataConfig]
    val partitionColumn = appConf.partitionColumn.toLowerCase()

    // Locate the partition column in the source schema, ignoring case.
    val fields = df.schema.fields.filter(_.name.toLowerCase() == partitionColumn)
    if (fields.isEmpty) {
      throw new IllegalArgumentException(s"partition column $partitionColumn not exists")
    }

    // Ensure the partition column is a timestamp so date_format works,
    // parsing string/other representations with the configured format.
    val normalized = fields.head.dataType match {
      case TimestampType => df
      case _ => df.withColumn(partitionColumn, to_timestamp(col(partitionColumn), appConf.datetimeFormat))
    }
    // Derive the Hive partition value: calendar date of the partition column.
    val records = normalized.withColumn("dt", date_format(col(partitionColumn), "yyyyMMdd"))

    val evolutionDf = schemaEvolution(config, records)
    evolutionDf.write.mode(SaveMode.Append)
      .format("parquet")
      .option("compression", "snappy")
      .partitionBy("dt")
      .saveAsTable(s"${config.hiveDatabase}.${config.hiveTable}")
  }

  /**
    * Build the source-side (JDBC/Oracle) WHERE clause for this extraction run.
    *
    * History: incremental state used to be tracked in zookeeper; that was
    * removed, and each run now extracts yesterday's data from 00:00 to 23:00
    * (the `defaultCheckStart23`/`defaultCheckEnd23` window) unless an explicit
    * check range is configured, e.g.:
    *   PERIOD_DATE >= '20180501000000' AND INTERFACE_TIME < '20180501152418'
    *
    * An optional range column adds a bounding `>= start AND <= end` pair,
    * skipped when it coincides with the check column.
    *
    * @param config job configuration (must be an [[ExtractDataConfig]])
    * @param schema source table schema, used to decide TO_DATE wrapping
    * @return the predicate fragments joined with " and "
    */
  override def predicates(config: Config, schema: StructType): String = {
    val appConf = config.asInstanceOf[ExtractDataConfig]
    val conditions = scala.collection.mutable.ArrayBuffer.empty[String]

    val checkColumn = appConf.checkColumn.toUpperCase()
    val checkColumnIsTimestamp = isTimestamp(schema, checkColumn)

    val partitionColumn = appConf.partitionColumn.toUpperCase()
    val partitionColumnIsTimestamp = isTimestamp(schema, partitionColumn)

    val checkStartOption = appConf.checkStart
    val checkEndOption = appConf.checkEnd
    val defaultCheckStart23 = appConf.defaultCheckStart23
    val defaultCheckEnd23 = appConf.defaultCheckEnd23
    val datetimeFormat = appConf.datetimeFormat
    val formatMapping = appConf.datetimeFormatMapping

    if (checkStartOption.isEmpty || checkEndOption.isEmpty) {
      // Regular incremental run: yesterday 00:00 .. 23:00 window.
      val start = columnConditionValue(partitionColumnIsTimestamp, defaultCheckStart23, datetimeFormat, formatMapping)
      val end = columnConditionValue(checkColumnIsTimestamp, defaultCheckEnd23, datetimeFormat, formatMapping)
      conditions += s"$partitionColumn >= $start"
      conditions += s"$checkColumn < $end"
    } else {
      // Backfill: extract history data between the explicit bounds.
      val start = columnConditionValue(checkColumnIsTimestamp, checkStartOption.get, datetimeFormat, formatMapping)
      val end = columnConditionValue(checkColumnIsTimestamp, checkEndOption.get, datetimeFormat, formatMapping)
      conditions += s"$checkColumn >= $start"
      conditions += s"$checkColumn < $end"
    }

    // Optional extra bounding column (inclusive on both ends); redundant when
    // it is the check column itself, which already has a range.
    appConf.rangeColumn.foreach(column => {
      val rangeColumn = column.toUpperCase
      if (rangeColumn != checkColumn) {
        val rangeColumnIsTimestamp = isTimestamp(schema, rangeColumn)
        val start = columnConditionValue(rangeColumnIsTimestamp, rangeStart(appConf), datetimeFormat, formatMapping)
        val end = columnConditionValue(rangeColumnIsTimestamp, rangeEnd(appConf), datetimeFormat, formatMapping)
        conditions += s"$rangeColumn >= $start"
        conditions += s"$rangeColumn <= $end"
      }
    })

    logger.info(s"query condition: ${conditions.mkString(" and ")}")
    conditions.mkString(" and ")
  }

  /**
    * handle inconsistent schema between oracle and hive
    *
    * if table in oracle add a column, add it to hive table
    * if table in oracle drop a column, set this column's value to null to current DataFrame
    *
    * the column in hive table can not be removed, due to the parquet data format
    *
    * @param config job configuration naming the target Hive database/table
    * @param df     the incoming batch whose schema may have drifted
    * @return a DataFrame whose columns are a superset-aligned match of the table
    */
  private def schemaEvolution(config: Config, df: DataFrame): DataFrame = {
    val spark = df.sparkSession
    val tableExists = spark.catalog.tableExists(config.hiveDatabase, config.hiveTable)
    if (!tableExists) {
      // First load: saveAsTable will create the table from df's schema.
      df
    } else {
      val tableSchema = spark.table(s"${config.hiveDatabase}.${config.hiveTable}").schema
      val dfSchema = df.schema

      val dfColumnsType = dfSchema.fields.map(f => (f.name, f.dataType.catalogString)).toMap
      val tableColumnsType = tableSchema.fields.map(f => (f.name, f.dataType)).toMap

      val tableColumns = tableSchema.fields.map(_.name).toSet
      val dfColumns = dfSchema.fields.map(_.name).toSet

      val tableMissingColumns = dfColumns.diff(tableColumns)
      val dfMissingColumns = tableColumns.diff(dfColumns)

      // Source grew a column: evolve the Hive table in place (parquet tables
      // only support additive schema changes).
      if (tableMissingColumns.nonEmpty) {
        val addColumns = tableMissingColumns.map(c => s"`$c` ${dfColumnsType(c)}").mkString(",")
        spark.sql(s"ALTER TABLE ${config.hiveDatabase}.${config.hiveTable} ADD COLUMNS ($addColumns)")
      }

      // Source dropped a column: backfill it as NULL so the append still
      // matches the table schema.
      if (dfMissingColumns.nonEmpty) {
        val addColumns = dfMissingColumns.toSeq.map(c => lit(null).cast(tableColumnsType(c)).as(c))
        df.select(col("*") +: addColumns: _*)
      } else {
        df
      }
    }
  }

  /**
    * Whether `columnName` is a date/time-typed column in `schema`.
    *
    * Matching is case-insensitive: callers pass uppercased names while the
    * schema's casing depends on the source system.
    *
    * @throws IllegalArgumentException if the column does not exist
    */
  private def isTimestamp(schema: StructType, columnName: String): Boolean = {
    val columns = schema.fields.filter(f => f.name.equalsIgnoreCase(columnName))
    if (columns.isEmpty) {
      throw new IllegalArgumentException(s"column $columnName not exists")
    }
    columns.head.dataType match {
      case TimestampType => true
      case DateType => true
      case _ => false
    }
  }

  /**
    * Render a literal for a WHERE-clause comparison: TO_DATE(...) for
    * date/timestamp columns (using the source database's format string from
    * `formatMapping`), otherwise a plain quoted string.
    *
    * NOTE(review): `formatMapping(format)` throws NoSuchElementException for
    * an unmapped format — presumably configs always provide the mapping;
    * confirm before relying on it.
    */
  private def columnConditionValue(isTimestamp: Boolean, value: String, format: String, formatMapping: Map[String, String]): String = {
    if (isTimestamp) {
      s"TO_DATE('$value', '${formatMapping(format)}')"
    } else {
      s"'$value'"
    }
  }

  /** Inclusive lower bound for the optional range column. */
  def rangeStart(config: ExtractDataConfig): String

  /** Inclusive upper bound for the optional range column. */
  def rangeEnd(config: ExtractDataConfig): String
}
