package cn.ipanel.bigdata.boot.source.genre

import cn.ipanel.bigdata.boot.logger.Logger
import cn.ipanel.bigdata.boot.source.{DataSource, Mapper, SparkExec, Table}
import cn.ipanel.bigdata.utils.Dictionary.F_DATE
import cn.ipanel.bigdata.utils.Util._
import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.spark.sql.{DataFrame, SaveMode, UDFRegistration}

import scala.util.control.NonFatal

/**
 * Author: lzz
 * Date: 2021/11/16 16:15
 */
/**
 * Base class for Hive-backed data sources. Provides SQL execution, full-table
 * load, and (for write-model sources) saving via `insertInto`. All failures
 * are logged with their stack trace; `exec`/`load` degrade to an empty
 * DataFrame instead of propagating the exception.
 *
 * @param dbName Hive database name
 * @param tbName Hive table name
 */
abstract class Hive(dbName: String, tbName: String) extends Mapper with Table with SparkExec {

  import Hive._

  def udf: UDFRegistration = spark.udf

  // Whether to repartition before saving; numPartitions is used as the
  // target partition count when enabled.
  def isRepartitionSave: Boolean = true
  def numPartitions: Int = 0x0F

  //  If no partition column is needed:  override def partitionCols: Seq[String] = Seq()
  //  To add partition columns:          override def partitionCols: Seq[String] = Seq(F_DATE, F_MONTH)
  //  F_DATE is used as the default partition column.
  def partitionCols: Seq[String] = Seq(F_DATE)

  override def getSourceGenre: DataSource.Genre = DataSource.GENRE_HIVE
  override def getSourceModel: DataSource.Model = DataSource.MODEL_WRITE
  override def getDBName: String = dbName
  override def getTBName: String = tbName

  /**
   * Runs an arbitrary SQL statement on the Spark session.
   *
   * @param sql the SQL text to execute
   * @return the resulting DataFrame, or an empty DataFrame if execution fails
   */
  override def exec(sql: String): DataFrame = {
    Logger.I(s"Exec Hive[$getDBName.$getTBName], Sql = $sql")
    try {
      spark.sql(sql)
    } catch {
      // NonFatal so that OutOfMemoryError / InterruptedException still propagate.
      case NonFatal(e) =>
        Logger.E(
          s"""Exec Hive[$getDBName.$getTBName] Failed.
             | Because: ${ExceptionUtils.getStackTrace(e)}
             |""".stripMargin)
        emptyTable.toDF()
    }
  }

  /**
   * Loads the whole backing Hive table.
   *
   * @return the table as a DataFrame, or an empty DataFrame on failure
   */
  override def load: DataFrame = {
    Logger.I(s"Load Hive[$getDBName.$getTBName]")
    try {
//      Logger.I("load use conf hive.exec.dynamic.partition.mode: " + spark.getConf("hive.exec.dynamic.partition.mode") + " hive.exec.dynamic.partition: " + spark.getConf("hive.exec.dynamic.partition"))
      spark.table(s"$getDBName.$getTBName")
    } catch {
      case NonFatal(e) =>
        Logger.E(
          s"""Load Hive[$getDBName.$getTBName] Failed.
             | Because: ${ExceptionUtils.getStackTrace(e)}
             |""".stripMargin)
        emptyTable.toDF()
    }
  }

  /**
   * Writes `df` into the backing Hive table via `insertInto`, projecting to
   * the table's column list first when one is declared. Refused (log only)
   * unless the source model is MODEL_WRITE. Write failures are logged and
   * swallowed, matching `exec`/`load`.
   *
   * @param df   the data to save
   * @param mode save mode; defaults to [[Hive.MODE]] (Overwrite)
   */
  override def save(df: DataFrame, mode: SaveMode = MODE): Unit = {
    getSourceModel match {
      case DataSource.MODEL_WRITE =>
        Logger.I(s"Save Hive[$getDBName.$getTBName]")
        try {
          val colNames = getTBColumns
          // Align column order with the target table when columns are declared,
          // since insertInto matches by position rather than by name.
          val data =
            if (nonEmpty(colNames)) df.selectExpr(colNames: _*)
            else df
          // NOTE(review): .format(FORMAT_SOURCE) is a no-op with insertInto —
          // the target table's declared format wins. Kept for behavior parity.
          val writer =
            if (isRepartitionSave) data.repartition(numPartitions).write.format(FORMAT_SOURCE)
            else data.write
          writer.mode(mode).insertInto(s"$getDBName.$getTBName")
        } catch {
          case NonFatal(e) =>
            Logger.E(
              s"""Save Hive[$getDBName.$getTBName] Failed.
                 | Because: ${ExceptionUtils.getStackTrace(e)}
                 |""".stripMargin)
        }
      case _ =>
        Logger.I(s"Refuse Save Hive[$getDBName.$getTBName]. Because: Only Read")
    }
  }
}

private[source] object Hive {

  /** Default save mode applied by [[Hive.save]] when none is given. */
  final val MODE: SaveMode = SaveMode.Overwrite

  /** Storage format attached to the writer for repartitioned saves. */
  final val FORMAT_SOURCE: String = "orc"
}