package cn.ipanel.bigdata.boot.source.genre

import cn.ipanel.bigdata.boot.date.{Day, Time}
import cn.ipanel.bigdata.boot.logger.Logger
import cn.ipanel.bigdata.boot.period.Period
import cn.ipanel.bigdata.boot.source.{DataSource, LocalExec, Mapper, SparkExec, Table}
import cn.ipanel.bigdata.utils.Dictionary.{F_DATE, F_PERIOD_TIME}
import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.phoenix.jdbc.PhoenixConnection
import org.apache.spark.sql.{DataFrame, Row, SQLContext, SaveMode, UDFRegistration}

import java.sql.{DriverManager, PreparedStatement}
import java.util.Properties

/**
 * Base class for data sources backed by an Apache Phoenix table.
 *
 * Exposes three access paths to the table `dbName.tbName`:
 *  - plain JDBC for small local queries/updates ([[_local_get]] / [[_local_sql]]),
 *  - Spark SQL reads ([[exec]] / [[load]]),
 *  - Spark writes ([[save]]).
 *
 * All failures are logged and swallowed: read paths yield an empty result,
 * the write path only logs — callers never see an exception.
 *
 * Author: lzz
 * Date: 2021/11/17 17:43
 *
 * @param dbName Phoenix schema (database) name
 * @param tbName Phoenix table name
 */
abstract class Phoenix(dbName: String, tbName: String) extends Mapper with Table with SparkExec with LocalExec {

  import Phoenix._

  // SQLContext derived from the shared SparkSession (one per SparkContext).
  private[this] final lazy val SC: SQLContext = SQLContext.getOrCreate(spark.sparkContext)

  /** Registration point for Spark SQL UDFs. */
  def udf: UDFRegistration = spark.udf

  override def getSourceGenre: DataSource.Genre = DataSource.GENRE_PHOENIX
  override def getSourceModel: DataSource.Model = DataSource.MODEL_WRITE
  override def getDBName: String = dbName
  override def getTBName: String = tbName

  /** Options required by the Spark-Phoenix connector: qualified table name + ZooKeeper url. */
  lazy val LINK: Map[String, String] = Map(PARAM_TABLE -> s"$getDBName.$getTBName",
                                           PARAM_ZKURL -> database.url)

  /** Extra connection options configured on the database, as an immutable snapshot. */
  lazy val OPTIONS: Map[String, String] = {
    // Explicit JavaConverters (.asScala) replaces the deprecated implicit JavaConversions.
    import scala.collection.JavaConverters._
    database.options.asScala.toMap
  }

  /** [[OPTIONS]] repackaged as `java.util.Properties` for the plain JDBC path. */
  lazy val PROPERTIES: Properties = {
    val prop = new Properties()
    OPTIONS.foreach { case (key, value) => prop.setProperty(key, value) }
    prop
  }

  /** JDBC url of the Phoenix cluster. */
  lazy val URL: String = database.url

  /**
   * Executes a query over a dedicated JDBC connection and materialises the rows.
   *
   * @param sql a Phoenix SELECT statement
   * @return one Map (column -> value) per row, converted by `_to`; an empty Seq
   *         when the query fails (the error is logged, never rethrown)
   */
  override def _local_get(sql: String): Seq[Map[String, String]] = {
    Logger.I(s"Local_Get Phoenix[$getDBName.$getTBName], Sql = $sql")
    val time = Time.now
    var conn: PhoenixConnection = null
    var stmt: PreparedStatement = null
    try {
      Class.forName(protocol.driver)
      conn = DriverManager.getConnection(URL, PROPERTIES).asInstanceOf[PhoenixConnection]
      conn.setAutoCommit(true)
      stmt = conn.prepareStatement(sql)
      _to(stmt.executeQuery())
    } catch {
      case e: Exception =>
        Logger.E(
          s"""Local_Get Phoenix[$getDBName.$getTBName] Failed.
             | Because: ${ExceptionUtils.getStackTrace(e)}
             |""".stripMargin)
        // Empty result instead of null so callers can iterate without a null check.
        Seq.empty
    } finally {
      try {
        if (stmt != null) stmt.close()
        if (conn != null) conn.close()
      } catch {
        case e: Exception =>
          Logger.E(
            s"""Local_Get Phoenix[$getDBName.$getTBName] Connection Close Failed.
               | Because: ${ExceptionUtils.getStackTrace(e)}
               |""".stripMargin)
      }
      Logger.I(s"Local_Get Phoenix[$getDBName.$getTBName], Time = ${time.diffToNowS}s")
    }
  }

  /**
   * Executes a DML statement (upsert/delete) over a dedicated JDBC connection.
   *
   * @param sql a Phoenix DML statement
   * @return the affected row count, or 0 when the statement fails
   *         (the error is logged, never rethrown)
   */
  override def _local_sql(sql: String): Int = {
    Logger.I(s"Local_Sql Phoenix[$getDBName.$getTBName], Sql = $sql")
    val time = Time.now
    var conn: PhoenixConnection = null
    var stmt: PreparedStatement = null
    var code = 0
    try {
      Class.forName(protocol.driver)
      conn = DriverManager.getConnection(URL, PROPERTIES).asInstanceOf[PhoenixConnection]
      conn.setAutoCommit(true)
      stmt = conn.prepareStatement(sql)
      code = stmt.executeUpdate()
      // NOTE(review): commit() while autoCommit is enabled is redundant per the JDBC
      // contract; kept because the Phoenix driver apparently tolerates it — confirm
      // before removing.
      conn.commit()
      code
    } catch {
      case e: Exception =>
        Logger.E(
          s"""Local_Sql Phoenix[$getDBName.$getTBName] Failed.
             | Because: ${ExceptionUtils.getStackTrace(e)}
             |""".stripMargin)
        0
    } finally {
      try {
        if (stmt != null) stmt.close()
        if (conn != null) conn.close()
      } catch {
        case e: Exception =>
          Logger.E(
            s"""Local_Sql Phoenix[$getDBName.$getTBName] Connection Close Failed.
               | Because: ${ExceptionUtils.getStackTrace(e)}
               |""".stripMargin)
      }
      Logger.I(s"Local_Sql Phoenix[$getDBName.$getTBName], Count = $code, Time = ${time.diffToNowS}s")
    }
  }

  /** Deletes every row of the table via a full-table DELETE. */
  @deprecated("full-table delete is expensive; prefer the targeted deleteBy* methods", "2021-11-17")
  def truncate(): Unit = {
    val ret: Int = _local_sql(s"delete from $getDBName.$getTBName")
    Logger.I(s"Truncate Phoenix[$getDBName.$getTBName], Count = $ret")
  }

  /**
   * Runs a query through Spark's plain JDBC datasource.
   *
   * @param sql passed as the `dbtable` option, so it must be a table name or a
   *            parenthesised sub-query with an alias, as Spark JDBC requires
   * @return the query result, or an empty DataFrame when the read fails
   *         (the error is logged, never rethrown)
   */
  override def exec(sql: String): DataFrame = {
    Logger.I(s"Exec Phoenix[$getDBName.$getTBName], Sql = $sql")
    try {
      SC.read
        .format(PARAM_JDBC)
        .options(Map(PARAM_DRIVER -> protocol.driver,
                     PARAM_DBTABLE -> sql,
                     PARAM_URL -> URL) ++ OPTIONS)
        .load
    } catch {
      case e: Exception =>
        Logger.E(
          s"""Exec Phoenix[$getDBName.$getTBName] Failed.
             | Because: ${ExceptionUtils.getStackTrace(e)}
             |""".stripMargin)
        emptyTable.toDF()
    }
  }

  /**
   * Deletes all rows of the given day, matched on the full-date column.
   *
   * @param day the day whose rows are removed
   */
  def deleteByFullDay(day: Day): Unit = {
    val delRecord = _local_sql(s"delete from $getDBName.$getTBName where $F_DATE = ${day.toDate}")
    Logger.I("delete record: " + delRecord)
  }

  /**
   * Deletes all rows of the given day, matched on the period-time column in
   * simple-date form — see cn.ipanel.bigdata.dw.dws.parseDateToPhoenixDate()
   * for the key format.
   *
   * @param day the day whose rows are removed
   */
  def deleteBySimpleDay(day: Day): Unit = {
    val delRecord = _local_sql(s"delete from $getDBName.$getTBName where $F_PERIOD_TIME = ${day.toSimpleDate}")
    Logger.I("delete record: " + delRecord)
  }

  /**
   * Deletes all rows carrying the given period time.
   *
   * @param periodTime value matched against the period-time column
   */
  def deleteByPeriodTime(periodTime: Int): Unit = {
    val delRecord = _local_sql(s"delete from $getDBName.$getTBName where $F_PERIOD_TIME = ${periodTime}")
    Logger.I("delete record: " + delRecord)
  }

  /**
   * Reads the whole table through the Spark-Phoenix connector.
   *
   * Phoenix reports upper-case column names, so every column is renamed to its
   * lower-case form before the frame is returned.
   *
   * @return the table contents, or an empty DataFrame when the read fails
   *         (the error is logged, never rethrown)
   */
  override def load: DataFrame = {
    Logger.I(s"Load Phoenix[$getDBName.$getTBName]")
    try {
      val raw = spark.read
        .format(protocol.source)
        .options(LINK ++ OPTIONS)
        .load
      raw.schema.fieldNames.foldLeft(raw)((df, col) => df.withColumnRenamed(col, col.toLowerCase()))
    } catch {
      case e: Exception =>
        Logger.E(
          s"""Load Phoenix[$getDBName.$getTBName] Failed.
             | Because: ${ExceptionUtils.getStackTrace(e)}
             |""".stripMargin)
        emptyTable.toDF()
    }
  }

  /**
   * Writes `df`, projected onto the table's declared columns, through the
   * Spark-Phoenix connector. A null frame or a read-only source model is a
   * no-op; write failures are logged, never rethrown.
   *
   * @param df   data to persist; only the columns from getTBColumns are written
   * @param mode Spark save mode, Overwrite by default
   */
  override def save(df: DataFrame, mode: SaveMode = SaveMode.Overwrite): Unit = {
    if (df != null) {
      getSourceModel match {
        case DataSource.MODEL_WRITE =>
          Logger.I(s"Save Phoenix[$getDBName.$getTBName]")
          try {
            df.selectExpr(getTBColumns: _*)
              .write
              .format(protocol.source)
              .mode(mode)
              .options(LINK ++ OPTIONS)
              .save
          } catch {
            case e: Exception =>
              Logger.E(
                s"""Save Phoenix[$getDBName.$getTBName] Failed.
                   | Because: ${ExceptionUtils.getStackTrace(e)}
                   |""".stripMargin)
          }
        case _ =>
          Logger.I(s"Refuse Save Phoenix[$getDBName.$getTBName]. Because: Only Read")
      }
    }
  }
}

/** Option keys shared by the JDBC and Spark access paths of [[Phoenix]] sources. */
protected[source] object Phoenix {

  /** Spark format name for plain JDBC reads. */
  final val PARAM_JDBC: String = "jdbc"

  /** ZooKeeper quorum option of the Spark-Phoenix connector. */
  final val PARAM_ZKURL: String = "zkUrl"

  /** JDBC connection url option. */
  final val PARAM_URL: String = "url"

  /** JDBC driver class option. */
  final val PARAM_DRIVER: String = "driver"

  /** Target table option of the Spark-Phoenix connector. */
  final val PARAM_TABLE: String = "table"

  /** Table / sub-query option of the Spark JDBC datasource. */
  final val PARAM_DBTABLE: String = "dbtable"
}
