package com.zt.bigdata.template.spark.jdbc

import java.sql._
import java.time.{LocalDate, LocalDateTime, LocalTime}

import com.zaxxer.hikari.HikariDataSource
import org.apache.spark.internal.Logging

/**
  * Thread-local HikariCP-backed JDBC helper: pool lifecycle management plus
  * utilities for parameterized updates, batched writes, queries and DDL.
  */
object JDBCTemplate extends Logging with Serializable {
  // One HikariCP data source per thread. NOTE(review): a HikariDataSource is itself
  // a thread-safe pool, so a ThreadLocal of pools means every thread builds its own
  // pool — presumably intended so each Spark task thread owns its connections; confirm.
  private val instances: ThreadLocal[HikariDataSource] = new ThreadLocal[HikariDataSource]()

  /**
    * Initializes the current thread's connection pool if absent or already closed;
    * otherwise a no-op. All timeouts are milliseconds.
    *
    * @param jdbcUrl           JDBC URL; the MySQL driver class is set automatically
    * @param username          user name; credentials only applied when non-empty
    * @param password          password (used only together with a non-empty username)
    * @param connectionTimeout max wait for a connection from the pool (ms)
    * @param idleTime          idle timeout for pooled connections (ms)
    * @param maxLifetime       maximum lifetime of a pooled connection (ms)
    * @param maxPoolSize       maximum pool size
    * @param autoCommit        default auto-commit mode for pooled connections
    */
  def init(jdbcUrl: String,
           username: String,
           password: String,
           connectionTimeout: Long = 30000,
           idleTime: Long = 600000,
           maxLifetime: Long = 1800000,
           maxPoolSize: Int = 20,
           autoCommit: Boolean = true
          ): Unit = {
    if (instances.get() == null || instances.get().isClosed) {
      val instance = new HikariDataSource()
      instance.setJdbcUrl(jdbcUrl)
      if (username != null && username.nonEmpty) {
        instance.setUsername(username)
        instance.setPassword(password)
      }
      instance.setConnectionTimeout(connectionTimeout)
      instance.setIdleTimeout(idleTime)
      instance.setMaxLifetime(maxLifetime)
      instance.setMaximumPoolSize(maxPoolSize)
      if (jdbcUrl.startsWith("jdbc:mysql")) {
        // Touch the class so the driver self-registers before Hikari resolves it.
        classOf[com.mysql.jdbc.Driver]
        instance.setDriverClassName("com.mysql.jdbc.Driver")
      }
      //      if (jdbcUrl.startsWith("jdbc:phoenix")) {
      //        classOf[org.apache.phoenix.queryserver.client.Driver]
      //        instance.setDriverClassName("org.apache.phoenix.queryserver.client.Driver")
      //      }
      instance.setAutoCommit(autoCommit)
      instance.setConnectionTestQuery("SELECT 1")
      instances.set(instance)
    }
  }

  /** Closes the current thread's pool (if open) and clears the thread-local slot. */
  def close(): Unit = {
    if (instances.get() != null && !instances.get().isClosed) {
      instances.get().close()
    }
    if (instances.get() != null) {
      instances.remove()
    }
  }

  // FIX: the original `sys.addShutdownHook(() -> close())` used `->` (ArrowAssoc),
  // constructing a Tuple2 rather than a function; register the call itself as the
  // by-name hook body so close() actually runs at JVM shutdown.
  sys.addShutdownHook(close())

  /** Borrows a connection from the current thread's pool. Requires a prior init(). */
  def getConnection: Connection = {
    instances.get().getConnection()
  }

  /**
    * Binds one parameter at the given 1-based index, mapping java.time and
    * java.util date types onto their java.sql counterparts.
    *
    * FIX: java.sql.Timestamp/Date/Time are subclasses of java.util.Date, so the
    * original matched a Timestamp with the java.util.Date case and silently
    * truncated its time-of-day to a date; match the java.sql types first.
    */
  def packageParameters(stmt: PreparedStatement, index: Int, value: Any): Unit = {
    value match {
      case v: LocalDateTime => stmt.setTimestamp(index, Timestamp.valueOf(v))
      case v: LocalDate => stmt.setDate(index, Date.valueOf(v))
      case v: LocalTime => stmt.setTime(index, Time.valueOf(v))
      case v: Timestamp => stmt.setTimestamp(index, v)
      case v: Date => stmt.setDate(index, v)
      case v: Time => stmt.setTime(index, v)
      case v: java.util.Date => stmt.setDate(index, new Date(v.getTime))
      case v: Int => stmt.setInt(index, v)
      case _ => stmt.setObject(index, value.asInstanceOf[Object])
    }
  }

  /**
    * Executes one parameterized DML statement.
    *
    * @param sql        statement with '?' placeholders
    * @param parameters values bound in order (1-based)
    * @param connection optional existing connection; a pooled one is borrowed when null
    * @param autoClose  when true the connection is closed in `finally`; when false
    *                   auto-commit is disabled and the caller must commit and close
    * @return result of Statement.execute(): true iff the result is a ResultSet
    */
  def executeUpdate(sql: String,
                    parameters: Seq[Any],
                    connection: Connection = null,
                    autoClose: Boolean = true): Boolean = {
    var stmt: PreparedStatement = null
    var conn: Connection = connection
    try {
      // FIX: the original tested `conn != null`, which replaced a caller-supplied
      // connection with a pooled one and left conn null (→ NPE) when none was given.
      if (conn == null) {
        conn = getConnection
      }
      if (!autoClose) {
        conn.setAutoCommit(false)
      }
      stmt = conn.prepareStatement(sql)
      parameters.zipWithIndex.foreach(
        param => packageParameters(stmt, param._2 + 1, param._1)
      )
      stmt.execute()
    } catch {
      case e: Throwable =>
        log.error("ExecuteUpdate error", e)
        throw e
    } finally {
      if (stmt != null && !stmt.isClosed) {
        stmt.close()
      }
      if (conn != null && autoClose && !conn.isClosed) {
        conn.close()
      }
    }
  }

  /**
    * Prepares a statement for manual batching; disables auto-commit so the batch
    * is committed explicitly by submitExecuteBatchUpdate / submitExecutesBatchUpdate.
    */
  def prepareExecuteBatchUpdate(sql: String, conn: Connection = getConnection): PreparedStatement = {
    conn.setAutoCommit(false)
    conn.prepareStatement(sql)
  }

  /** Binds one row of parameters (1-based) and adds it to the statement's batch. */
  def addExecuteBatchUpdate(parameters: Seq[Any], stmt: PreparedStatement): Unit = {
    parameters.zipWithIndex.foreach(
      param => packageParameters(stmt, param._2 + 1, param._1))
    stmt.addBatch()
  }

  /**
    * Executes and commits the pending batch.
    *
    * @param autoClose when true, closes both the statement and its connection
    * @return number of batch entries executed (not the affected-row total)
    */
  def submitExecuteBatchUpdate(stmt: PreparedStatement, autoClose: Boolean = true): Int = {
    val counter = stmt.executeBatch().length
    val conn = stmt.getConnection
    conn.commit()
    if (autoClose) {
      stmt.close()
      if (!conn.isClosed) {
        conn.close()
      }
    }
    counter
  }

  /**
    * Executes several pending batches on the same connection and commits once.
    *
    * @return per-statement batch-entry counts, in input order
    */
  def submitExecutesBatchUpdate(stmts: Seq[PreparedStatement], conn: Connection, autoClose: Boolean = true): Seq[Int] = {
    val counters = stmts.map(_.executeBatch().length)
    conn.commit()
    if (autoClose) {
      stmts.filter(!_.isClosed).foreach(_.close())
      if (!conn.isClosed) {
        conn.close()
      }
    }
    counters
  }

  /**
    * Inserts/updates `records` in batches of `batchSize`, committing each flush,
    * with a final flush+commit for the tail batch.
    *
    * @param sql        statement with '?' placeholders, one row per record
    * @param records    rows of parameter values
    * @param batchSize  number of rows per executeBatch/commit cycle
    * @param connection optional existing connection; pooled one borrowed when null
    * @param autoClose  close the connection in `finally` when true
    * @return number of records processed
    */
  def executeBatchUpdate(sql: String, records: Seq[Seq[Any]], batchSize: Long = 1000, connection: Connection = null, autoClose: Boolean = true): Long = {
    var stmt: PreparedStatement = null
    var conn: Connection = connection
    try {
      if (conn == null) {
        conn = getConnection
      }
      conn.setAutoCommit(false)
      stmt = conn.prepareStatement(sql)
      records.zipWithIndex.foreach { case (record, i) =>
        record.zipWithIndex.foreach { case (col, j) =>
          packageParameters(stmt, j + 1, col)
        }
        stmt.addBatch()
        // FIX: the original flushed when `i % batchSize == 0`, i.e. after the very
        // first record and then every batchSize records, producing an initial batch
        // of size 1; flush only after each full batch.
        if ((i + 1) % batchSize == 0) {
          stmt.executeBatch()
          conn.commit()
        }
      }
      stmt.executeBatch()
      conn.commit()
      records.size.toLong
    } catch {
      case e: Throwable =>
        log.error("ExecuteBatchUpdate error", e)
        throw e
    } finally {
      if (stmt != null && !stmt.isClosed) {
        stmt.close()
      }
      if (conn != null && autoClose && !conn.isClosed) {
        conn.close()
      }
    }
  }

  /**
    * Runs a raw (non-parameterized) query and hands the ResultSet to `exec`.
    * WARNING: the SQL is executed verbatim — never build it from untrusted input.
    *
    * Workaround for a Phoenix thin-client PreparedStatement bug:
    * http://apache-phoenix-user-list.1124778.n5.nabble.com/Bind-Params-with-Union-throw-AvaticaSqlException-td4471.html
    */
  def executeQueryUnSafety(sql: String, connection: Connection = null, autoClose: Boolean = true)(exec: ResultSet => Unit): Unit = {
    var conn: Connection = connection
    var stmt: Statement = null
    var rs: ResultSet = null
    try {
      if (conn == null) {
        conn = getConnection
      }
      stmt = conn.createStatement()
      rs = stmt.executeQuery(sql)
      exec(rs)
    } catch {
      case e: Throwable =>
        log.error("ExecuteQuery error", e)
        throw e
    } finally {
      if (rs != null && !rs.isClosed) {
        rs.close()
      }
      if (stmt != null && !stmt.isClosed) {
        stmt.close()
      }
      if (conn != null && autoClose && !conn.isClosed) {
        conn.close()
      }
    }
  }

  /**
    * Runs a parameterized query and hands the ResultSet to `exec`. The ResultSet
    * is closed on return, so `exec` must fully consume it.
    *
    * @param parameters values bound in order (1-based)
    * @param autoClose  close the connection in `finally` when true
    */
  def executeQuery(sql: String, parameters: Seq[Any], connection: Connection = null, autoClose: Boolean = true)(exec: ResultSet => Unit): Unit = {
    var conn: Connection = connection
    var stmt: PreparedStatement = null
    var rs: ResultSet = null
    try {
      if (conn == null) {
        conn = getConnection
      }
      stmt = conn.prepareStatement(sql)
      parameters.zipWithIndex.foreach(
        param => packageParameters(stmt, param._2 + 1, param._1))
      rs = stmt.executeQuery()
      exec(rs)
    } catch {
      case e: Throwable =>
        log.error("ExecuteQuery error", e)
        throw e
    } finally {
      if (rs != null && !rs.isClosed) {
        rs.close()
      }
      if (stmt != null && !stmt.isClosed) {
        stmt.close()
      }
      if (conn != null && autoClose && !conn.isClosed) {
        conn.close()
      }
    }
  }

  /** Executes a DDL statement (CREATE/ALTER/DROP ...) on a plain Statement. */
  def ddl(sql: String, connection: Connection = null, autoClose: Boolean = true): Unit = {
    var conn: Connection = connection
    var stmt: Statement = null
    try {
      if (conn == null) {
        conn = getConnection
      }
      stmt = conn.createStatement()
      stmt.executeUpdate(sql)
    } catch {
      case e: Throwable =>
        log.error("DDL execute error", e)
        throw e
    } finally {
      if (stmt != null && !stmt.isClosed) {
        stmt.close()
      }
      if (conn != null && autoClose && !conn.isClosed) {
        conn.close()
      }
    }
  }
}
