package com.example.util


import org.apache.spark.sql.Row

import java.sql.{Connection, DriverManager, PreparedStatement, SQLException}
import java.util

/**
 * @Author: leali
 * @Version: 1.0
 * @Date: 2021/9/8-17:56
 * @Description:
 */
object JdbcUtil {

  /**
   * Opens a JDBC connection using explicit credentials.
   *
   * Fails fast: the previous version logged and returned `null` on error,
   * which only deferred the failure to an NPE at the first use of the
   * connection. Exceptions now propagate to the caller.
   *
   * @param driver   fully-qualified JDBC driver class name
   * @param url      JDBC connection URL
   * @param user     database user
   * @param password database password
   * @return an open [[java.sql.Connection]]
   * @throws ClassNotFoundException if the driver class cannot be loaded
   * @throws SQLException           if the connection cannot be established
   */
  def getConnection(driver: String, url: String, user: String, password: String): Connection = {
    Class.forName(driver)
    DriverManager.getConnection(url, user, password)
  }

  /**
   * Closes a connection, tolerating `null`.
   *
   * @param connection 连接对象 (connection to close; may be null)
   */
  def closeConnection(connection: Connection): Unit = {
    if (null != connection) {
      connection.close()
    }
  }

  /**
   * 重载 — overload that resolves connection settings from the YAML
   * environment configuration.
   *
   * @param mode environment key, e.g. NODE_MYSQL
   * @return an open connection
   */
  def getConnection(mode: String): Connection = {
    val envInfo: util.LinkedHashMap[String, String] = YamlUtil.getEnvInfo(mode)
    getConnection(driver = envInfo.get("driver"), url = envInfo.get("url"),
      user = envInfo.get("user"), password = envInfo.get("password"))
  }

  /**
   * Executes a parameterized statement.
   *
   * For "SELECT" the returned [[java.sql.ResultSet]] keeps the underlying
   * statement open, so the caller is responsible for closing it (e.g. via
   * `rs.getStatement.close()`). For all other modes the statement is closed
   * here before returning — the previous version leaked it.
   *
   * @param connection open connection (not closed by this method)
   * @param mode       "SELECT" -> executeQuery; "INSERT"/"DELETE"/"UPDATE" ->
   *                   executeUpdate; anything else (e.g. TRUNCATE) -> execute
   * @param sql        SQL text with '?' placeholders
   * @param args       bind values; String, Int and Double are supported
   * @return ResultSet for SELECT, Int (affected rows) for DML, Boolean otherwise
   * @throws IllegalArgumentException on an unsupported bind-value type
   *                                  (previously only printed, leaving the
   *                                  parameter unbound and failing later)
   */
  def operateStatement(connection: Connection, mode: String, sql: String, args: Any*): Any = {
    val statement: PreparedStatement = connection.prepareStatement(sql)
    try {
      args.zipWithIndex.foreach { case (arg, i) =>
        val index = i + 1 // JDBC parameter indices are 1-based
        arg match {
          case s: String => statement.setString(index, s)
          case n: Int    => statement.setInt(index, n)
          case d: Double => statement.setDouble(index, d)
          case other =>
            throw new IllegalArgumentException(s"args is error: unsupported bind type at position $index: $other")
        }
      }
      mode match {
        // executeQuery 用于查，可以返回ResultSet集 (statement intentionally left
        // open: closing it would invalidate the returned ResultSet)
        case "SELECT" => statement.executeQuery()
        // executeUpdate 用于增删改，返回影响的行数
        case "INSERT" | "DELETE" | "UPDATE" =>
          try statement.executeUpdate() finally statement.close()
        // TRUNCATE and anything else
        case _ =>
          try statement.execute() finally statement.close()
      }
    } catch {
      // On binding/preparation failure, do not leak the statement.
      case e: Exception =>
        statement.close()
        throw e
    }
  }

  /**
   * Batch-upserts Spark rows into a MySQL table using REPLACE INTO inside a
   * single manual transaction (commit after all batches succeed, rollback on
   * failure).
   *
   * REPLACE 只会替换主键id 相同的纪录 — REPLACE only substitutes rows whose
   * primary key already exists.
   *
   * @param rows        rows of a Spark partition; each must expose at least
   *                    `fieldLength` columns matching the table's column order
   * @param tableName   target table (interpolated into SQL — must be trusted,
   *                    not user input)
   * @param fieldLength number of columns to bind per row
   * @param batchSize   rows per JDBC batch flush (default 1000)
   */
  def upsertData(rows: Iterator[Row], tableName: String, fieldLength: Int, batchSize: Int = 1000): Unit = {
    require(fieldLength > 0, "fieldLength must be positive")
    require(batchSize > 0, "batchSize must be positive")
    var connection: Connection = null
    var preparedStatement: PreparedStatement = null
    try {
      connection = getConnection(mode = "NODE_MYSQL")
      // Manual transaction: set once up front (the old code reset it on every
      // row inside the loop), commit once at the end.
      connection.setAutoCommit(false)
      val placeholders = Seq.fill(fieldLength)("?").mkString(",")
      preparedStatement = connection.prepareStatement(s"REPLACE INTO $tableName VALUES ($placeholders)")
      var size = 0
      while (rows.hasNext) {
        val row: Row = rows.next()
        for (i <- 0 until fieldLength) {
          preparedStatement.setObject(i + 1, row.get(i))
        }
        preparedStatement.addBatch()
        size += 1
        if (size % batchSize == 0) {
          preparedStatement.executeBatch()
          preparedStatement.clearBatch()
          SparkUtil.printLog("INSERT SUCCESS： " + size)
        }
      }
      // Flush only the final PARTIAL batch; when size is an exact multiple of
      // batchSize the last batch was already executed above (the old code
      // re-executed and double-logged in that case).
      if (size % batchSize != 0) {
        preparedStatement.executeBatch()
        SparkUtil.printLog("INSERT SUCCESS： " + size)
      }
      connection.commit()
    } catch {
      case e: Exception =>
        // Undo any partially-applied batches before surfacing the error.
        if (null != connection) {
          try connection.rollback()
          catch { case rollbackError: SQLException => rollbackError.printStackTrace() }
        }
        e.printStackTrace()
    } finally {
      // Null-safe cleanup: the old finally NPE'd (masking the real error)
      // when getConnection or prepareStatement failed.
      if (null != preparedStatement) {
        preparedStatement.close()
      }
      closeConnection(connection)
    }
  }
}
