package com.at.bigdata.spark.util

import java.sql.{Connection, PreparedStatement}
import java.util.Properties
import javax.sql.DataSource

import scala.util.control.NonFatal

import com.alibaba.druid.pool.DruidDataSourceFactory

/**
 *
 * @author cdhuangchao3
 * @date 2023/6/2 11:43 AM
 */
/**
 * JDBC helper backed by a Druid connection pool.
 *
 * Single shared [[javax.sql.DataSource]] initialized eagerly when the object
 * is first referenced; callers borrow connections via [[getConn]] and run
 * statements through [[executeUpdate]] / [[isExist]].
 */
object JDBCUtil {

  // Shared pool — never reassigned, so a val (was a var).
  // NOTE(review): URL, user and password are hard-coded below; they should be
  // loaded from external configuration instead of living in source control.
  val dataSource: DataSource = init()

  /**
   * Builds the Druid pooled DataSource from inline properties.
   *
   * @return a configured connection pool (max 10 active connections)
   */
  def init(): DataSource = {
    val properties = new Properties()
    // NOTE(review): "com.mysql.jdbc.Driver" is the legacy MySQL driver class;
    // with Connector/J 8.x the class is "com.mysql.cj.jdbc.Driver". Left
    // unchanged because the deployed driver version is not visible here.
    properties.put("driverClassName", "com.mysql.jdbc.Driver")
    properties.put("url", "jdbc:mysql://11.158.108.205:3306/hc_test?serverTimezone=GMT%2B8&useSSL=false")
    properties.put("username", "root")
    properties.put("password", "Jdrc@123456")
    properties.put("maxActive", "10")
    DruidDataSourceFactory.createDataSource(properties)
  }

  /**
   * Borrows a connection from the pool; the caller is responsible for
   * closing it (which returns it to the pool).
   */
  def getConn(): Connection = dataSource.getConnection

  /**
   * Executes a single INSERT/UPDATE/DELETE statement in its own transaction.
   *
   * Positional parameters are bound 1-based from `params` (which may be null
   * or empty). Commits on success; rolls back on failure. The statement is
   * always closed and autoCommit is restored before returning.
   *
   * @param connection an open JDBC connection (not closed by this method)
   * @param sql        parameterized SQL with `?` placeholders
   * @param params     values for the placeholders, or null/empty for none
   * @return the number of affected rows, or 0 if the statement failed
   *         (was declared Unit, silently discarding this value — bug fix)
   */
  def executeUpdate(connection: Connection, sql: String, params: Array[Any]): Int = {
    var rtn = 0
    var pstmt: PreparedStatement = null
    try {
      connection.setAutoCommit(false)
      pstmt = connection.prepareStatement(sql)
      if (params != null && params.nonEmpty) {
        for (i <- params.indices) {
          pstmt.setObject(i + 1, params(i))
        }
      }
      rtn = pstmt.executeUpdate()
      connection.commit()
    } catch {
      case NonFatal(e) =>
        e.printStackTrace()
        // Undo the partial transaction so the pooled connection isn't
        // returned with uncommitted work (was missing — bug fix).
        try connection.rollback() catch { case NonFatal(_) => () }
    } finally {
      // Close in finally so the statement isn't leaked on error (was only
      // closed on the success path — bug fix), and restore autoCommit.
      if (pstmt != null) {
        try pstmt.close() catch { case NonFatal(_) => () }
      }
      try connection.setAutoCommit(true) catch { case NonFatal(_) => () }
    }
    rtn
  }

  /**
   * Runs a parameterized query and reports whether it returned any row.
   *
   * @param connection an open JDBC connection (not closed by this method)
   * @param sql        parameterized SQL with `?` placeholders
   * @param params     values for the placeholders (must be non-null; may be empty)
   * @return true if the result set has at least one row; false on no rows or error
   */
  def isExist(connection: Connection, sql: String, params: Array[Any]): Boolean = {
    var flag = false
    var pstmt: PreparedStatement = null
    try {
      pstmt = connection.prepareStatement(sql)
      for (i <- params.indices) {
        pstmt.setObject(i + 1, params(i))
      }
      flag = pstmt.executeQuery().next()
    } catch {
      case NonFatal(e) => e.printStackTrace()
    } finally {
      // Close in finally so the statement isn't leaked on error (bug fix).
      if (pstmt != null) {
        try pstmt.close() catch { case NonFatal(_) => () }
      }
    }
    flag
  }
}
