package com.zhao.sparksql

import java.sql.{Connection, Statement}
import java.util.Properties

import com.alibaba.druid.pool.DruidDataSourceFactory
import javax.sql.DataSource

import _root_.scala.collection.mutable

/**
 * Description: <br/>
 * Copyright (c) ，2020 ， 赵 <br/>
 * This program is protected by copyright laws. <br/>
 * Date： 2020/12/28 21:08
 *
 * @author 柒柒
 * @version : 1.0
 */

/**
 * Serializable wrapper around a Druid connection pool for one JDBC url.
 *
 * @param username MySQL user
 * @param password MySQL password
 * @param jdbcurl  full JDBC url, also used as the cache key in the companion
 */
class MySqlPool(username: String, password: String, jdbcurl: String) extends Serializable {

  // Druid pool configuration assembled from the constructor arguments.
  val druidProps = new Properties()
  druidProps.setProperty("username", username)
  druidProps.setProperty("password", password)
  druidProps.setProperty("driver", "com.mysql.jdbc.Driver")
  druidProps.setProperty("url", jdbcurl)
  druidProps.setProperty("minIdle", "1")
  druidProps.setProperty("initialSize", "3")
  // Was "maxIdle": Druid ignores that deprecated property, so the pool was
  // silently capped at the default maxActive (8). "maxActive" is the real cap.
  druidProps.setProperty("maxActive", "20")

  // @transient + lazy: DruidDataSource is NOT java.io.Serializable, so it must
  // not travel with this (Serializable) wrapper when Spark ships it to
  // executors; instead it is rebuilt lazily on first use in each JVM.
  @transient lazy val ds: DataSource = DruidDataSourceFactory.createDataSource(druidProps)

  /** Borrows a connection from the pool; return it via [[returnConn]]. */
  def getConn(): Connection = ds.getConnection

  /**
   * Releases a statement and its connection. Closing a pooled connection
   * returns it to the Druid pool rather than tearing it down.
   */
  def returnConn(connection: Connection, ps: Statement): Unit = {
    if (ps != null) {
      ps.close()
    }
    if (connection != null) {
      connection.close()
    }
  }
}


object MySqlPool {
  // One pool per JDBC url, shared across the JVM; all access is guarded by
  // the `synchronized` block in `apply`.
  var map = mutable.Map[String, MySqlPool]()

  /**
   * Returns the shared pool for `jdbcurl`, creating it on first use.
   *
   * Note: the original double-checked locking was broken — the inner null
   * check re-tested a *local* variable instead of re-reading the shared map,
   * so two racing threads could each build (and cache-race) a pool. A single
   * `getOrElseUpdate` under the lock is both correct and simpler; pool
   * creation is rare, so the coarse lock is not a bottleneck.
   */
  def apply(username: String, password: String, jdbcurl: String): MySqlPool =
    MySqlPool.synchronized {
      map.getOrElseUpdate(jdbcurl, new MySqlPool(username, password, jdbcurl))
    }
}
