package com.bigdata.spark.util

import java.sql.Connection
import java.util.Properties

import com.alibaba.druid.pool.DruidDataSourceFactory
import javax.sql.DataSource

/**
 * @author Gerry chan
 * @version 1.0
 * jdbc 工具类
 */
/**
 * JDBC utility backed by a Druid connection pool.
 *
 * Pool settings are read from `config.properties` (keys: `jdbc.driver`,
 * `jdbc.url`, `jdbc.user`, `jdbc.password`, `jdbc.datasource.size`).
 */
object JDBCUtil {
  // lazy: the pool is only built on first use, so a misconfiguration fails
  // at the call site instead of as an opaque ExceptionInInitializerError
  // during object initialization. val (not var): the pool must never be
  // reassigned once created.
  lazy val dataSource: DataSource = init()

  /**
   * Builds the Druid connection pool from `config.properties`.
   *
   * @return a configured [[javax.sql.DataSource]]
   * @throws NullPointerException if a required config key is missing
   *                              (Properties.setProperty rejects null values)
   */
  def init(): DataSource = {
    val config: Properties = PropertiesUtil.load("config.properties")

    // Druid expects its own property names; remap from our config keys.
    val properties = new Properties()
    properties.setProperty("driverClassName", config.getProperty("jdbc.driver"))
    properties.setProperty("url", config.getProperty("jdbc.url"))
    properties.setProperty("username", config.getProperty("jdbc.user"))
    properties.setProperty("password", config.getProperty("jdbc.password"))
    properties.setProperty("maxActive", config.getProperty("jdbc.datasource.size"))

    DruidDataSourceFactory.createDataSource(properties)
  }

  /**
   * Borrows a connection from the pool. Callers are responsible for
   * closing it (which returns it to the pool).
   */
  def getConnection: Connection = {
    dataSource.getConnection
  }
}
