package com.spark.utils

import java.util

import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.{Connection, ConnectionFactory}

/** Simple blocking pool of HBase [[Connection]]s.
  *
  * All access to the underlying (non-thread-safe) `java.util.LinkedList`
  * is guarded by this object's monitor. Borrowers block via
  * `wait()`/`notifyAll()` rather than sleep-polling, so the monitor is
  * released while waiting and returned connections are handed out
  * immediately.
  */
object HbaseUtils {

  // Eagerly created fixed-size pool, used as a LIFO stack
  // (push to head, poll from head). Guarded by HbaseUtils' monitor.
  private val pool = {
    val connections = new util.LinkedList[Connection]()
    val conf = HBaseConfiguration.create()
    // Pre-create 10 HBase connections.
    for (_ <- 0 until 10) {
      connections.push(ConnectionFactory.createConnection(conf))
    }
    connections
  }

  /** Borrow a connection, blocking until one is available.
    *
    * Uses `wait()` instead of `Thread.sleep` inside the critical
    * section: sleeping while holding the monitor would prevent
    * [[returnConnection]] (also synchronized) from ever putting a
    * connection back, deadlocking the pool. `wait()` releases the
    * monitor until a returner calls `notifyAll()`.
    *
    * @return a pooled HBase connection; callers must give it back
    *         via [[returnConnection]], never close it directly
    */
  def getConnection(): Connection = synchronized {
    // Loop (not `if`) to guard against spurious wakeups and races
    // with other waiters that grabbed the connection first.
    while (pool.isEmpty) {
      wait()
    }
    pool.poll()
  }

  /** Return a previously borrowed connection to the pool.
    *
    * Must be synchronized: `util.LinkedList` is not thread-safe, and
    * we need the monitor to `notifyAll()` blocked borrowers.
    *
    * @param con the connection being returned
    */
  def returnConnection(con: Connection): Unit = synchronized {
    pool.push(con)
    // Wake every waiter; they re-check pool.isEmpty in their loop.
    notifyAll()
  }
}
