package com.lvmama.rhino.common.utils.hbase

import org.apache.hadoop.hbase.{HBaseConfiguration, HConstants}
import org.apache.spark.SparkConf

/**
  * Created by yuanxiaofeng on 2016/7/18.
  */
case class HBaseSparkConf (
  hbaseHost: Option[String] = None,
  hbaseXmlConfigFile: String = "hbase-site.xml") extends Serializable {

    /**
      * Builds a fresh Hadoop configuration for HBase access.
      *
      * ZooKeeper-quorum precedence: the explicit `hbaseHost` wins, then any
      * value supplied by `hbaseXmlConfigFile` (when found on the classpath),
      * then [[HBaseSparkConf.DefaultHBaseHost]] as the last resort.
      */
    def createHadoopBaseConfig() = {
      val hadoopConf = HBaseConfiguration.create

      // Layer the optional XML resource on top of the HBase defaults — only
      // when the file is actually present on the classpath.
      Option(getClass.getClassLoader.getResource(hbaseXmlConfigFile)) match {
        case Some(resource) => hadoopConf.addResource(resource)
        case None           => // not on the classpath; keep the defaults
      }

      // An explicitly supplied host overrides whatever the XML provided.
      hbaseHost.foreach(hadoopConf.set(HConstants.ZOOKEEPER_QUORUM, _))

      // Neither the caller nor the XML named a quorum: fall back to the default.
      if (hadoopConf.get(HConstants.ZOOKEEPER_QUORUM) == null)
        hadoopConf.set(HConstants.ZOOKEEPER_QUORUM, HBaseSparkConf.DefaultHBaseHost)

      hadoopConf
    }
  }

  object HBaseSparkConf extends Serializable {

    /** Quorum host used when neither the caller nor the XML config names one. */
    val DefaultHBaseHost = "localhost"

    /**
      * Derives an [[HBaseSparkConf]] from a Spark configuration.
      *
      * Reads the optional `spark.hbase.host` key; when absent the result is
      * `None`, letting `createHadoopBaseConfig` fall through to the XML file
      * and finally to [[DefaultHBaseHost]].
      */
    def fromSparkConf(conf: SparkConf): HBaseSparkConf = {
      // SparkConf.getOption returns Option[String] directly, avoiding the
      // null-sentinel round-trip of Option(conf.get(key, null)).
      HBaseSparkConf(hbaseHost = conf.getOption("spark.hbase.host"))
    }
}
