package com.ywps.vaas.framework.util

import com.ywps.vaas.framework.constant.PropertiesConstant
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.SparkSession.Builder

/**
 * 环境变量工具类: 将共享数据放在ThreadLocal，同一线程可以访问
 * 这里用于SparkSession的共享使用
 */
/**
 * Environment utility: keeps shared data in a ThreadLocal so that code running
 * on the same thread can access it. Used here to share one SparkSession per thread.
 */
object EnvUtil {

  // Per-thread holder; each thread sees only the SparkSession it stored itself.
  private val scThreadLocal = new ThreadLocal[SparkSession]()

  /**
   * Stores the given SparkSession for the current thread.
   *
   * @param sc session to share on this thread
   */
  def put(sc: SparkSession): Unit = {
    scThreadLocal.set(sc)
  }

  /**
   * Configures remote Hive access on the session, then stores it for the
   * current thread.
   *
   * @param sc                   session to configure and store
   * @param hiveMetastoreUris    Hive metastore URI (default port 9083; see hive-site.xml)
   * @param sparkSqlWarehouseDir Hive warehouse directory
   */
  def put(sc: SparkSession, hiveMetastoreUris: String, sparkSqlWarehouseDir: String): Unit = {
    sc.conf.set(PropertiesConstant.HIVE_METASTORE_URIS, hiveMetastoreUris)
    sc.conf.set(PropertiesConstant.SPARK_SQL_WAREHOUSE_DIR, sparkSqlWarehouseDir)
    scThreadLocal.set(sc)
  }

  /**
   * Builds a SparkSession from a custom builder and stores it for the
   * current thread.
   *
   * @param builder custom SparkSession builder
   * @param flag    when true, enables Hive support before building
   */
  def put(builder: Builder, flag: Boolean): Unit = {
    // `flag` is a Boolean, so true/false are exhaustive; the original
    // `case _` fallback was unreachable dead code. A plain if-expression
    // is the idiomatic equivalent.
    val session =
      if (flag) builder.enableHiveSupport().getOrCreate()
      else builder.getOrCreate()
    scThreadLocal.set(session)
  }

  /**
   * Returns the SparkSession stored for the current thread, or null when
   * nothing was stored (ThreadLocal.get's default) — callers should put() first.
   */
  def take(): SparkSession = {
    scThreadLocal.get()
  }

  /**
   * Clears the current thread's stored SparkSession. Call this when done to
   * avoid stale references on reused (pooled) threads.
   */
  def remove(): Unit = {
    scThreadLocal.remove()
  }
}
