package demo.spark.utils

import org.apache.spark.sql.{SQLContext, SparkSession}

/**
  * Factory helpers for obtaining a [[SparkSession]] / [[SQLContext]], optionally
  * with Hive support, pre-configured from an app name, a master URL and an
  * arbitrary sequence of key/value settings.
  */
object SparkSql {

  // NOTE(review): `implicit var` of a common type like String is risky — any
  // String implicit in scope can be picked up silently, and the var is mutable
  // global state. Kept as-is because removing it would break existing callers;
  // consider wrapping in a dedicated value class (e.g. `case class Master(url: String)`).
  /** Default Spark master URL used when no other implicit master is in scope. */
  implicit var master: String = "local[*]"
  /** Default Hive metastore URI used by [[getHiveSession]]. */
  implicit var metastoreUris: String = "thrift://localhost:9083"

  /**
    * Applies every complete (non-null key AND non-null value) entry of `config`
    * to `builder`. Null-safe: a `null` sequence is treated as empty, and entries
    * with a null key or value are skipped rather than failing.
    */
  private def applyConfig(builder: SparkSession.Builder, config: Seq[(String, String)]): Unit =
    if (config != null)
      config.foreach {
        case (key, value) if key != null && value != null => builder.config(key, value)
        case _ => () // skip incomplete entries, matching the original's silent-skip behavior
      }

  /**
    * Builds (or reuses) a [[SparkSession]]. SparkSession is the entry point of the
    * Spark SQL module and embeds an [[SQLContext]].
    *
    * @param appName display name of the application
    * @param config  extra key/value settings; null-safe (see [[applyConfig]])
    * @param master  master URL, defaulting to the object-level implicit
    */
  def getSession(appName: String = "App", config: Seq[(String, String)] = Seq.empty)
                (implicit master: String = master): SparkSession = {
    val builder = SparkSession.builder()
    builder.appName(appName)
    builder.master(master)
    applyConfig(builder, config)
    builder.getOrCreate()
  }

  /**
    * Convenience accessor for the [[SQLContext]] of the session produced by
    * [[getSession]] with the same arguments.
    */
  def getContext(appName: String = "App", config: Seq[(String, String)] = Seq.empty)
                (implicit master: String = master): SQLContext =
    getSession(appName, config)(master).sqlContext

  /**
    * Builds (or reuses) a Hive-enabled [[SparkSession]] pointing at the given
    * metastore. The metastore URI is set BEFORE the extra `config` entries, so a
    * caller-supplied "hive.metastore.uris" in `config` intentionally wins.
    *
    * @param metastoreURIS thrift URI of the Hive metastore; must not be null
    * @throws IllegalArgumentException if `metastoreURIS` is null
    */
  def getHiveSession(appName: String = "App",
                     metastoreURIS: String = metastoreUris,
                     config: Seq[(String, String)] = Seq.empty)
                    (implicit master: String = master): SparkSession = {
    require(metastoreURIS != null)
    val builder = SparkSession.builder()
    builder.appName(appName)
    builder.master(master)
    builder.config("hive.metastore.uris", metastoreURIS)
    applyConfig(builder, config)
    builder.enableHiveSupport().getOrCreate()
  }
}
