package cn.itcast.tags.utils

import java.util
import java.util.Map

import cn.itcast.tags.config.ModelConfig
import com.typesafe.config.{Config, ConfigFactory, ConfigValue}
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

object SparkUtils {

  /**
   * Builds a [[SparkConf]] populated from a properties resource on the classpath.
   *
   * Typesafe Config merges the named resource with reference.conf and system
   * properties, so each entry is copied into the SparkConf only when its
   * origin is the requested resource file itself.
   *
   * @param resource classpath resource name, e.g. "spark.properties"
   * @return a SparkConf holding the key/value pairs read from that resource
   */
  def loadConf(resource: String): SparkConf = {
    val conf = new SparkConf()
    val loaded: Config = ConfigFactory.load(resource)

    import scala.collection.JavaConverters._
    for (entry <- loaded.entrySet().asScala) {
      // Keep only entries that actually originate from the requested file,
      // dropping values merged in from system properties / reference.conf.
      if (resource.equals(entry.getValue.origin().resource())) {
        conf.set(entry.getKey, entry.getValue.unwrapped().toString)
      }
    }
    conf
  }

  /**
   * Creates (or reuses) a [[SparkSession]] configured from "spark.properties".
   *
   * The application name is derived from the given class; the trailing '$'
   * of a Scala object's runtime class is stripped. Local master and Hive
   * integration are driven by [[ModelConfig]]; Hive support may also be
   * forced per-call via `isHive`.
   *
   * @param clazz  class whose simple name becomes the Spark application name
   * @param isHive force Hive support even when ModelConfig.APP_IS_HIVE is false
   * @return an active SparkSession
   */
  def createSparkSession(clazz: Class[_], isHive: Boolean = false): SparkSession = {
    // Base configuration loaded from the classpath resource.
    val sparkConf = loadConf("spark.properties")

    // Local mode: set the master URL explicitly when configured to run locally.
    if (ModelConfig.APP_IS_LOCAL) {
      sparkConf.setMaster(ModelConfig.APP_SPARK_MASTER)
    }

    val builder: SparkSession.Builder = SparkSession.builder()
      .config(sparkConf)
      .appName(clazz.getSimpleName.stripSuffix("$"))

    // Enable Hive when configured globally or requested by the caller.
    // Builder methods mutate in place, so the returned value may be discarded.
    if (ModelConfig.APP_IS_HIVE || isHive) {
      builder
        .enableHiveSupport()
        .config("hive.metastore.uris", ModelConfig.APP_HIVE_META_STORE_URL)
    }

    builder.getOrCreate()
  }

}
