package com.shujia.common

import org.apache.spark.sql.SparkSession

object SparkTool {

  /**
    * Builds (or reuses) the SparkSession for a Spark SQL job.
    *
    * Behavior is driven by the "is.local" flag in [[Config]]: when true the
    * session runs with a `local[4]` master and a single shuffle partition
    * (suited to small local test data); otherwise no master is set, so it is
    * taken from the cluster submit environment (e.g. spark-submit --master).
    *
    * @param appName job name shown in the Spark UI (defaults to "sparkJob")
    * @return the shared SparkSession instance (getOrCreate reuses an existing one)
    */
  def getSparkSession(appName: String = "sparkJob"): SparkSession = {

    val isLocal: Boolean = Config.getBoolean("is.local")

    // Start from the common builder settings, then layer on local-only config.
    val builder = SparkSession
      .builder()
      .appName(appName)

    val configured =
      if (isLocal) {
        builder
          .master("local[4]")
          // Keep shuffle output to one partition for small local datasets.
          .config("spark.sql.shuffle.partitions", 1)
      } else {
        builder
      }

    // Expression-oriented: no var/null, the session is the result directly.
    configured.getOrCreate()
  }

}
