package org.qnit.util

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

object SparkUtil {

  /** Environments this utility recognises. */
  val envs: Array[String] = Array("dev", "prod")

  /** Returns true when `env` is one of the supported environments in [[envs]]. */
  def verifyEnv(env: String): Boolean = envs.contains(env)

  /**
   * Builds a Hive-enabled [[SparkSession]] for the given environment.
   *
   * @param env     execution environment; must be one of [[envs]].
   *                "dev" runs against a local master (`local[*]`),
   *                "prod" leaves the master to the cluster launcher.
   * @param appName Spark application name set on the [[SparkConf]]
   * @return the configured (or pre-existing, via `getOrCreate`) SparkSession
   * @throws IllegalArgumentException if `env` is not a supported environment
   */
  def initSparkSession(env: String, appName: String): SparkSession = {
    val sparkConf = env match {
      case "dev"  => new SparkConf().setAppName(appName).setMaster("local[*]")
      case "prod" => new SparkConf().setAppName(appName)
      // Fail fast with a clear message instead of an opaque MatchError.
      case other =>
        throw new IllegalArgumentException(
          s"Unsupported env '$other'; expected one of ${envs.mkString(", ")}")
    }
    // NOTE(review): forces the Hadoop user to "dev" for EVERY environment,
    // including prod — confirm this is intended and not a leftover from testing.
    System.setProperty("HADOOP_USER_NAME", "dev")
    val spark = SparkSession.builder().config(sparkConf).enableHiveSupport().getOrCreate()
    val sc = spark.sparkContext
    sc.setLogLevel("error")
    // Point the default filesystem at the HA namenode nameservice.
    sc.hadoopConfiguration.set("fs.defaultFS", "hdfs://nameservice1")
    spark
  }

}
