package com.hucais.core.utils

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
 * 用于对Spark程序的配置和参数统一处理的工具类
 */
object SparkDefaultUtil {

  /**
   * Builds (or retrieves) a [[SparkSession]] configured with this project's defaults:
   * Kryo serialization, zero locality wait, and optional ElasticSearch / Hive integration.
   *
   * @param isLocal      true to run with master `local[*]` for local debugging;
   *                     false to rely on the cluster-provided master.
   * @param inputAppName application name; a trailing `$` (as produced by
   *                     `getClass.getName` on a Scala object) is stripped.
   * @param withEs       true to add the elasticsearch-hadoop connector settings,
   *                     with nodes/port/credentials read via `DefaultPropertiesUtil`.
   * @param withHive     true to enable Hive support on the session builder.
   * @return a shared [[SparkSession]] from `getOrCreate()`.
   */
  def getSession(isLocal: Boolean, inputAppName: String, withEs: Boolean, withHive: Boolean): SparkSession = {
    // Identify as "root" for HDFS access.
    // NOTE(review): hard-coded superuser name — consider making this configurable.
    System.setProperty("HADOOP_USER_NAME", "root")

    // Scala object class names end in '$'; remove it for a clean app name.
    val cleanedAppName = inputAppName.replaceAll("\\$", "")

    // NOTE(review): spark.driver.allowMultipleContexts is deprecated (removed in
    // Spark 3.x); kept here to preserve existing behavior — confirm before upgrading.
    val conf = new SparkConf()
      .setAppName(cleanedAppName)
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .set("spark.driver.allowMultipleContexts", "true")
      .set("spark.locality.wait", "0")

    if (isLocal) {
      conf.setMaster("local[*]")
    }

    if (withEs) {
      // elasticsearch-hadoop connector settings; connection details come from
      // the default properties file via DefaultPropertiesUtil.
      val esSettings = Seq(
        "es.index.auto.create"    -> "false",
        "es.batch.write.refresh"  -> "false",
        "es.nodes.wan.only"       -> "true",
        "es.nodes.discovery"      -> "false",
        "es.nodes"                -> DefaultPropertiesUtil.get("es.nodes"),
        "es.port"                 -> DefaultPropertiesUtil.get("es.port"),
        "es.net.http.auth.user"   -> DefaultPropertiesUtil.get("es.username"),
        "es.net.http.auth.pass"   -> DefaultPropertiesUtil.get("es.password"),
        "es.batch.size.entries"   -> "2000"
      )
      esSettings.foreach { case (key, value) => conf.set(key, value) }
    }

    // Enable Hive support only when requested, then build/reuse the session.
    val builder = SparkSession.builder().config(conf)
    val maybeHiveBuilder = if (withHive) builder.enableHiveSupport() else builder
    maybeHiveBuilder.getOrCreate()
  }

}
