package com.spark.util.core

import com.spark.util.listener.SparkCoreListener
import com.spark.util.utils.PropertiesUtil
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.serializer.KryoSerializer
import org.apache.spark.sql.SparkSession

/**
 * Mixin that bootstraps a shared [[SparkConf]] and provides helpers for
 * obtaining a [[SparkSession]]. Meant to be mixed into application entry-point
 * objects — the structural self-type requires the mixing object to define a
 * `main` method.
 */
trait Sparking {

  self:{ def main(args: Array[String]):Unit } =>

  // Trait-body statements execute when the mixing object is initialized:
  // set up logging and load the environment properties before anything else.
  Logging.init()
  PropertiesUtil.load("dev.properties")

  // Shared configuration: Kryo serialization, the core listener, and an app
  // name derived from the concrete object's class (dropping the companion '$').
  val conf = new SparkConf()
  conf.set("spark.serializer", classOf[KryoSerializer].getName)
  conf.set("spark.extraListeners", classOf[SparkCoreListener].getName)
  conf.setAppName(this.getClass.getName.stripSuffix("$"))

  /** Silence noisy framework loggers so only WARN and above reach the console. */
  def enableLogEliminating(): Unit = {
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.WARN)
    Logger.getLogger("org.apache.kafka.clients.consumer").setLevel(Level.WARN)
  }

  /** Opt in to failure notifications — presumably consumed by [[SparkCoreListener]]; verify there. */
  def enableTaskMonitorSupport(): Unit = conf.set("enableSendMessageOnTaskFail", "true")

  /**
   * Build (or reuse) a [[SparkSession]] from the shared `conf`.
   *
   * @param uris optional Hive metastore URI(s); when present, Hive support is enabled
   * @return the process-wide SparkSession
   */
  def getSparkSession(uris: Option[String]): SparkSession = {
    // Fall back to local mode when no master was configured
    // (e.g. running from the IDE rather than via spark-submit).
    if (conf.getOption("spark.master").isEmpty) conf.setMaster("local[*]")
    val builder = SparkSession.builder().config(conf)
    // Builder methods mutate and return the same instance, so the result of
    // config(...)/enableHiveSupport() need not be reassigned.
    uris.foreach { uri =>
      builder
        .config("hive.metastore.uris", uri)
        .enableHiveSupport()
    }
    builder.getOrCreate()
  }
}
