package cn.ipanel.bigdata.boot

import cn.ipanel.bigdata.boot.config.Configuration
import cn.ipanel.bigdata.boot.logger.Logger
import cn.ipanel.bigdata.utils.Dictionary
import cn.ipanel.bigdata.utils.Util._
import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.spark.SparkConf
import org.apache.spark.sql.expressions.UserDefinedFunction
import org.apache.spark.sql.{SQLImplicits, SparkSession}

import scala.collection.JavaConversions._
import scala.util.control.NonFatal

/**
 * Mixes lazy access to a shared [[SparkSession]] into a job, plus eagerly
 * registered UDFs for map-column extraction. Concrete jobs may override
 * [[optCostumerConf]] to inject extra component configuration (e.g. redis)
 * before the session is built.
 *
 * Author: lzz
 * Date: 2021/11/16 16:15
 */
trait Session {

  // Spark implicit conversions (toDF, $-syntax, ...) bound to this trait's session.
  lazy val IMPLICITS: SQLImplicits = spark.implicits

  // Snapshot of the job-supplied configuration; evaluated once, lazily,
  // immediately before the session is built.
  private final lazy val customerConf: Map[String, String] = optCostumerConf

  // The session for this job, derived from the shared context built in the
  // companion object. Forced on first access.
  final lazy val spark: SparkSession = Session.build(customerConf)

  /**
   * Extra Spark/component configuration supplied by the concrete job,
   * applied on top of [[Configuration.spark]] when the session is created.
   *
   * NOTE(review): the historical spelling "Costumer" is kept because
   * subclasses override this method; renaming would break them.
   *
   * @return custom conf entries; empty when the job has none. Previously
   *         defaulted to `null`; `Session` guards with `!= null && nonEmpty`,
   *         so an empty map behaves identically and avoids the null.
   */
  def optCostumerConf: Map[String, String] = Map.empty

  /**
   * Map-extraction UDFs registered on this session.
   *
   * Note (translated from Chinese): do not give the UDF arguments a default
   * of `null`, otherwise no error is raised but parsing the map field
   * silently fails.
   */
  object Functions {
    val func_mapToS: UserDefinedFunction = spark.udf.register("_mapToS", mapToS _)
    val func_mapToI: UserDefinedFunction = spark.udf.register("_mapToI", mapToI _)
    val func_mapToL: UserDefinedFunction = spark.udf.register("_mapToL", mapToL _)
  }
}

object Session {

  // Last job-supplied conf handed to build(); read exactly once, when SPARK
  // is first forced. NOTE: conf passed on later build() calls is silently
  // ignored because the underlying session is a lazy val.
  private var customerConf: Map[String, String] = _

  /**
   * Records the job's custom configuration and returns a fresh session
   * sharing the underlying SparkContext.
   */
  private[boot] def build(customerConf: Map[String, String]): SparkSession = {
    this.customerConf = customerConf
    SPARK.newSession()
  }

  /**
   * The single underlying SparkSession, assembled from [[Configuration.spark]]
   * plus whatever customer conf [[build]] captured before first access.
   *
   * On failure the error is logged and rethrown. (Previously `null` was
   * returned, which only deferred the crash to an uninformative NPE at
   * `SPARK.newSession()` in [[build]].)
   */
  private[this] lazy val SPARK: SparkSession = {
    try {
      val conf = new SparkConf()
      // User-supplied configuration for other components, e.g. redis.
      if (customerConf != null && customerConf.nonEmpty)
        conf.setAll(customerConf)

      if (nonEmpty(Configuration.spark.conf))
        Configuration.spark.conf.foreach { case (k, v) => conf.set(k, v) }
      if (nonEmpty(Configuration.spark.jars))
        conf.setJars(Configuration.spark.jars)

      val appName = Application.getExecJobName
      val spark = SparkSession.builder()
        .appName(if (nonEmpty(appName)) appName else Dictionary.UNKNOWN)
        .master(Configuration.spark.master)
        .config(conf)
        .enableHiveSupport()
        .getOrCreate()
      // Quiet driver logs by default unless a level is configured.
      spark.sparkContext.setLogLevel(
        if (isNull(Configuration.spark.loglevel)) "ERROR" else Configuration.spark.loglevel)
      spark
    } catch {
      // NonFatal: let OutOfMemoryError/InterruptedException etc. propagate untouched.
      case NonFatal(e) =>
        Logger.E(
          s""" build spark session failed.
             | Because: ${ExceptionUtils.getStackTrace(e)}
             |""".stripMargin)
        throw e
    }
  }
}