//package cn.ipanel.bigdata.boot
// NOTE(review): this entire file is commented out — it is the dead Spark 1.6
// HiveContext session builder that was superseded by `Session` (Spark 2.4).
// Prefer deleting the file and relying on version-control history instead of
// keeping commented-out code in the tree.
//
//import cn.ipanel.bigdata.boot.config.Configuration
//import cn.ipanel.bigdata.boot.logger.Logger
//import cn.ipanel.bigdata.utils.Util._
//import org.apache.commons.lang3.exception.ExceptionUtils
//import org.apache.spark.{SparkConf, SparkContext}
//import org.apache.spark.sql.hive.HiveContext
//import org.apache.spark.sql.SQLImplicits
//
//import scala.collection.JavaConversions._
//
//@deprecated
//trait SessionOld {
//  final lazy val IMPLICITS: SQLImplicits = spark.implicits
//  final lazy val spark: HiveContext = SessionOld.build
//}
//
///**
// *  This is the Spark 1.6 implementation; Spark 2.4 switched to {@link Session}.
// *  (original: 这是spark 1.6 的实现方式，2.4 改用 {@link Session})
// */
//object SessionOld {
//
//  // add20220725，一切原罪，都在 newSession 这个方法，spark 1.6 环境不能这么干，2.4 可以
//  // (translation: added 2022-07-25 — the root of all the trouble is the newSession
//  // call: it must not be used in a Spark 1.6 environment, but is fine on 2.4.)
//  private[boot] def build: HiveContext = SPARK// .newSession()
//  private[boot] val UNKNOWN: String = "unknown"
//
//  private[this] lazy val SPARK: HiveContext = {
//    try {
//      val conf = new SparkConf()
//      if (nonEmpty(Configuration.spark.conf))
//        Configuration.spark.conf.foreach(kv => conf.set(kv._1, kv._2))
//      if (nonEmpty(Configuration.spark.jars))
//        conf.setJars(Configuration.spark.jars)
//      val appName = Application.getExecJobName
//      conf.setAppName(if (nonEmpty(appName)) appName else UNKNOWN)
//      conf.setMaster(Configuration.spark.master)
//
//      //      Logger.I("old conf: hive.exec.dynamic.partition.mode: " + conf.get("hive.exec.dynamic.partition.mode") + " hive.exec.dynamic.partition: " + conf.get("hive.exec.dynamic.partition"))
//      //      conf.set("hive.exec.dynamic.partition", "true")
//      //      conf.set("hive.exec.dynamic.partition.mode", "nonstrict")
//      //      Logger.I("new conf: hive.exec.dynamic.partition.mode: " + conf.get("hive.exec.dynamic.partition.mode") + " hive.exec.dynamic.partition: " + conf.get("hive.exec.dynamic.partition"))
//
//      val sparkContext = new SparkContext(conf)
//      sparkContext.setLogLevel(if (isNull(Configuration.spark.loglevel)) "ERROR" else Configuration.spark.loglevel)
//      Logger.I("spark conf: hive.exec.dynamic.partition.mode: " + sparkContext.getConf.get("hive.exec.dynamic.partition.mode") + " hive.exec.dynamic.partition: " + sparkContext.getConf.get("hive.exec.dynamic.partition"))
//      val hiveContext = new HiveContext(sparkContext)
//      Logger.I("hive conf: hive.exec.dynamic.partition.mode: " + sparkContext.getConf.get("hive.exec.dynamic.partition.mode") + " hive.exec.dynamic.partition: " + sparkContext.getConf.get("hive.exec.dynamic.partition"))
//      if (nonEmpty(Configuration.spark.conf))
//        Configuration.spark.conf.foreach(kv => hiveContext.setConf(kv._1, kv._2))
//
//      //      Logger.I("new3 conf: hive.exec.dynamic.partition.mode: " + hiveContext.getConf("hive.exec.dynamic.partition.mode") + " hive.exec.dynamic.partition: " + hiveContext.getConf("hive.exec.dynamic.partition"))
//      //      hiveContext.setConf("hive.exec.dynamic.partition", "true")
//      //      hiveContext.setConf("hive.exec.dynamic.partition.mode", "nonstrict")
//      //      Logger.I("new4 conf: hive.exec.dynamic.partition.mode: " + hiveContext.getConf("hive.exec.dynamic.partition.mode") + " hive.exec.dynamic.partition: " + hiveContext.getConf("hive.exec.dynamic.partition"))
//
//      Logger.I("session build: hive.exec.dynamic.partition.mode: " + hiveContext.getConf("hive.exec.dynamic.partition.mode") + " hive.exec.dynamic.partition: " + hiveContext.getConf("hive.exec.dynamic.partition"))
//
//      hiveContext
//    } catch {
//      case e: Exception =>
//        Logger.E(
//          s""" build spark session failed.
//             | Because: ${ExceptionUtils.getStackTrace(e)}
//             |""".stripMargin)
//        null
//        // NOTE(review): returning null on failure forced every caller to
//        // null-check the HiveContext. If this code is ever revived, fail fast
//        // (rethrow) or return an Option instead of null.
//    }
//  }
//}