package com.cmnit.analysis.common

import com.cmnit.analysis.util.{ConfigurationManager, EnvUtil}
import org.apache.hadoop.security.UserGroupInformation
import org.apache.log4j.Logger
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

import scala.util.control.NonFatal

trait TApplication {

  private val logger: Logger = Logger.getLogger(this.getClass)

  /**
   * Bootstraps a Spark application and runs the supplied body.
   *
   * Performs, in order: Kerberos login from the configured keytab, SparkConf
   * construction from `ConfigurationManager` properties, SparkSession creation
   * (with Hive support), publication of the session via [[EnvUtil]]'s
   * thread-local, execution of `op`, and guaranteed cleanup of the
   * thread-local and the SparkSession.
   *
   * @param appName Spark application name shown in the UI / resource manager.
   * @param op      by-name application body; executed once the session is ready.
   *                Non-fatal exceptions thrown by `op` are logged and swallowed
   *                (preserving the original contract); fatal errors propagate,
   *                but cleanup still runs via `finally`.
   */
  def start(appName: String)(op: => Unit): Unit = {
    // Kerberos configuration: krb5.conf location and ZooKeeper SASL client settings.
    System.setProperty("java.security.krb5.conf", ConfigurationManager.getProperty("java.security.krb5.conf"))
    System.setProperty("zookeeper.sasl.clientconfig", "Client")
    System.setProperty("zookeeper.sasl.client", "true")

    // TODO local mode needs the user credentials to be passed in explicitly
    UserGroupInformation.loginUserFromKeytab(ConfigurationManager.getProperty("kerberos.principal"), ConfigurationManager.getProperty("keytab.file"))
    logger.info("login user: " + UserGroupInformation.getLoginUser)

    // Spark configuration, driven entirely by externalized properties.
    val conf = new SparkConf()
    conf.set("spark.serializer", ConfigurationManager.getProperty("spark.serializer"))
    conf.set("spark.scheduler.mode", ConfigurationManager.getProperty("spark.scheduler.mode"))
    conf.set("spark.executor.extraJavaOptions", ConfigurationManager.getProperty("spark.executor.extraJavaOptions"))
    conf.set("spark.default.parallelism", ConfigurationManager.getProperty("spark.default.parallelism"))
    conf.set("spark.sql.shuffle.partitions", ConfigurationManager.getProperty("spark.sql.shuffle.partitions"))
    conf.set("sparkSession.debug.maxToStringFields", ConfigurationManager.getProperty("sparkSession.debug.maxToStringFields"))
    conf.set("hive.exec.dynamic.partition", "true")
    conf.set("hive.exec.dynamic.partition.mode", "nonstrict")
    conf.set("spark.sql.crossJoin.enabled", "true")
    // Ensure that on application kill, already-received data is processed before shutdown.
    conf.set("spark.streaming.stopGracefullyOnShutdown", "true")

    logger.info("APP名称：" + appName)

    val mode = ConfigurationManager.getProperty("spark.runMode")
    logger.info("运行方式：" + mode)

    // Build the SparkSession (with Hive support) from the assembled configuration.
    val sparkSession: SparkSession = SparkSession.builder.config(conf).master(mode).appName(appName).enableHiveSupport.getOrCreate
    logger.info("创建sparkSession对象")
    // Publish the SparkSession to the thread-local environment holder.
    EnvUtil.put(sparkSession)

    try {
      op
    } catch {
      // Only non-fatal exceptions are handled; catching Throwable (as before) would
      // swallow OutOfMemoryError/InterruptedException. Log with the stack trace
      // instead of println-ing just the message, which discarded the trace entirely.
      case NonFatal(ex) => logger.error("application failed: " + ex.getMessage, ex)
    } finally {
      // Cleanup now runs even if a fatal error propagates — previously a failure
      // path could leak the SparkSession and leave the thread-local populated.
      EnvUtil.clear()
      sparkSession.close()
    }
  }
}
