package com.zt.bigdata.template.spark

import com.zt.bigdata.spark.common.dto.BasicParameter
import org.apache.spark.SparkContext
import org.apache.spark.internal.Logging
import org.apache.spark.sql.SparkSession
import org.apache.spark.util.LongAccumulator

trait BasicTemplate[P <: BasicParameter] extends Logging with Serializable {

  /** Application name shown in the Spark UI; set via [[setAppName]] before building the session. */
  protected var appName: String = ""
  // Cached session created by buildSparkSession; null until that method is called.
  private var innerSparkSession: SparkSession = _

  /**
    * Sets the Spark application name.
    *
    * @param appName the application name shown in the Spark UI
    * @return this template, for fluent chaining
    */
  def setAppName(appName: String): this.type = {
    this.appName = appName
    this
  }

  /** Entry point implemented by concrete templates: runs the job with the given parameters. */
  def process(parameter: P): Unit

  /**
    * Initializes the SparkSession. Hive support is disabled by default.
    *
    * @param parameter   job parameters; `sparkMaster`, when non-null, overrides the master URL
    * @param config      extra Spark configuration entries applied to the builder
    * @param enabledHive whether to enable Hive support on the session
    * @return the created (or reused) SparkSession
    */
  def buildSparkSession(parameter: P, config: Map[String, String] = Map(), enabledHive: Boolean = false): SparkSession = {
    log.info(s"Starting $appName")
    val builder = SparkSession.builder
    // Only override the master when explicitly provided (e.g. local runs);
    // otherwise defer to spark-submit / the cluster manager.
    Option(parameter.sparkMaster).foreach(builder.master)
    config.foreach { case (key, value) => builder.config(key, value) }
    if (enabledHive) {
      builder.enableHiveSupport()
    }
    innerSparkSession = builder.appName(appName).getOrCreate()
    innerSparkSession
  }

  /**
    * Creates an accumulator used by tasks to signal errors, and starts a
    * background watcher thread that polls it once a minute. The first time the
    * accumulator becomes non-zero, `processErrorFun` is invoked once with the
    * accumulated value and the watcher stops.
    *
    * @param category        label used for the accumulator name and log messages
    * @param sc              SparkContext used to create the accumulator
    * @param processErrorFun callback invoked once with the accumulated error count
    * @return the accumulator that tasks should increment on error
    */
  protected def errorMonitor(category: String, sc: SparkContext, processErrorFun: Long => Unit): LongAccumulator = {
    val errorCode = sc.longAccumulator(s"dmp.error.$category")
    val watcher = new Thread(new Runnable {
      override def run(): Unit = {
        var hasSentError = false
        while (!hasSentError) {
          if (!errorCode.isZero) {
            log.error(s"Topic [$category] execute error.")
            hasSentError = true
            processErrorFun(errorCode.value)
          } else {
            // Sleep only when no error was seen, so the thread exits promptly
            // after dispatching instead of lingering one extra minute.
            Thread.sleep(60000)
          }
        }
      }
    }, s"error-monitor-$category")
    // Daemon: if no error ever occurs this loop never terminates, and a
    // non-daemon thread would keep the JVM alive after the job finishes.
    watcher.setDaemon(true)
    watcher.start()
    errorCode
  }

}
