package com.shujia.common.util

import org.apache.spark.internal.Logging
import org.apache.spark.sql.SparkSession

/**
  * Base class for Spark jobs: encapsulates the boilerplate shared by all
  * jobs (date-argument parsing and SparkSession construction) so that
  * concrete subclasses only implement their job-specific logic.
  */

abstract class SparkTool extends Logging {

  // Day partition key (expected format yyyyMMdd), taken from args(0) in main().
  var day_id: String = _
  // Month partition key (yyyyMM), the first 6 characters of day_id.
  var month_id: String = _

  /**
    * Application entry point.
    *
    * Validates the date argument, derives the month key, builds a
    * Hive-enabled SparkSession named after the concrete subclass, and then
    * delegates the job-specific logic to [[run]].
    *
    * @param args args(0) must be a day id of at least 6 characters
    *             (expected format yyyyMMdd)
    */
  def main(args: Array[String]): Unit = {

    // The time argument is mandatory.
    if (args.length == 0) {
      log.error("请传入时间参数")
      return
    }

    day_id = args(0)

    // Guard: substring(0, 6) below would throw StringIndexOutOfBoundsException
    // on an argument shorter than 6 characters.
    if (day_id.length < 6) {
      log.error(s"时间参数格式错误：$day_id")
      return
    }

    // Month key = first 6 characters of the day key (yyyyMM).
    month_id = day_id.substring(0, 6)

    log.info(s"时间参数：$day_id")

    log.info("创建spark环境")

    // Job name from the concrete subclass; strip the trailing '$' that the
    // compiler appends to the runtime class of a Scala `object`.
    val jobName: String = this
      .getClass
      .getSimpleName
      .replace("$", "")

    val spark: SparkSession = SparkSession
      .builder()
      .appName(jobName)
      .enableHiveSupport() // enable Hive metastore support
      .getOrCreate()

    this.run(spark)

  }

  /**
    * Job-specific logic, implemented by concrete subclasses.
    *
    * Note: explicit `: Unit` result type — procedure syntax is deprecated
    * in Scala 2.13 and removed in Scala 3.
    *
    * @param spark the Hive-enabled SparkSession built by [[main]]
    */
  def run(spark: SparkSession): Unit

}
