package com.shujia.utils

import org.apache.spark.internal.Logging
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.slf4j.Logger

/**
 * Template base class for Spark jobs that extract a DataFrame and persist
 * it to HBase. Subclasses implement [[extractData]] and are expected to
 * assign the HBase target parameters (`tableName`, `columnFamily`, `tagName`)
 * before the save step runs.
 */
abstract class SparkTool extends Logging {
  val logger: Logger = log

  // Partition date being processed; set from the first CLI argument in main().
  var pt: String = _

  // Parameters required for saving the result DataFrame to HBase.
  // NOTE(review): these are passed unchecked to HBaseTools.saveToHBase —
  // subclasses must assign them (e.g. in extractData) or the save will
  // receive nulls.
  var tableName: String = _
  var columnFamily: String = _
  var tagName: String = _

  /**
   * Job entry point: validates arguments, builds the SparkSession (with Hive
   * support), delegates extraction to the subclass, and saves the result to
   * HBase. The session is always stopped, even when the job fails.
   *
   * @param args args(0) must be the partition date to process
   */
  def main(args: Array[String]): Unit = {
    if (args.isEmpty) {
      logger.error("请指定分区日期！")
    } else {
      pt = args.head
      logger.info(s"正在处理的分区为：$pt")

      // Build the Spark environment; app name is the concrete job's class
      // name with the Scala object suffix "$" stripped.
      val spark: SparkSession = SparkSession
        .builder()
        .appName(this.getClass.getSimpleName.replace("$", ""))
        .enableHiveSupport()
        .getOrCreate()

      try {
        // Extract the data (implemented by the concrete job).
        val df: DataFrame = extractData(spark)

        // Persist the result DataFrame to HBase.
        HBaseTools.saveToHBase(df, tableName, columnFamily, tagName)
      } finally {
        // Release Spark resources even if extraction or the save throws;
        // the original code leaked the session on failure.
        spark.stop()
      }
    }
  }

  /** Extracts this job's data as a DataFrame; implemented by each concrete job. */
  def extractData(spark: SparkSession): DataFrame
}
