package com.hucais.etl.job.controller

import com.hucais.core.utils.{DefaultPropertiesUtil, SparkDefaultUtil}
import com.hucais.etl.job.service.DwdBookBaseInfoService

/**
 * Entry point for the DWD book base-info ETL job.
 *
 * Expects exactly three arguments:
 *   args(0) = startDate
 *   args(1) = endDate
 *   args(2) = isHandleHistoryData ("true" to run the historical-data path)
 *
 * Dispatches to [[DwdBookBaseInfoService.actionOnHistoryData]] when the third
 * argument is "true", otherwise to [[DwdBookBaseInfoService.action]].
 */
object DwdBookBaseInfoController {

  def main(args: Array[String]): Unit = {
    // `args.length != 3` already covers the empty-args case; the original
    // extra `args.length == 0` check was redundant.
    if (args.length != 3) {
      println("参数startDate、参数endDate、参数isHandleHistoryData都不能为空")
      return
    }

    val startDate = args(0)
    val endDate = args(1)
    val isHandleHistoryData = args(2)

    // Obtain the SparkSession (set isLocal = true for local debugging).
//    val sparkSession = SparkDefaultUtil.getSession(isLocal = true, this.getClass.getSimpleName, withEs = false, withHive = true)
    val sparkSession = SparkDefaultUtil.getSession(isLocal = false, this.getClass.getSimpleName, withEs = false, withHive = true)
    val ssc = sparkSession.sparkContext
    // Point HDFS at the configured default filesystem.
    ssc.hadoopConfiguration.set("fs.defaultFS", DefaultPropertiesUtil.get("fs.defaultFS"))

    // "true" selects the one-off historical backfill; anything else runs the
    // regular incremental load. Comparison kept case-sensitive to preserve
    // the original behavior.
    if (isHandleHistoryData == "true") {
      DwdBookBaseInfoService.actionOnHistoryData(ssc, sparkSession, startDate, endDate)
    } else {
      DwdBookBaseInfoService.action(ssc, sparkSession, startDate, endDate)
    }
  }

}
