package com.hucais.etl.job.controller

import com.hucais.core.utils.{DefaultPropertiesUtil, SparkDefaultUtil}
import com.hucais.etl.job.service.DwsSelectService

/**
 * CLI entry point for the DWS select ETL job.
 *
 * Expects exactly two arguments: args(0) = startDate, args(1) = endDate.
 * Builds a cluster-mode SparkSession (with Elasticsearch and Hive support,
 * via SparkDefaultUtil), points HDFS at the configured default filesystem,
 * and delegates the actual work to DwsSelectService.action.
 */
object DwsSelectController {
  def main(args: Array[String]): Unit = {
    if (args.length < 2) {
      // Runtime message kept verbatim; it says startDate/endDate must not be empty.
      println("参数startDate、参数endDate都不能为空")
    } else {
      // isLocal = false targets the cluster; flip to true for local debugging.
      val sparkSession = SparkDefaultUtil.getSession(isLocal = false, this.getClass.getSimpleName, withEs = true, withHive = true)
      try {
        val ssc = sparkSession.sparkContext
        // Point the Hadoop client at the configured default filesystem
        // before the service touches HDFS.
        ssc.hadoopConfiguration.set("fs.defaultFS", DefaultPropertiesUtil.get("fs.defaultFS"))
        DwsSelectService.action(ssc, sparkSession, args(0), args(1))
      } finally {
        // Always release cluster resources, even if the job throws.
        sparkSession.stop()
      }
    }
  }

}
