package com.hucais.etl.job.controller

import com.hucais.core.utils.DefaultPropertiesUtil
import com.hucais.etl.job.service.DdService
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
 * Entry point for the DD ETL Spark job.
 *
 * Builds a Spark session with Hive and Elasticsearch support, then delegates
 * the actual processing to [[DdService.action]] for a date window.
 *
 * Usage: DdController [startDate] [endDate]
 *   - startDate / endDate: optional, format yyyy-MM-dd.
 *   - When omitted, defaults to the window 2022-02-28 .. 2022-03-01
 *     (preserves the previously hard-coded behavior).
 */
object DdController {
  def main(args: Array[String]): Unit = {
    // Run HDFS operations as root regardless of the local OS user.
    System.setProperty("HADOOP_USER_NAME", "root")

    val sparkConf = new SparkConf()
      .setAppName("DdController")
//      .setMaster("local[*]")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .set("spark.driver.allowMultipleContexts", "true")
      // Indices are expected to exist already; fail fast instead of
      // auto-creating them with inferred mappings.
      .set("es.index.auto.create", "false")
      .set("es.nodes", DefaultPropertiesUtil.get("es.nodes"))
      .set("es.port", DefaultPropertiesUtil.get("es.port"))

    val sparkSession = SparkSession.builder().config(sparkConf).enableHiveSupport().getOrCreate()
    val ssc = sparkSession.sparkContext
    // Point the job at the configured HDFS namenode.
    ssc.hadoopConfiguration.set("fs.defaultFS", DefaultPropertiesUtil.get("fs.defaultFS"))

    // Date window comes from CLI args when supplied; otherwise fall back to
    // the original fixed window so existing invocations keep working.
    val startDate = args.lift(0).getOrElse("2022-02-28")
    val endDate = args.lift(1).getOrElse("2022-03-01")

    try {
      DdService.action(ssc, sparkSession, startDate, endDate)
    } finally {
      // Release cluster resources even if the service call throws.
      sparkSession.stop()
    }
  }

}
