package com.fine.core

import com.alibaba.fastjson.{JSON, JSONObject}
import com.fine.core.parser.ConfigParser
import com.fine.core.stage.BatchPip
import com.fine.core.utils.SparkUtils
import com.fine.spark.connector.base.utils.Logging
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

import java.lang

/**
 * 基础入口
 *
 * @author jayce
 * @date 2021/11/20 4:47 PM
 * @version 1.0
 */
object App extends Logging {

  /**
   * Runs the batch pipeline described by the given configuration.
   *
   * Builds a [[SparkSession]] (optionally Hive-enabled, driven by the
   * `enableHive` flag in the config), starts the batch pipeline, and always
   * stops the session afterwards.
   *
   * @param config parsed job configuration; may be null (treated as no config)
   * @param conf   Spark configuration used to build the session
   */
  def batch(config: JSONObject, conf: SparkConf): Unit = {
    logger.info("start batch process")

    // fastjson's getBoolean returns null when the key is absent. The previous
    // `.map(...).getOrElse(false)` wrapped that null as Some(null), so the
    // default never applied and a null Boolean leaked downstream. flatMap over
    // Option(...) collapses both "no config" and "key missing" to false.
    val enableHive: Boolean = Option(config)
      .flatMap(json => Option(json.getBoolean("enableHive")))
      .exists(_.booleanValue())

    implicit val ss: SparkSession = SparkUtils.initSparkSession(conf, enableHive)

    // Guarantee the session is released even if the pipeline throws.
    try {
      BatchPip.startPip(config)
    } finally {
      ss.stop()
    }
  }

  /**
   * Entry point. Expects one argument: the absolute path of the YAML
   * configuration file. The YAML is converted to a JSON string, parsed into a
   * [[JSONObject]], and used to configure and launch the batch job.
   *
   * @param args command-line arguments; args(0) is the config file path
   */
  def main(args: Array[String]): Unit = {
    // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
    require(args.nonEmpty, "usage: App <absolute-path-to-config-yaml>")

    val absolutePath = args(0)
    val configJson: String = ConfigParser
      .loadYaml2String(absolutePath)
    val config: JSONObject = JSON.parseObject(configJson)

    val conf: SparkConf = SparkUtils.getSparkConf(configJson)

    // Instantiate each pipeline stage from the parsed config and run the batch job.
    batch(config, conf)
  }
}
