package com.fine.core.utils

import cn.hutool.core.date.DateUtil
import com.alibaba.fastjson.{JSON, JSONArray, JSONObject}
import com.fine.spark.connector.base.utils.Logging
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

import scala.collection.convert.ImplicitConversions._
import scala.collection.mutable

/**
 * @author jayce
 * @date 2021/11/21 10:41 AM
 * @version 1.0
 */
object SparkUtils extends Logging {

  /**
   * Builds (or reuses) a [[SparkSession]] from the given configuration.
   *
   * @param conf        fully-populated Spark configuration to apply to the builder
   * @param hiveEnabled when true, Hive support is enabled on the session
   * @return the created or pre-existing SparkSession
   */
  def initSparkSession(conf: SparkConf, hiveEnabled: Boolean): SparkSession = {
    val builder: SparkSession.Builder = SparkSession
      .builder()
      .config(conf)
    // Plain conditional: wrapping a primitive Boolean in Option(...) always
    // yields Some(...) and only obscures the intent.
    if (hiveEnabled) {
      builder.enableHiveSupport()
      logger.info("set enableHiveSupport.")
    }
    // getOrCreate() is side-effecting, so keep the parentheses.
    val session = builder.getOrCreate()
    logger.info("Spark session initialized success.")
    session
  }

  /**
   * Assembles a [[SparkConf]] from a JSON configuration string.
   *
   * Applies a default app name, a local-mode fallback when no master is
   * configured, a few fixed SQL/Hive settings, and finally every key/value
   * pair from the parsed environment (so explicit settings win).
   *
   * @param envConfigs JSON string understood by [[getSparkEnv]]
   * @return the assembled SparkConf
   */
  def getSparkConf(envConfigs: String): SparkConf = {
    val conf = new SparkConf()
    val env: Map[String, String] = getSparkEnv(envConfigs)

    conf.setAppName(env.getOrElse("spark.app.name", DateUtil.now()))

    // Development fallback: no master configured means run locally.
    // Use the Scala Map's own `contains` instead of Java's `containsKey`,
    // which only compiled via an implicit Scala->Java map conversion.
    if (!env.contains("spark.master")) {
      val master = "local[*]"
      conf.setMaster(master)
      System.setProperty("HADOOP_USER_NAME", "hadoop")
      logger.warn(s"set spark master url to '$master' by default.")
      conf.set("spark.sql.warehouse.dir", "file:///spark-warehouse")
      conf.set("spark.testing.memory", "471859200")
    }

    conf.set("spark.sql.crossJoin.enabled", "true")
      .set("hive.exec.dynamic.partition", "true")
      .set("hive.exec.dynamic.partition.mode", "nonstrict")
    logger.info("enabled spark sql cross join")

    // Applied last so values from the config JSON override the defaults above.
    env.foreach {
      case (key, value) => conf.set(key, value)
    }

    conf
  }

  /**
   * Parses Spark settings out of a JSON config string.
   *
   * Expected shape: `{"appname": "...", "env": {"spark": ["key=value", ...]}}`.
   * Missing `env`/`spark` nodes and entries without an '=' are tolerated and
   * simply contribute nothing. When no explicit `spark.app.name` is present,
   * one is derived from `appname` (or the current timestamp as a last resort).
   *
   * @param configJson JSON string; null or unparseable input yields an empty map
   * @return immutable map of Spark configuration key/value pairs
   */
  def getSparkEnv(configJson: String): Map[String, String] = {
    Option(JSON.parseObject(configJson))
      .map(json => {
        // Guard every fastjson lookup: getJSONObject/getJSONArray return null
        // (previously an NPE) when the node is absent.
        val parsed: Map[String, String] =
          Option(json.getJSONObject("env"))
            .flatMap(envObj => Option(envObj.getJSONArray("spark")))
            .map(array => array.collect {
              // Skip malformed entries instead of crashing on fields(1).
              case line: String if line.contains("=") =>
                // limit = 2 keeps '=' characters inside the value intact,
                // e.g. "spark.driver.extraJavaOptions=-Dk=v".
                val fields: Array[String] = line.split("=", 2)
                (fields(0), fields(1))
            }.toMap)
            .getOrElse(Map.empty[String, String])
        // Immutable default-injection replaces the previous `var map`.
        if (parsed.contains("spark.app.name")) parsed
        else parsed + ("spark.app.name" ->
          Option(json.getString("appname")).getOrElse(DateUtil.now()))
      }).getOrElse(Map.empty)
  }
}
