package cn.lhz.util.spark

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

import java.util.Properties

/**
 * Spark utility object: builds a Hive-enabled [[SparkSession]] and
 * assembles MySQL JDBC connection properties from classpath resources.
 *
 * @author 李昊哲
 * @version 1.0.0
 */
object SparkUtil {

  /**
   * Builds (or reuses) a Hive-enabled [[SparkSession]].
   *
   * Warehouse directory and metastore URI are read from `hive.properties`
   * on the classpath. When no `spark.master` is supplied externally
   * (e.g. via `spark-submit`), falls back to local mode.
   *
   * @return the shared [[SparkSession]] instance
   * @throws IllegalStateException if `hive.properties` is not on the classpath
   */
  def apply(): SparkSession = {
    // Run Hadoop operations as root unless the caller overrides it.
    System.setProperty("HADOOP_USER_NAME", "root")

    // System.setProperty("HADOOP_USER_NAME", "lhz")
    val sparkConf = new SparkConf()
    if (!sparkConf.contains("spark.master")) {
      // No master configured (e.g. running from the IDE) — use local mode.
      sparkConf.setMaster("local")
    }
    val prop = loadProperties("hive.properties")
    SparkSession
      .builder()
      .appName("Spark SQL JDBC")
      .config(conf = sparkConf)
      .config("spark.sql.warehouse.dir", prop.getProperty("warehouse.dir"))
      .config("hive.metastore.uris", prop.getProperty("metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()
  }

  /**
   * MySQL connection parameters for a table in the default schema.
   *
   * The schema name is taken from the `schema` key of `mysql.properties`
   * (default database name is `air`).
   *
   * @param tableName table name
   * @return JDBC connection properties with a `tableName` entry of the form `schema.table`
   * @throws IllegalStateException if `mysql.properties` is missing or has no `schema` key
   */
  def mysqlConnectionProperties(tableName: String): Properties = {
    val prop = loadProperties("mysql.properties")
    // Pull the schema out so only pure JDBC options remain in the result;
    // fail loudly instead of silently producing a "null.table" name.
    val schema = Option(prop.remove("schema"))
      .map(_.toString)
      .getOrElse(throw new IllegalStateException("'schema' key missing from mysql.properties"))
    prop.put("tableName", schema + "." + tableName)
    prop
  }

  /**
   * MySQL connection parameters for a table in an explicit schema.
   *
   * @param schema    database (schema) name
   * @param tableName table name
   * @return JDBC connection properties with a `tableName` entry of the form `schema.table`
   * @throws IllegalStateException if `mysql.properties` is not on the classpath
   */
  def mysqlConnectionProperties(schema: String, tableName: String): Properties = {
    val prop = loadProperties("mysql.properties")
    // Any schema from the file is discarded — the caller's schema wins.
    prop.remove("schema")
    prop.put("tableName", schema + "." + tableName)
    prop
  }

  /**
   * Loads a properties file from the classpath, always closing the stream.
   *
   * @param resource classpath resource name, e.g. `hive.properties`
   * @return the loaded [[Properties]]
   * @throws IllegalStateException if the resource is not found on the classpath
   */
  private def loadProperties(resource: String): Properties = {
    val in = this.getClass.getClassLoader.getResourceAsStream(resource)
    if (in == null) {
      // getResourceAsStream returns null (not an exception) on a missing resource.
      throw new IllegalStateException(s"resource '$resource' not found on classpath")
    }
    val prop = new Properties()
    try prop.load(in)
    finally in.close() // Properties.load does NOT close the underlying stream
    prop
  }
}
