package com.sys.tdhclient.utils

import java.io.File
import java.util.Properties

import com.sys.tdhclient.client.InitClient
import org.apache.spark.sql.SparkSession
import org.apache.spark.{SparkConf, SparkContext}

class SparkSc private {
  /** Client configuration, loaded once when this instance is constructed. */
  val properties = new InitClient().init()

  /**
   * Builds (or reuses) a SparkSession and configures its underlying Hadoop
   * configuration for Kerberos-secured cluster access.
   *
   * @return the SparkSession together with its underlying SparkContext
   */
  def sparkss(): (SparkSession, SparkContext) = {
    // Master URL comes from the externally loaded client properties.
    val conf: SparkConf = new SparkConf()
      .setAppName("SparkRreadHdfs")
      .setMaster(properties.getProperty("spark_master"))

    // getOrCreate reuses an already-active session if one exists.
    val sparkSession: SparkSession = SparkSession.builder().config(conf).getOrCreate()
    val sc: SparkContext = sparkSession.sparkContext

    // Enable Kerberos authentication and merge the cluster site files
    // (resolved from the classpath) into the Hadoop configuration.
    sc.hadoopConfiguration.set("hadoop.security.authentication", "kerberos")
    sc.hadoopConfiguration.addResource("core-site.xml")
    sc.hadoopConfiguration.addResource("hdfs-site.xml")
    sc.hadoopConfiguration.addResource("yarn-site.xml")

    (sparkSession, sc)
  }
}
object SparkSc {
  /** Single shared holder; constructing it loads the client properties. */
  val sc = new SparkSc

  // Cache the session/context pair so the SparkConf construction and the
  // Kerberos/Hadoop setup in sparkss() run at most once, instead of on
  // every accessor call. getOrCreate would return the same session anyway,
  // so this is behavior-compatible — it only avoids redundant work.
  private lazy val sessionAndContext: (SparkSession, SparkContext) = sc.sparkss()

  /** @return the shared, fully configured SparkContext. */
  def getSparkContext(): SparkContext = sessionAndContext._2

  /** @return the client properties loaded at startup. */
  def getProperties(): Properties = sc.properties

  /** @return the shared, fully configured SparkSession. */
  def getSparkSession(): SparkSession = sessionAndContext._1
}
