package scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Builds the application-wide [[SparkSession]] with Hive support enabled.
 *
 * NOTE(review): this file lives in `package scala`, which shadows the standard
 * library's `scala` package — consider renaming the package (not changed here
 * because callers elsewhere may depend on it).
 */
class SparkCreate {

  /**
   * Creates (or reuses) a Hive-enabled SparkSession and tunes its underlying
   * Hadoop configuration.
   *
   * The session is built FIRST and the SparkContext is taken from it via
   * `spark.sparkContext`. The previous code did the reverse — it constructed a
   * bare `new SparkContext(conf)` and then called `getOrCreate()`, which made
   * the builder silently reuse that context and ignore its `.master(...)` and
   * config settings; it also crashed with "A master URL must be set" whenever
   * the master was not supplied externally (e.g. via spark-submit).
   *
   * @return a configured, Hive-enabled SparkSession
   */
  def initializeSparkSession(): SparkSession = {
    // When running inside the IDE, add .master("local[*]") instead of the
    // cluster URL below. When submitting to the standalone cluster, keep the
    // spark:// master (otherwise the app does not show up on the 8080 UI).
    val spark = SparkSession.builder()
      .appName("spark")
      .master("spark://192.168.56.104:7077")
      .config("spark.sql.warehouse.dir", "/user/hive/warehouse")
      .config("hive.metastore.uris", "thrift://192.168.56.104:9083")
      .enableHiveSupport()
      .getOrCreate()

    val hadoopConf = spark.sparkContext.hadoopConfiguration
    // Don't emit _SUCCESS marker files alongside job output.
    hadoopConf.set("mapreduce.fileoutputcommitter.marksuccessfuljobs", "false")
    // Skip writing .crc checksum sidecar files.
    hadoopConf.set("dfs.checksum.type", "NULL")

    spark
  }

}