package com.hliushi.spark.entrance

import org.apache.spark.sql.SparkSession

/**
 * descriptions:
 *
 * author: Hliushi
 * date: 2021/6/13 10:34
 */
object SparkContextDemo {

  /**
   * Demo entry point: builds a [[SparkSession]] with Hive support and reads a Hive table.
   *
   * SparkConf excerpt being studied:
   * {{{
   * class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Serializable {
   *   import SparkConf._
   *   /** Create a SparkConf that loads defaults from system properties and the classpath */
   *   def this() = this(true)
   * }
   * }}}
   *
   * NOTE (2021-06-13 10:46): test did not succeed; cause unknown.
   */
  def main(args: Array[String]): Unit = {
    //val conf: SparkConf = new SparkConf().setAppName("test")
    //  .setMaster("local[2]")
    //
    //// conf.getAll returns all parameters as a list of (key, value) pairs
    //val all = conf.getAll
    //
    //all.foreach(x => println(x._1, x._2))

    val spark = SparkSession.builder()
      .master("local[6]")
      .appName("hive_read2")
      .enableHiveSupport() // 1. enable Hive support
      .config("hive.metastore.uris", "thrift://node03:9083") // 2. location of the Hive MetaStore
      .config("spark.sql.warehouse.dir", "hdfs://node01:8020/user/hive/warehouse") // 3. warehouse location
      //.config("spark.sql.warehouse.dir", "hdfs://node01:8020/user/hive/warehouse/test_sql.db/student")
      .getOrCreate()

    // Guarantee the session is stopped even if reading or showing the table throws;
    // previously an exception here would leak the SparkSession and its resources.
    try {
      val dataFrame = spark.read.table("test_sql.student")
      dataFrame.show()
    } finally {
      spark.stop()
    }
  }
}