package com.shujia.spark.sql

import org.apache.spark.sql.{DataFrame, SparkSession}

object Demo06SparkOnHive {

  /**
   * Demonstrates reading a Hive table through SparkSQL in two ways:
   * via the DataFrame `table` API and via a raw SQL query.
   *
   * Requires Hive support (a reachable Hive metastore) — see
   * `enableHiveSupport()` below.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession
      .builder()
      .master("local")
      .appName(this.getClass.getSimpleName.replace("$", ""))
      // Number of partitions used by SparkSQL shuffle operations.
      // The default is 200 (i.e. 200 tasks); 2 is plenty for a local demo.
      .config("spark.sql.shuffle.partitions", "2")
      .enableHiveSupport() // enable Hive support (reads tables from the Hive metastore)
      .getOrCreate()

    try {
      // Load the Hive table db01.student as a DataFrame via the table API.
      val stuHiveDF: DataFrame = spark.table("db01.student")

      stuHiveDF.show()

      // Query the same table with raw SQL.
      spark.sql(
        """
          |select id,name from db01.student
          |""".stripMargin).show()
    } finally {
      // Always stop the session so its resources (UI, executors, metastore
      // connections) are released — the original version leaked the session.
      spark.stop()
    }
  }

}
