package com.mjf.spark.day09

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
 * Demonstrates using Spark SQL with Hive support.
 */
object SparkSQL02_Hive {
  /**
   * Entry point: opens a Hive-enabled SparkSession, lists the tables in the
   * `gmall` database, and releases the session.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {

    // Spark configuration: application name plus a local master using all available cores.
    val conf: SparkConf = new SparkConf().setAppName("SparkSQL").setMaster("local[*]")

    // SparkSession is the Spark SQL entry point; enableHiveSupport() turns on
    // Hive metastore connectivity and HiveQL support.
    val spark: SparkSession = SparkSession.builder().enableHiveSupport().config(conf).getOrCreate()

    try {
      // Show all tables registered in the `gmall` Hive database.
      spark.sql("show tables in gmall").show()
    } finally {
      // Guarantee the session is closed even if the query above throws,
      // so local Spark resources are always released.
      spark.close()
    }
  }
}
