package com.king.spark.sql

import java.util.Properties

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql._

import scala.collection.mutable


object SparkSQL09_read_write {
  /**
   * Demonstrates connecting Spark SQL to an external Hive metastore and
   * listing its tables via `SparkSession` with Hive support enabled.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {

    // Impersonate this Hadoop user when talking to HDFS / the Hive metastore.
    System.setProperty("HADOOP_USER_NAME", "atguigu")

    // SparkSession is the unified entry point for Spark SQL
    // (replaces the older SQLContext / HiveContext).
    val sparkConf: SparkConf = new SparkConf().setAppName("SparkSQL").setMaster("local[*]")

    val spark: SparkSession =
      SparkSession.builder()
        .config(sparkConf)
        .enableHiveSupport() // enable Hive support (external metastore)
        .getOrCreate()

    // Import Spark's implicit conversions (e.g. rdd.toDF); conventionally
    // imported whether or not it is used directly.
    import spark.implicits._

    try {
      // Spark backed by the external Hive metastore: list its tables.
      spark.sql("show tables").show()
    } finally {
      // Always release the session, even if the query above fails.
      spark.stop()
    }
  }
}
