package com.ww.spark.sql

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql._

/**
 * Demo of Spark SQL with Hive support enabled (standalone/local):
 * shows that DDL statements (`create database` / `create table`) act on the
 * session's *current* database, and that `use <db>` switches which database
 * `catalog.listTables()` reports on.
 */
object SQL04_standalone_hive {

  def main(args: Array[String]): Unit = {
    val ss: SparkSession = SparkSession
      .builder()
      .master("local")
      .appName("sql04")
      .config("spark.sql.shuffle.partitions", 1)
      //.config("spark.sql.warehouse.dir", "D:/me/study/code/idea/try-scala-spark/spark/warehouse")
      .enableHiveSupport()   // enable Hive support; Spark starts/uses a Hive metastore itself
      .getOrCreate()

    //ss.sql("create table xxx(name string,age int)")
    //ss.catalog.listTables().show()

    println("\n 创建库=====================================================================================================================")
    // Bring `ss.sql` into scope so statements below can be written as bare `sql(...)`.
    import ss.sql
    ss.catalog.listTables().show()  // lists tables of the current database
    sql("create database msb")
    // NOTE: creating the database does NOT switch to it — this table still
    // lands in the current database, not in `msb`.
    sql("create table table01(name string)")
    ss.catalog.listTables().show() // still showing the current database

    println("--------------------------------")

    sql("use msb")
    ss.catalog.listTables().show() // now operates on the `msb` database

    sql("create table table02(name string)")  // created inside `msb`
    ss.catalog.listTables().show() // shows tables of `msb`

    // Release the local SparkContext and metastore resources; without this
    // the session stays open until JVM exit.
    ss.stop()
  }
}
