import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/*
Reference (tutorial this demo is based on):
https://blog.csdn.net/weixin_43093501/article/details/95023669
 */
object E6_DF_Hive {

  /**
   * Demo: build a Hive-enabled SparkSession running locally and issue a couple
   * of simple SQL statements (list tables in the metastore, select a literal).
   *
   * Requires a reachable HDFS namenode at localhost:9000 hosting the Hive
   * warehouse directory.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .enableHiveSupport()
      // Location of Hive-managed table data on the local HDFS instance.
      .config("spark.sql.warehouse.dir", "hdfs://localhost:9000/user/hive/warehouse")
      .master("local[*]")
      .appName("SQLTest")
      .getOrCreate()

    // Ensure the session is stopped even if a query throws, so the local
    // cluster threads and UI port are released.
    try {
      spark.sql("show tables").show()
      // Side-effecting 0-arity method: call with explicit parentheses.
      spark.sql("select 1 ").show()
    } finally {
      // Release resources.
      spark.stop()
    }
  }

}
