package cn.doitedu.dfdemo

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession

/**
 * @Date 22.4.10
 * @Created by HANGGE
 * @Description
 */
object C05_MakeDF_hive {
  // Silence Spark/Hadoop's verbose INFO logging so the demo output stays readable.
  Logger.getLogger("org").setLevel(Level.ERROR)

  /**
   * Demo: build a DataFrame by running Spark SQL against a Hive table.
   *
   * Prerequisites (per the original notes):
   *  - MySQL connector and spark-hive dependencies on the classpath
   *  - hive-site.xml on the classpath so Spark can reach the Hive metastore service
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {

    val session = SparkSession
      .builder()
      .master("local[*]")
      .appName(this.getClass.getSimpleName)
      // Enable Hive integration: metastore access, HiveQL, Hive SerDes.
      .enableHiveSupport()
      .getOrCreate()

    try {
      // Other example queries:
      //   session.sql("show databases")
      //   session.sql("show tables")
      val df = session.sql("select * from  default.tb_order")
      df.show()
    } finally {
      // FIX: the original never stopped the session, leaking the SparkContext.
      // Always release it, even if the query or show() fails.
      session.stop()
    }
  }

}
