package com.doit.beans.day06

import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * @Author:
 * @WX: 17710299606
 * @Tips: 学大数据 ,到多易教育
 * @DOC: https://blog.csdn.net/qq_37933018?spm=1000.2115.3001.5343
 * @Description:
 */
/**
 * Demonstrates the two kinds of temporary views in Spark SQL:
 *
 *  - Session-scoped temp views (`createTempView` / `createOrReplaceTempView`):
 *    visible only inside the SparkSession that created them.
 *  - Global temp views (`createGlobalTempView` / `createOrReplaceGlobalTempView`):
 *    tied to the Spark application, shared across sessions, and must be
 *    referenced through the reserved `global_temp` database.
 */
object Demo08_View {
  def main(args: Array[String]): Unit = {
    val session = SparkSession.builder()
      .appName("test")
      .master("local[*]")
      .enableHiveSupport()
      .getOrCreate()

    try {
      // DataFrame = RDD + schema (structure).
      // NOTE(review): reads the relative path "data/log"; assumes the program
      // is launched from the project root — confirm against the run config.
      val df: DataFrame = session.read.json("data/log")

      // Session-scoped temporary view: only visible in the current session.
      // createOrReplaceTempView silently overwrites an existing view ...
      df.createOrReplaceTempView("tb_log1")
      // ... while createTempView throws AnalysisException if the name exists.
      df.createTempView("tb_log2")

      // The in-query comment notes that plain `union` would deduplicate;
      // `union all` keeps duplicate rows.
      session.sql(
        """
          |select
          |*
          |from
          |tb_log1
          |union all  -- union  去重
          |select
          |*
          |from
          |tb_log2
          |""".stripMargin).show()

      // Global temporary views: shared across all sessions of this application.
      df.createOrReplaceGlobalTempView("tb_log3")
      df.createGlobalTempView("tb_log4")

      // A brand-new session cannot see tb_log1/tb_log2, but it CAN see the
      // global views when they are qualified with the `global_temp` database.
      val session2 = session.newSession()

      session2.sql(
        """
          |select
          |*
          |from
          |global_temp.tb_log3
          |union all  -- union  去重
          |select
          |*
          |from
          |global_temp.tb_log4
          |""".stripMargin).show()
    } finally {
      // Fix: the original never stopped the SparkSession, leaking the local
      // Spark context (and its UI/port/threads) on exit.
      session.stop()
    }
  }

}
