package com.jinghang.spark_base._020_SQL

import org.apache.spark.sql.SparkSession

/**
 * Minimal Spark SQL walkthrough: loads a JSON file into a DataFrame and
 * demonstrates schema inspection, column selection, filtering, aggregation,
 * and both session-scoped and global temporary views.
 */
object DataFrameExample {

  def main(args: Array[String]): Unit = {
    val session = SparkSession
      .builder()
      .appName("DataFrameExample")
      .master("local[2]")
      .config("spark.testing.memory", "471859200")
      .getOrCreate()

    // Keep console output readable by suppressing INFO/WARN log noise.
    session.sparkContext.setLogLevel("ERROR")

    runBasicDataFrameExample(session)
  }

  /**
   * Runs the basic DataFrame operations against `data/practiceOperator/people.json`.
   * Side-effecting: prints results to stdout and stops the session when done.
   *
   * @param sparkSession the active session used to read data and run SQL
   */
  def runBasicDataFrameExample(sparkSession: SparkSession): Unit = {
    // Required for the $"colName" column syntax used below.
    import sparkSession.implicits._

    val dataFrame = sparkSession.read.json("data/practiceOperator/people.json")

    // Print the inferred schema (column names, types, nullability).
    println("printSchema")
    dataFrame.printSchema()

    println("show")
    dataFrame.show(10)

    println("dataFrame.select(\"name\")")
    dataFrame.select("name").show()

    println("filter($\"age\">20)")
    dataFrame.filter($"age" > 20).show()

    dataFrame.select($"name", $"age" + 2).show()

    dataFrame.groupBy("age").count().show()

    println("select * from people")
    // Session-scoped temporary view: visible only within this SparkSession.
    dataFrame.createOrReplaceTempView("people")

    val sqlDF = sparkSession.sql("select * from people")
    sqlDF.show()
    println("newSession")
    // A new session cannot see the session-scoped view, so this would fail:
    //sparkSession.newSession().sql("select * from people").show()

    // Global temporary view: shared across sessions within the application.
    println("createGlobalTempView")
    // Use the "OrReplace" variant so re-running does not throw
    // TempTableAlreadyExistsException if the view already exists.
    dataFrame.createOrReplaceGlobalTempView("people1")

    // Global temporary views live in the system-preserved `global_temp` database.
    sparkSession.sql("select * from global_temp.people1").show()

    // A brand-new session can still query the global view.
    sparkSession.newSession().sql("select * from global_temp.people1").show()

    sparkSession.stop()
  }

}
