package com.spark.sql
import org.apache.spark.sql.SparkSession
import org.apache.spark.SparkConf
import org.apache.spark.sql.TypedColumn

/**
 * Minimal Spark SQL example: reads a JSON file of people records into a
 * DataFrame, runs a few DataFrame selections, then queries the same data
 * through a global temporary view with raw SQL.
 *
 * NOTE(review): the input path is hard-coded to a local Spark 2.4.0 install
 * (`people.json` from the bundled examples) — adjust for other environments.
 */
object Test1 {
  def main(args: Array[String]): Unit = {
    // "local[2]" runs Spark locally with two worker threads — suitable for a demo.
    val conf = new SparkConf().setMaster("local[2]").setAppName("SparkSQL")
    val spark = SparkSession
      .builder()
      .config(conf)
      .getOrCreate()
    // Brings in implicit encoders/conversions (e.g. $"col", .toDF) for this session.
    import spark.implicits._
    try {
      val df = spark.read.json("/usr/local/spark-2.4.0-bin-hadoop2.7/examples/src/main/resources/people.json")
      df.show()
      df.select("name").show()
      df.select("age").show()
      // A global temp view lives in the reserved `global_temp` database and is
      // shared across sessions; it must be qualified as `global_temp.people`.
      df.createGlobalTempView("people")
      spark.sql("SELECT * FROM global_temp.people").show()
    } finally {
      // Release the SparkContext and its resources even if a query fails.
      spark.stop()
    }
  }
}