package com.xinqing.bigdata.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
  * Spark SQL demo: loads a JSON file into a DataFrame, registers it as a
  * temporary view, and queries it with SQL.
  *
  * @author CHQ
  * @since 2020/8/5
  */
object DataFrame {
  // NOTE(review): this object's name shadows the imported
  // org.apache.spark.sql.DataFrame companion term. Type positions still
  // resolve to the Spark class, but consider renaming (e.g. DataFrameDemo)
  // if no external callers depend on this name.

  /** Entry point: reads a JSON file into a DataFrame, registers it as the
    * temp view "user", runs a SQL query over it, and prints the result.
    *
    * @param args optional; args(0) overrides the input path
    *             (defaults to "input/user.json", preserving prior behavior)
    */
  def main(args: Array[String]): Unit = {
    // SparkSession is required as the entry point for any Spark SQL work.
    val sparkConf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("spark sql test1")
    val sparkSession: SparkSession = SparkSession.builder().config(sparkConf).getOrCreate()

    try {
      // Input path is configurable via the first CLI argument; falls back to
      // the original hard-coded file so existing invocations are unchanged.
      val inputPath = args.headOption.getOrElse("input/user.json")

      // Read the JSON data and convert it into a DataFrame.
      val dataFrame = sparkSession.read.json(inputPath)
      // Expected output of dataFrame.show() for the sample file:
      //  |age|name|
      //  +---+----+
      //  | 22| chq|

      // Register the DataFrame as a temporary view so it can be queried by SQL.
      dataFrame.createOrReplaceTempView("user")

      // Query the view through SQL and display the result.
      sparkSession.sql("select * from user").show()
    } finally {
      // Release Spark resources even if reading or querying fails;
      // previously a failure above would leak the running session.
      sparkSession.stop()
    }
  }
}
