package com.ada.spark.sparksql

import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}


/**
 * Demo: querying a JSON dataset with Spark SQL.
 *
 * Reads `in/user.json` into a DataFrame, registers it as a temporary
 * view named "user", and runs a plain SQL SELECT against it.
 */
object SparkSql02_Sql {

    // Explicit `: Unit =` return type: the old procedure syntax
    // (`def main(args: ...) { ... }`) is deprecated and dropped in Scala 3.
    def main(args: Array[String]): Unit = {

        // Build the Spark configuration; `local[*]` runs with one worker
        // thread per available core — suitable for a local demo only.
        val conf: SparkConf = new SparkConf().setAppName("SparkSql02_Sql").setMaster("local[*]")

        // Create (or reuse) the SparkSession — the entry point for Spark SQL.
        val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()

        // Load the JSON file into a DataFrame; the schema is inferred
        // from the JSON records.
        val frame: DataFrame = spark.read.json("in/user.json")

        // Register the DataFrame as a session-scoped temporary view
        // so it can be referenced by name in SQL.
        frame.createOrReplaceTempView("user")

        // Query the data with SQL syntax and print the result to stdout.
        spark.sql("select id,name,age from user").show()

        // Release cluster resources held by this session.
        spark.stop()

    }

}
