package com.ada.spark.sparksql

import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}


object SparkSql01 {

    def main(args: Array[String]): Unit = {

        // 1. Create the Spark configuration object
        val conf: SparkConf = new SparkConf().setAppName("SparkSql01").setMaster("local[*]")

        // Create the Spark SQL environment object (SparkSession)
        // SparkSession cannot be instantiated with new directly; use the builder,
        // which reuses an existing session if one already exists (singleton-style).
        //val session: SparkSession = new SparkSession(conf)
        val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()

        // Read the data and build a DataFrame
        val frame: DataFrame = spark.read.json("in/user.json")
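
        // Optional sketch (assumed addition, not in the original file): print the schema
        // that Spark inferred from the JSON file before displaying the rows.
        frame.printSchema()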

        // Display the data
        frame.show()
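
        // Optional sketch (assumed addition, not in the original file): register the
        // DataFrame as a temporary view and query it with Spark SQL. Selecting all
        // columns keeps the query independent of the actual schema of in/user.json.
        frame.createOrReplaceTempView("user")
        spark.sql("select * from user").show()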

        // Release resources
        spark.stop()

    }

}
