package com.spark.WorCount.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

object SparkSqlTestByScala {
  /** Entry point of a small Spark SQL demo.
    *
    * Reads two JSON data sets (students and gift records) into DataFrames,
    * prints their contents to stdout, and shuts the session down.
    *
    * @param args command-line arguments (unused)
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local")
    // Create the SparkSession — it wraps both SparkContext and SQLContext.
    val sparkSession = SparkSession.builder()
      .appName("SparkSqlTestByScala")
      .config(conf)
      .getOrCreate()
    try {
      // Read the JSON files into DataFrames (schema is inferred per file).
      val stuDf = sparkSession.read.json("datas/student.json")
      val giftDf = sparkSession.read.json("datas/gift_record.log")
      // Print the DataFrame contents to stdout.
      stuDf.show()
      giftDf.show()
    } finally {
      // Always release the session (and its SparkContext), even if
      // reading or showing a DataFrame throws.
      sparkSession.stop()
    }
  }
}
