package com.hngy.scala.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql.{Dataset, Row, SparkSession}

/**
  * Demo: query a DataFrame with SQL by registering it as a temporary view.
  */
object DataFrameSqlScala {
  /**
    * Entry point: loads student records from JSON, registers them as a
    * temporary view, and runs an aggregate SQL query over them.
    *
    * @param args optional; args(0) may override the input JSON path
    *             (defaults to the original sample-data location).
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf
    conf.setMaster("local")

    // Input path is parameterized so the demo is not tied to one machine;
    // falls back to the original hard-coded sample path for compatibility.
    val inputPath = args.headOption.getOrElse(
      "F:\\BaiduNetdiskDownload\\hadoop\\source\\bigdata_course_materials\\spark2\\student.json")

    // Create the SparkSession (wraps SparkContext and SQLContext).
    // App name fixed to match this object (was "SqlDemoJava", copied from a Java demo).
    val sparkSession = SparkSession.builder.appName("DataFrameSqlScala").config(conf).getOrCreate
    val stuDf = sparkSession.read.json(inputPath)

    // Register the DataFrame as a temporary view so it can be queried with SQL.
    stuDf.createOrReplaceTempView("student")

    // Count students per age using plain SQL against the temp view.
    sparkSession.sql("select age,count(1) as num from student group by age").show()

    // Release cluster resources.
    sparkSession.stop()
  }
}
