package com.shujia.sql

import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Demo of the legacy Spark SQL entry point: SQLContext.
  *
  * Reads a JSON file of students, registers it as a temporary view,
  * and runs a SQL aggregation counting students per class.
  *
  * NOTE(review): SQLContext is the pre-2.0 API; in Spark 2.x+ prefer
  * SparkSession, which subsumes SQLContext.
  */
object Demo1SqlContext {

  def main(args: Array[String]): Unit = {

    // Local-mode configuration for this demo job.
    val conf: SparkConf = new SparkConf().setAppName("sql").setMaster("local")

    val sc = new SparkContext(conf)

    // Create the Spark SQL context (legacy API) on top of the SparkContext.
    val sqlContext = new SQLContext(sc)

    /**
      * DataFrame: a distributed dataset with named columns;
      * it is backed by an RDD under the hood.
      */

    // Read the students JSON file into a DataFrame.
    // Each line is expected to be one JSON object; the schema is inferred.
    val studentDF: DataFrame = sqlContext.read.json("spark/data/students.json")

    // Register a temporary view so the data can be queried with SQL.
    studentDF.createOrReplaceTempView("student")

    // Count the number of students in each class.
    val clazzNumDF: DataFrame = sqlContext.sql("select clazz,count(*) from student group by clazz")

    clazzNumDF.show()

    // Release cluster resources; without this the context (and its UI/
    // threads) would linger until JVM exit.
    sc.stop()
  }

}
