package com.zhaosc.spark.sql.json

import org.apache.spark.sql.SparkSession

/**
 * Example Spark SQL application: reads student records from a JSON file,
 * counts per-student scores >= 80 via a temp view, and prints the results
 * to stdout as (name, count) tuples.
 */
object JsonDatasource {

  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("JsonDatasource")
      .config("spark.master", "local")
      .getOrCreate()

    try {
      // Expects a JSON-lines file with at least `name` and `score` fields.
      val df = spark.read.json("student.json")

      // createOrReplaceTempView is idempotent; plain createTempView throws
      // if the view already exists in a reused session.
      df.createOrReplaceTempView("student_scores")

      // Use the SparkSession SQL entry point (df.sqlContext is the legacy
      // pre-2.x API).
      val studentScore = spark.sql(
        "select name, count(score) as countscore from student_scores where score >= 80 group by name")

      studentScore.rdd.map { r =>
        // count(...) yields a bigint column, i.e. a Long — reading it with
        // getAs[String] would throw ClassCastException at runtime.
        (r.getAs[String]("name"), r.getAs[Long]("countscore"))
      }.collect().foreach(println)
    } finally {
      // Release Spark resources even if the job fails.
      spark.stop()
    }
  }
}