package com.shujia.sql

import org.apache.spark.sql.{DataFrame, SparkSession}

object Demo8Cache {

  /**
    * Demonstrates why a DataFrame that feeds multiple actions should be cached:
    * without `cache()`, each `show()` below would trigger a full re-read and
    * re-parse of the source JSON file.
    *
    * Run locally; the driver then blocks forever so the Spark web UI
    * (http://localhost:4040) can be inspected, including the Storage tab
    * showing the cached DataFrame.
    */
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession
      .builder()
      .appName("cache")
      .master("local")
      // Small local demo: 1 shuffle partition instead of the default 200.
      .config("spark.sql.shuffle.partitions", 1)
      .getOrCreate()

    import spark.implicits._
    import org.apache.spark.sql.functions._

    // NOTE(review): the aggregations below imply the JSON records carry at
    // least `id`, `clazz` and `gender` fields — confirm against the data file.
    val student: DataFrame = spark
      .read
      .format("json")
      .load("Spark/data/stu/students.json")

    // Cache the DataFrame because it is consumed by two independent actions.
    // For Datasets, cache() == persist(StorageLevel.MEMORY_AND_DISK).
    // Caching is lazy: the first action below actually populates it.
    student.cache()

    // Job 1: student count per class — triggers the scan that fills the cache.
    student
      .groupBy($"clazz")
      .agg(count($"id"))
      .show()

    // Job 2: student count per gender — served from the cached data,
    // no second read of the JSON file.
    student
      .groupBy($"gender")
      .agg(count($"id"))
      .show()

    // Keep the JVM alive so the web UI stays reachable. The original
    // `while (true) {}` busy-wait pinned a CPU core at 100%; joining the
    // main thread on itself blocks forever while leaving the driver idle.
    Thread.currentThread().join()

  }

}
