package com.shujia.sql

import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
 * Demo: writing a DataFrame out in multiple formats.
 *
 * Reads student records from a JSON file, aggregates the row count per
 * class, then persists the result twice: once as JSON and once as CSV.
 * Both writes use SaveMode.Overwrite so re-running the job replaces any
 * previous output directories.
 */
object Demo4SaveDF {
  def main(args: Array[String]): Unit = {

    // Local session for the demo; shuffle partitions kept small (2)
    // since the input data set is tiny.
    val spark: SparkSession = SparkSession
      .builder()
      .appName("sql")
      .master("local")
      .config("spark.sql.shuffle.partitions", 2)
      .getOrCreate()

    try {
      val df1: DataFrame = spark.read.json("spark/data/students.json")

      // Number of students per class.
      val countDF: DataFrame = df1.groupBy("clazz").count()

      // Save as JSON; Overwrite replaces existing output.
      countDF
        .write
        .mode(SaveMode.Overwrite)
        .json("spark/data/df")

      // Save as plain text (CSV) with a comma separator.
      countDF.write
        .mode(SaveMode.Overwrite)
        .option("sep", ",")
        .csv("spark/data/csv")
    } finally {
      // Fix: the original never stopped the session, leaking the
      // SparkContext and its resources at program end.
      spark.stop()
    }
  }
}
