package com.shujia.spark.sql

import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object Demo2SparkDSL {
  def main(args: Array[String]): Unit = {
    // Build a local SparkSession; cap shuffle partitions at 1 so the tiny
    // demo dataset does not fan out into many near-empty output files.
    val spark: SparkSession = SparkSession
      .builder()
      .master("local")
      .appName("dsl")
      .config("spark.sql.shuffle.partitions", 1)
      .getOrCreate()

    // Enables the $"column" interpolator and Dataset encoders.
    import spark.implicits._
    // Built-in column functions such as sum().
    import org.apache.spark.sql.functions._

    // Load the score file as a DataFrame with an explicit schema
    // (sid = student id, cid = course id, sco = score).
    val scores: DataFrame = spark.read
      .format("csv")
      .option("sep", ",")
      .schema("sid STRING , cid STRING , sco DOUBLE")
      .load("data/score.txt")

    /**
     * DSL (SQL-like) API: total score per student.
     */
    val totalPerStudent: DataFrame = scores
      .groupBy($"sid") // group rows by student id
      .agg(sum($"sco")) // sum each student's scores

    totalPerStudent.show()

    // Persist the result as tab-separated text, replacing any previous run's output.
    totalPerStudent
      .write
      .format("csv")
      .option("sep", "\t")
      .mode(SaveMode.Overwrite)
      .save("data/sum_score")
  }

}
