package cn.itcast.up.model

import org.apache.spark.sql.{DataFrame, SparkSession}

/**
  * SparkSQL walkthrough: builds a small in-memory DataFrame and runs a
  * word-count style aggregation (groupBy + sum), printing the result.
  */
object TestSparkSQL {

  def main(args: Array[String]): Unit = {
    // Build the SparkSession — the entry point for the DataFrame API.
    val spark: SparkSession = SparkSession.builder()
      .appName("TestSparkSQL")
      .master("local[*]")
      .getOrCreate()
    // Implicit conversions: enables .toDF on local collections and the $"col" syntax.
    import spark.implicits._

    try {
      // Load the data source: a local List lifted into a DataFrame.
      val sourceDF: DataFrame = List(
        ("java", 1),
        ("scala", 1),
        ("spark", 1),
        ("java", 1)
      ).toDF("word", "num")

      //    sourceDF.show()
      //    +-----+---+
      //    | word|num|
      //    +-----+---+
      //    | java|  1|
      //    |scala|  1|
      //    |spark|  1|
      //    | java|  1|
      //    +-----+---+

      // groupBy accepts either column names (String) or Column expressions:
      //   sourceDF.groupBy("word")                 // String column name
      //   sourceDF.groupBy(sourceDF.col("word"))   // Column
      //   sourceDF.groupBy($"word")                // Column via implicits
      //   sourceDF.groupBy('word)                  // Column via Symbol — symbol
      //                                            // literals are deprecated in
      //                                            // Scala 2.13, so prefer $"word".
      sourceDF
        // group rows by the "word" column
        .groupBy($"word")
        // sum the "num" column within each group
        .sum("num")
        .show()

      /**
        * +-----+--------+
        * | word|sum(num)|
        * +-----+--------+
        * |scala|       1|
        * |spark|       1|
        * | java|       2|
        * +-----+--------+
        */
    } finally {
      // Release local Spark resources even if the job above fails.
      spark.stop()
    }
  }
}
