package com.study.spark.scala.dataset

import org.apache.spark.sql.SparkSession

/**
 * Demo of grouping, aggregation, and sorting with Spark DataFrames.
 *
 * Each operation is shown alongside the equivalent SQL statement,
 * including several alternative `agg` call forms that produce the
 * same result.
 *
 * @author stephen
 * @date 2019-09-27 16:08
 */
object GroupAggOrderDemo {

  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      // Fixed: appName previously said "SelectDemo", a copy-paste
      // leftover inconsistent with this object's name.
      .appName("GroupAggOrderDemo")
      .master("local[*]")
      .getOrCreate()

    // Implicit conversions: functions._ for count/max/avg etc.,
    // spark.implicits._ for the $"col" column syntax and toDF.
    import org.apache.spark.sql.functions._
    import spark.implicits._

    val df = spark.createDataset(
      Seq((3, "zhangsan", "CN", 20),
        (4, "lisi", "CN", 30),
        (5, "wangwu", "JP", 40)))
      .toDF("id", "name", "country", "age")

    // Equivalent SQL: select country, count(*) from table group by country
    // Note: show() keeps its parentheses since it is side-effecting.
    df.groupBy("country").count().show()

    // Equivalent SQL: select distinct country from table
    df.selectExpr("country").distinct().show()

    // Equivalent SQL: select count(distinct country) from table
    val cnt = df.select("country").distinct().count()
    println(cnt)

    // Equivalent SQL:
    // select country, count(*) from table
    // group by country
    // order by country desc
    df.groupBy($"country").count().sort($"country".desc).show()

    // Equivalent SQL:
    // select country, count(*) from table
    // group by country
    // order by count(*) desc
    // Fixed: postfix operator notation ($"count" desc) replaced with
    // dot notation; postfix ops are feature-gated and dropped in Scala 3.
    df.groupBy($"country").count().sort($"count".desc).show()

    // Equivalent SQL:
    // select country, count(*) as cnt from table
    // group by country
    // order by cnt desc
    // Rename the generated "count" column with withColumnRenamed.
    df.groupBy($"country")
      .count()
      .withColumnRenamed("count", "cnt")
      .sort($"cnt".desc)
      .show()

    // Equivalent SQL:
    // select country, count(*) as cnt from table
    // group by country
    // order by cnt desc
    // Alias count(*) directly in the aggregation.
    df.groupBy($"country")
      .agg(count($"country").as("cnt"))
      .orderBy($"cnt".desc)
      .show()

    // Equivalent SQL:
    // select country, count(country), max(age), avg(age)
    // from table
    // group by country
    // Four equivalent agg call styles: Column expressions, varargs of
    // (column -> function) pairs, a Map, and varargs of tuples.
    df.groupBy($"country")
      .agg(count($"country"), max($"age"), avg($"age"))
      .show()
    df.groupBy($"country")
      .agg("country" -> "count", "age" -> "max", "age" -> "avg")
      .show()
    df.groupBy($"country")
      .agg(Map("country" -> "count", "age" -> "max", "age" -> "avg"))
      .show()
    df.groupBy($"country")
      .agg(("country", "count"), ("age", "max"), ("age", "avg"))
      .show()

    // Equivalent SQL:
    // select country, count(country) cnt,
    //        max(age) max_age, avg(age) avg_age
    // from table
    // group by country
    // order by country, max_age desc
    df.groupBy($"country")
      .agg(count($"country").as("cnt"),
        max($"age") as "max_age",
        avg($"age") as "avg_age")
      .sort($"country", $"max_age".desc)
      .show()

    // orderBy is simply an alias for sort.

    spark.stop()
  }
}
