package com.shujia.spark.sql

import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Demo: running Spark SQL on top of Hive tables.
 *
 * Computes, per class, the top-3 students by total score — twice:
 * first with a raw SQL query against the Hive metastore, then with the
 * equivalent DataFrame API pipeline. Both variants read the Hive tables
 * `student` and `score` in the `default` database and print their result.
 */
object Demo08SparkOnHive {
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession
      .builder()
      .appName(this.getClass.getSimpleName.replace("$", ""))
      .master("local")
      .config("spark.sql.shuffle.partitions", "2")
      .enableHiveSupport() // enable Hive support (metastore-backed tables)
      .getOrCreate()

    // Ensure the session is always stopped, releasing the SparkContext
    // even if a query fails.
    try {
      spark.sql("use default")

      // Variant 1: plain SQL — per-class top 3 students by summed score,
      // ranked with row_number() over a class-partitioned window.
      spark.sql(
        """
          |select  tt1.id
          |        ,tt1.name
          |        ,tt1.clazz
          |        ,tt1.sum_score
          |        ,tt1.rn
          |from (
          |    select  t1.id
          |            ,t1.name
          |            ,t1.clazz
          |            ,t2.sum_score
          |            ,row_number() over(partition by t1.clazz order by t2.sum_score desc) as rn
          |    from student t1
          |    join (
          |        select  student_id as id
          |                ,sum(sco) as sum_score
          |        from score
          |        group by student_id
          |    ) t2 on t1.id = t2.id
          |) tt1 where tt1.rn <= 3
          |""".stripMargin).show()

      // Variant 2: the same query expressed with the DataFrame API.
      // Load the Hive tables as DataFrames.
      val stuDF: DataFrame = spark.table("student")
      val scoDF: DataFrame = spark.table("score")

      import spark.implicits._
      import org.apache.spark.sql.functions._

      scoDF
        .groupBy($"student_id")
        .agg(sum($"sco") as "sum_score")                       // total score per student
        .join(stuDF, $"student_id" === $"id", "inner")         // attach name/clazz
        .withColumn("rn", row_number() over Window.partitionBy($"clazz").orderBy($"sum_score".desc))
        .where($"rn" <= 3)                                     // keep top 3 per class
        .show()
    } finally {
      // Missing in the original: without this the local SparkContext
      // (and its UI/threads) stays alive after main finishes.
      spark.stop()
    }
  }

}
