package com.shujia.sql

import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Demo: integrating Spark SQL with Hive.
 *
 * Shows the same "top 10 scores per course" query twice:
 *   1. as a raw SQL statement executed through `spark.sql`,
 *   2. as the equivalent DataFrame DSL pipeline using a window function.
 *
 * Requires a reachable Hive metastore containing a `spark` database with a
 * `score` table (columns: student_id, cource_id, sco).
 */
object Demo06SparkOnHive {
  def main(args: Array[String]): Unit = {
    // Build a SparkSession with Hive integration enabled.
    val spark: SparkSession = SparkSession
      .builder()
      .appName("Demo06SparkOnHive")
      .master("local")
      .config("spark.sql.shuffle.partitions", "2") // small shuffle parallelism for a local demo
      .enableHiveSupport() // enable Hive support (metastore access, HiveQL)
      .getOrCreate()

    // Ensure the session is always stopped, releasing the SparkContext
    // and its resources even if a query below fails.
    try {
      // Execute a plain SQL statement.
      spark.sql("show databases").show()

      // Switch to the `spark` database.
      spark.sql("use spark")

      // Top 10 scores per course, via SQL with a row_number() window.
      // NOTE(review): `cource_id` is misspelled but matches the actual Hive
      // table schema — do not "fix" it here without migrating the table.
      spark.sql(
        """
          |select  t1.cource_id
          |        ,t1.student_id
          |        ,t1.sco
          |        ,t1.rn
          |from (
          |    select  student_id
          |            ,cource_id
          |            ,sco
          |            ,row_number() over (partition by cource_id order by sco desc) as rn
          |    from score
          |) t1 where rn <= 10
          |""".stripMargin).show()

      import spark.implicits._
      import org.apache.spark.sql.functions._

      // Same query expressed with the DataFrame DSL.
      val scoDF: DataFrame = spark.table("spark.score") // load the Hive table as a DataFrame
      scoDF
        .withColumn("rn", row_number() over Window.partitionBy($"cource_id").orderBy($"sco".desc))
        .where($"rn" <= 10)
        .show()
    } finally {
      spark.stop()
    }
  }

}
