package com.shujia.sql

import org.apache.spark.sql.SparkSession

object Demo07SparkOnHive {
  def main(args: Array[String]): Unit = {
    // Accessing Hive from Spark code.
    /**
     * Setup steps:
     *
     * 1. Add the Hive integration dependency in Maven:
     * <dependency>
     * <groupId>org.apache.spark</groupId>
     * <artifactId>spark-hive_2.11</artifactId>
     * <version>2.4.5</version>
     * </dependency>
     *
     * 2. Start the Hive MetaStore service:
     * hive --service metastore
     *
     * 3. Put Hive's configuration file hive-site.xml into the resources directory.
     *
     * 4. Enable Hive support when building the SparkSession (see enableHiveSupport below).
     */

    // Build a local SparkSession with Hive support enabled so that
    // spark.sql(...) resolves tables through the Hive MetaStore.
    val spark: SparkSession = SparkSession
      .builder()
      .appName("Demo07SparkOnHive")
      .master("local")
      .config("spark.sql.shuffle.partitions", "2") // small shuffle parallelism for a local demo
      .enableHiveSupport()
      .getOrCreate()

    // Ensure the session (and its SparkContext) is released even if a query fails.
    try {
      spark.sql("show databases").show()
      spark.sql("show tables").show()
      spark.sql("use stu").show()
      spark.sql("show tables").show()

      // Top-3 students per class by total score:
      // the inner subquery sums each student's scores, the outer query ranks
      // students within each class (row_number over sum_score desc) and keeps rn <= 3.
      spark.sql(
        """
          |select  tt1.id
          |        ,tt1.name
          |        ,tt1.clazz
          |        ,tt1.sum_score
          |        ,tt1.rn
          |from (
          |    select  t1.id
          |            ,t1.name
          |            ,t1.clazz
          |            ,t2.sum_score
          |            ,row_number() over(partition by t1.clazz order by t2.sum_score desc) as rn
          |    from students t1
          |    join (
          |        select  id
          |                ,sum(score) as sum_score
          |        from score
          |        group by id
          |    ) t2 on t1.id = t2.id
          |) tt1 where tt1.rn <= 3
          |""".stripMargin)
        .show(50)
    } finally {
      // Fix: the original never stopped the session, leaking the SparkContext.
      spark.stop()
    }
  }

}
