package ods_industry_2024.ods_02.indicator_count_hudi.indicator_06

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.hudi.HoodieSparkSessionExtension
import org.apache.spark.serializer.KryoSerializer
object test_03 {
  /**
   * Task (translated from the original Chinese comment):
   * From the dwd_ds_hudi layer tables, compute the top 10 products of 2020
   * by sales quantity and the top 10 products by sales amount, and store the
   * combined result into the ClickHouse table shtd_result.topten. The result
   * is then to be queried from the ClickHouse CLI, ordered ascending by rank,
   * first 5 rows.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("二刷第六套卷子指标第三题")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // Keep only the latest etl_date partition of each fact table and drop
    // duplicate rows before computing the indicators.
    spark.table("dwd_ds_hudi_02.fact_order_info").createOrReplaceTempView("temp01")
    spark.table("dwd_ds_hudi_02.fact_order_info")
      .where("etl_date=(select max(etl_date) from temp01)")
      .distinct()
      .createOrReplaceTempView("order_info")

    spark.table("dwd_ds_hudi_02.fact_order_detail").createOrReplaceTempView("temp02")
    spark.table("dwd_ds_hudi_02.fact_order_detail")
      .where("etl_date=(select max(etl_date)from temp02)")
      .distinct()
      .createOrReplaceTempView("order_detail")

    // Top 10 products of 2020 by sales quantity.
    // The innermost SELECT DISTINCT dedups to one row per (order, sku) so a
    // detail row duplicated within an order is counted once.
    // ORDER BY sequence before LIMIT: without an explicit ordering, Spark's
    // LIMIT returns an arbitrary 10 rows, not the top 10.
    // NOTE(review): to_date(o.create_time,'yyyyMMdd') assumes create_time is
    // stored as a 'yyyyMMdd' string — confirm against the actual column format.
    spark.sql(
      """
        |select
        |topquantityid,topquantityname,topquantity,
        |row_number() over(order by topquantity desc) as sequence
        |from(
        |select distinct
        |topquantityid,topquantityname,
        |sum(number) over(partition by topquantityid,topquantityname) as topquantity
        |from(
        |select distinct
        |o.id as order_id,
        |d.sku_id as topquantityid,
        |d.sku_name as topquantityname,
        |d.sku_num as number
        |from order_info as o
        |join order_detail  as d
        |on d.order_id=o.id
        |where Year(to_date(o.create_time,'yyyyMMdd'))=2020
        |) as r1
        |) as r2
        |order by sequence
        |limit 10
        |""".stripMargin).createOrReplaceTempView("result01")

    // Top 10 products of 2020 by sales amount.
    // Fixed vs. the previous revision: the 2020 filter was missing here (the
    // task requires 2020 for both indicators), the order-level dedup layer now
    // mirrors the quantity query so an order's final_total_amount is counted
    // once per (order, sku), and ORDER BY sequence makes LIMIT deterministic.
    spark.sql(
      """
        |select
        |toppriceid,toppricename,topprice,
        |row_number() over(order by topprice desc) as sequence
        |from(
        |select distinct
        |toppriceid,toppricename,
        |sum(amount) over(partition by toppriceid,toppricename) as topprice
        |from(
        |select distinct
        |o.id as order_id,
        |d.sku_id as toppriceid,
        |d.sku_name as toppricename,
        |o.final_total_amount as amount
        |from order_info as o
        |join order_detail as d
        |on d.order_id=o.id
        |where Year(to_date(o.create_time,'yyyyMMdd'))=2020
        |) as r1
        |) as r2
        |order by sequence
        |limit 10
        |""".stripMargin).createOrReplaceTempView("result02")

    // Pair the two rankings row-by-row on the shared rank (sequence) column,
    // matching the topten target-table layout.
    val result = spark.sql(
      """
        |select
        |t1.topquantityid,t1.topquantityname,t1.topquantity,
        |t2.toppriceid,t2.toppricename,t2.topprice,
        |t1.sequence
        |from result01 as t1
        |join result02 as t2
        |on t2.sequence=t1.sequence
        |""".stripMargin)

    result.show

    // Append the combined top-10 result into ClickHouse over JDBC.
    result.write.mode("append")
      .format("jdbc")
      .option("url", "jdbc:clickhouse://192.168.40.110:8123/hudi_indicator")
      .option("user", "default")
      .option("password", "")
      .option("dbtable", "topten_06")
      .option("driver", "com.clickhouse.jdbc.ClickHouseDriver")
      .save()

    println("完成")

    spark.close()
  }

}
