package ods_industry_2024.ods_02.indicator_count_hudi.indicator_06

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, to_date}

object test_04 {
  def main(args: Array[String]): Unit = {

    /*
     * Task 4: Using the dwd_ds_hudi layer tables, compute for the year 2020 the
     * median order amount per province and per region, and store the result in the
     * ClickHouse table shtd_result.nationmedian. Then, in the ClickHouse CLI, query
     * the first 5 rows ordered by region key and province key ascending, and paste
     * the SQL and result screenshot into Release\任务B提交结果.docx.
     */

    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("二刷第六套卷子的指标计算第四题")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // Register the latest-etl_date snapshot of a Hudi table as a temp view.
    // A view of the full table is registered first because the snapshot filter
    // uses a scalar subquery (select max(etl_date)) over it.
    def registerLatestSnapshot(table: String, fullView: String, snapshotView: String): Unit = {
      spark.table(table).createOrReplaceTempView(fullView)
      spark.table(table)
        .where(s"etl_date=(select max(etl_date) from $fullView)")
        .distinct()
        .createOrReplaceTempView(snapshotView)
    }

    registerLatestSnapshot("dwd_ds_hudi_02.fact_order_info", "temp01", "order_info")
    registerLatestSnapshot("dwd_ds_hudi_02.dim_province", "temp02", "province")
    registerLatestSnapshot("dwd_ds_hudi_02.dim_region", "temp03", "region")

    // Median order amount per province, then per region, restricted to 2020 orders
    // as the task requires (the original query omitted the year filter).
    // NOTE(review): assumes fact_order_info has a `create_time` timestamp column —
    // confirm against the dwd_ds_hudi_02 table schema.
    val result = spark.sql(
      """
        |with
        |province_temp as (
        |select distinct
        |o.province_id as provinceid,
        |p.name as provincename,
        |p.region_id as regionid,
        |percentile(o.final_total_amount,0.5) over(partition by o.province_id,p.name) as provincemedian
        |from order_info as o
        |join province as p
        |on p.id=o.province_id
        |where year(o.create_time)=2020
        |),
        |region_temp as (
        |select distinct
        |r.id as regionid,
        |r.region_name as regionname,
        |percentile(o.final_total_amount,0.5) over(partition by r.id,r.region_name) as regionmedian
        |from order_info as o
        |join province as p
        |on p.id=o.province_id
        |join region as r
        |on r.id=p.region_id
        |where year(o.create_time)=2020
        |)
        |
        |select
        |t1.provinceid,t1.provincename,t1.regionid,t2.regionname,t1.provincemedian,t2.regionmedian
        |from province_temp as t1
        |join region_temp as t2
        |on t2.regionid=t1.regionid
        |""".stripMargin)

    result.show

    // TODO(review): the task also requires writing `result` into ClickHouse
    // shtd_result.nationmedian (e.g. via result.write.format("jdbc") with the
    // ClickHouse driver); only `show` is performed here.

    spark.close()
  }

}
