package ds_industry_2025.ds.ds_06.T3

import org.apache.hudi.DataSourceWriteOptions.{PARTITIONPATH_FIELD, PRECOMBINE_FIELD, RECORDKEY_FIELD}
import org.apache.hudi.QuickstartUtils.getQuickstartWriteConfigs
import org.apache.spark.sql.SparkSession

/*
    2、根据dwd_ds_hudi层表统计每个省每月下单的数量和下单的总金额，并按照year，month，region_id进行分组,按照total_amount降序
    排序，形成sequence值，将计算结果存入Hudi的dws_ds_hudi数据库province_consumption_day_aggr表中（表结构如下），然后使
    用spark-shell根据订单总数、订单总金额、省份表主键均为降序排序，查询出前5条，在查询时对于订单总金额字段将其转为bigint类型（
    避免用科学计数法展示），将SQL语句复制粘贴至客户端桌面【Release\任务B提交结果.docx】中对应的任务序号下，将执行结果截图粘贴
    至客户端桌面【Release\任务B提交结果.docx】中对应的任务序号下;
 */
object t6 {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("t1")
      .config("hive.exec.dynamic.partition.mode","nonstrict")
      .config("spark.serializer","org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions","org.apache.sql.hudi.HoodieSparkSessionExtension")
      .config("spark.sql.parquetLegacyFormat","true")
      .enableHiveSupport()
      .getOrCreate()

    val province_hdfs="hdfs://192.168.40.110:9000/user/hive/warehouse/dwd_ds_hudi.db/dim_province"
    val region_hdfs="hdfs://192.168.40.110:9000/user/hive/warehouse/dwd_ds_hudi.db/dim_region"
    val order_hdfs="hdfs://192.168.40.110:9000/user/hive/warehouse/dwd_ds_hudi.db/fact_order_info"

    spark.read.format("hudi").load(province_hdfs)
      .createTempView("t1")
    spark.read.format("hudi").load(region_hdfs)
      .createTempView("t2")

    spark.read.format("hudi").load(province_hdfs)
      .where("etl_date=(select max(etl_date) from t1)")
      .createTempView("province")

    spark.read.format("hudi").load(region_hdfs)
      .where("etl_date=(select max(etl_date) from t2)")
      .createTempView("region")

    spark.sql("desc region").show

    spark.read.format("hudi").load(order_hdfs)
      .createTempView("order")

    val result = spark.sql(
      """
        |select
        |uuid() as uuid,
        |*
        |from(
        |select distinct
        |province_id,province_name,
        |region_id,region_name,
        |total_amount,total_count,
        |row_number() over(partition by year,month,region_id,region_name order by total_amount desc ) as sequence,
        |year,month
        |from(
        |select distinct
        |o.province_id,
        |p.name as province_name,
        |p.region_id,
        |r.region_name,
        |sum(o.final_total_amount)
        |over(partition by p.region_id,r.region_name,o.province_id,p.name,year(o.create_time),month(o.create_time)) as total_amount,
        |count(*)
        |over(partition by  p.region_id,r.region_name,o.province_id,p.name,year(o.create_time),month(o.create_time)) as total_count,
        |year(o.create_time) as year,
        |month(o.create_time) as month
        |from order as o
        |join province as p
        |on p.id=o.province_id
        |join region as r
        |on r.id=p.region_id
        |) as r1
        |) as r2
        |""".stripMargin)

    result.show

    spark.sql("create database if  not exists   dws_ds_hudi")
    spark.sql("use dws_ds_hudi")
    spark.sql(
      """
        |create table if  not exists dws_ds_hudi.province_consumption_day_agg(
        |uuid String,
        |province_id int,
        |province_name String,
        |region_id int,
        |region_name String,
        |total_amount Double,
        |total_count int,
        |sequence int,
        |year int,
        |month int
        |)using hudi
        |tblproperties(
        |type="cow",
        |primaryKey="uuid",
        |preCombineField="total_count",
        |hoodie.datasource.hive_aync.mode="hms"
        |)
        |partitioned by(year,month)
        |""".stripMargin)




    result.write.format("hudi")
      .options(getQuickstartWriteConfigs)
      .option(RECORDKEY_FIELD.key(),"uuid")
      .option(PARTITIONPATH_FIELD.key(),"year,month")
      .option(PRECOMBINE_FIELD.key(),"total_count")
      .option("hoodie.table.name","province_consumption_day_agg")
      .mode("append")
      .save("hdfs://192.168.40.110:9000/user/hive/warehouse/dws_ds_hudi.db/province_consumption_day_agg")



    spark.close()
  }

}
