package DianShang_2024.ds_02.indicator

import org.apache.hudi.DataSourceWriteOptions.{PARTITIONPATH_FIELD, PRECOMBINE_FIELD, RECORDKEY_FIELD}
import org.apache.hudi.QuickstartUtils.getQuickstartWriteConfigs
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col

object indicator02 {
  /**
   * Aggregates per-user, per-day order counts and total amounts from the DWD
   * layer and writes the result into the Hudi table
   * `dws_ds_hudi02.user_consumption_day_aggr`, partitioned by year/month/day.
   *
   * Task (translated from the original Chinese note): based on the DWD-layer
   * tables, compute each user's daily order count and total order amount and
   * store them in the `user_consumption_day_aggr` table of the Hudi
   * `dws_ds_hudi` layer; afterwards, query the top 5 rows in spark-shell
   * ordered by user key and total amount descending for the exam deliverable.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("指标计算第二题")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    val fact_order_info_path = "hdfs://192.168.40.110:9000/user/hive/warehouse/dwd_ds_hudi02.db/fact_order_info"
    val dim_user_info_path = "hdfs://192.168.40.110:9000/user/hive/warehouse/dwd_ds_hudi02.db/dim_user_info"

    // Load fact_order_info once and reuse the DataFrame: register the full
    // table as temp01 (needed for the max(etl_date) scalar subquery), then
    // filter it down to the latest partition as the "order_info" view.
    // (The original read the same Hudi path from HDFS twice.)
    val factOrderDf = spark.read.format("hudi").load(fact_order_info_path)
    factOrderDf.createOrReplaceTempView("temp01")
    factOrderDf
      .where("etl_date=(select max(etl_date) from temp01)")
      .dropDuplicates()
      .createOrReplaceTempView("order_info")

    // Same pattern for dim_user_info: latest-partition rows exposed as "user_info".
    val dimUserDf = spark.read.format("hudi").load(dim_user_info_path)
    dimUserDf.createOrReplaceTempView("temp02")
    dimUserDf
      .where("etl_date=(select max(etl_date) from temp02)")
      .dropDuplicates()
      .createOrReplaceTempView("user_info")

    // Split create_time ("yyyyMMdd...") into year/month/day columns; DISTINCT
    // guards against duplicate order rows inflating the aggregates.
    spark.sql(
      """
        |select distinct
        |user_id,
        |final_total_amount,
        |substr(create_time,1,4) as year,
        |substr(create_time,5,2) as month,
        |substr(create_time,7,2) as day
        |from order_info
        |""".stripMargin).createOrReplaceTempView("table01")

    // Per user per day: order count and summed order amount, joined with the
    // user dimension for the user name. uuid() supplies the Hudi record key.
    val result = spark.sql(
      """
        |select
        |uuid() as uuid,
        |user_id,
        |user_name,
        |sum(money) as total_amount,
        |count(*) as total_count,
        |year,
        |month,
        |day
        |from (
        |select
        |t1.user_id,
        |t2.name as user_name,
        |t1.final_total_amount as money,
        |t1.year,
        |t1.month,
        |t1.day
        |from table01 as t1
        |join user_info as t2
        |on t2.id=t1.user_id
        |) as r1
        |group by year,month,day,user_id,user_name
        |""".stripMargin)

    val result_path = "hdfs://192.168.40.110:9000/user/hive/warehouse/dws_ds_hudi02.db/user_consumption_day_aggr"

    spark.sql("use dws_ds_hudi02")

    // Recreate the target table. NOTE: fixed the original property name
    // "hoodie.datasource.hive_aync.mode" (typo) to "hive_sync" — the
    // misspelled key was silently ignored, so Hive sync via HMS never applied.
    spark.sql("drop table if exists user_consumption_day_aggr")
    spark.sql(
      """
        |create table if not exists user_consumption_day_aggr(
        |uuid String,
        |user_id int,
        |user_name String,
        |total_amount double,
        |total_count int,
        |year int,
        |month int,
        |day int
        |)using hudi
        |tblproperties(
        |type="cow",
        |primaryKey="uuid",
        |preCombineField="total_count",
        |hoodie.datasource.hive_sync.mode="hms"
        |)
        |partitioned by(year,month,day)
        |""".stripMargin)

    // Cast aggregate/partition columns to match the declared table schema,
    // then append through the Hudi datasource with matching key/precombine/
    // partition settings.
    result
      .withColumn("total_amount", col("total_amount").cast("double"))
      .withColumn("year", col("year").cast("int"))
      .withColumn("month", col("month").cast("int"))
      .withColumn("day", col("day").cast("int"))
      .write.mode("append")
      .format("hudi")
      .options(getQuickstartWriteConfigs)
      .option(RECORDKEY_FIELD.key(), "uuid")
      .option(PRECOMBINE_FIELD.key(), "total_count")
      .option(PARTITIONPATH_FIELD.key(), "year,month,day")
      .option("hoodie.table.name", "user_consumption_day_aggr")
      .save(result_path)

    spark.close()
  }

}
