package ds_industry_2025.ds.ds_02.T3

import org.apache.hudi.DataSourceWriteOptions._
import org.apache.hudi.QuickstartUtils.getQuickstartWriteConfigs
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions._

import java.text.SimpleDateFormat
import java.util.{Calendar, Properties}

/*
    2、根据dwd层表统计每人每天下单的数量和下单的总金额，存入Hudi的dws_ds_hudi层的user_consumption_day_aggr表中（表结构如下）
    ，然后使用spark -shell按照客户主键、订单总金额均为降序排序，查询出前5条，将SQL语句复制粘贴至客户端桌面【Release\任务B提交结
    果.docx】中对应的任务序号下，将执行结果截图粘贴至客户端桌面【Release\任务B提交结果.docx】中对应的任务序号下；
 */
object t2 {
  def main(args: Array[String]): Unit = {
    // Spark session with Hive support and the Hudi SQL extension.
    // Kryo serialization is required by Hudi's write path.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("t2")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // DWD-layer Hudi table paths on HDFS.
    val user_hdfs = "hdfs://192.168.40.110:9000/user/hive/warehouse/dwd_ds_hudi.db/dim_user_info"
    val order_hdfs = "hdfs://192.168.40.110:9000/user/hive/warehouse/dwd_ds_hudi.db/fact_order_info"

    // Load the user dimension ONCE and register it; the latest-partition
    // filter below reuses this view instead of re-reading the Hudi table.
    spark.read.format("hudi").load(user_hdfs)
      .createOrReplaceTempView("t1")

    // BUG FIX: scalar subqueries are not supported inside
    // Dataset.where(conditionExpr) and raise an AnalysisException; the
    // latest-etl_date filter must be expressed as a full SQL statement.
    spark.sql(
      """
        |select * from t1
        |where etl_date = (select max(etl_date) from t1)
        |""".stripMargin)
      .createOrReplaceTempView("user_info")

    spark.read.format("hudi").load(order_hdfs)
      .createOrReplaceTempView("order_info")

    // NOTE: uuid() must stay in the OUTER query. Generating it inside the
    // subquery would make every row unique and defeat the DISTINCT dedup.
    val result = spark.sql(
      """
        |select
        |uuid() as uuid,
        |*
        |from(
        |select distinct
        |o.user_id,
        |u.name as user_name,
        |sum(o.final_total_amount)
        |over(partition by Year(o.create_time),Month(o.create_time),day(o.create_time),o.user_id,u.name) as total_amount,
        |count(*)
        |over(partition by  Year(o.create_time),Month(o.create_time),day(o.create_time),o.user_id,u.name) as total_count,
        |year(o.create_time) as year,
        |month(o.create_time) as month,
        |day(o.create_time) as day
        |from order_info as o
        |join user_info as u
        |on u.id=o.user_id
        |) as r1
        |""".stripMargin)

    result.show

    // Verification query for the deliverable: top 5 rows ordered by customer
    // primary key and order total amount, both descending.
    result.createOrReplaceTempView("result")
    spark.sql("select * from result order by user_id desc,total_amount desc limit 5").show

    // Write-back to the DWS Hudi table; kept disabled for local verification
    // runs. Record key = uuid, precombine = total_count, partitioned by y/m/d.
//   result
//     .write.format("hudi").mode("append")
//     .options(getQuickstartWriteConfigs)
//     .option(RECORDKEY_FIELD.key(), "uuid")
//     .option(PRECOMBINE_FIELD.key(), "total_count")
//     .option(PARTITIONPATH_FIELD.key(), "year,month,day")
//     .option("hoodie.table.name", "user_consumption_day_aggr")
//     .save("hdfs://192.168.40.110:9000/user/hive/warehouse/dws_ds_hudi.db/user_consumption_day_aggr")

    spark.close()
  }

}
