package ds_industry_2025.ds.ds01.T3

import org.apache.spark.sql.SparkSession

/*
  4、根据dwd层表统计在两天内连续下单并且下单金额保持增长的用户，存入MySQL数据库shtd_result的usercontinueorder表(表结构如下)
  中，然后在Linux的MySQL命令行中根据订单总数、订单总金额、客户主键均为降序排序，查询出前5条，将SQL语句复制粘贴至客户端桌
  面【Release\任务B提交结果.docx】中对应的任务序号下，将执行结果截图粘贴至客户端桌面【Release\任务B提交结果.docx】中对应的任
  务序号下；
 */
object t4 {
  /**
   * Task 4: from the dwd-layer tables, find users who placed orders on two
   * consecutive days with the order amount increasing on the second day.
   * Output columns: userid, username, day ("yyyyMMdd_yyyyMMdd"),
   * totalconsumption (sum of the two days' amounts), totalorder (sum of the
   * two days' order counts).
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("t4")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // Keep only the latest snapshot partition of each dwd table. Scalar
    // subqueries are expressed through spark.sql (they are not reliably
    // supported inside DataFrame .where(...) expression strings).
    // View names avoid SQL reserved keywords ("order" only parses because
    // Spark's non-ANSI mode tolerates keywords as identifiers).
    spark.sql(
      """
        |select * from dwd.dim_user_info
        |where etl_date = (select max(etl_date) from dwd.dim_user_info)
        |""".stripMargin)
      .createOrReplaceTempView("dim_user")

    spark.sql(
      """
        |select * from dwd.fact_order_info
        |where etl_date = (select max(etl_date) from dwd.fact_order_info)
        |""".stripMargin)
      .createOrReplaceTempView("fact_order")

    /*
      1. r1: per (user, day), compute that day's order count and order amount
         (window aggregation + distinct plays the role of a group-by).
      2. r2: with lead() (lag would look backwards instead), pair each day with
         the user's NEXT ordering day and that day's count/amount.
      3. Outer query: keep only pairs of strictly consecutive days whose amount
         increased, then sum the two days' counts/amounts. concat() builds the
         "yyyyMMdd_yyyyMMdd" day label.
     */
    val result = spark.sql(
      """
        |select
        |r2.user_id as userid,
        |r2.user_name as username,
        |concat(
        |date_format(r2.day1,"yyyyMMdd"),
        |"_",
        |date_format(r2.day2,"yyyyMMdd")
        |) as day,
        |(r2.money_sum1 + r2.money_sum2) as totalconsumption,
        |(r2.order_count1 + r2.order_count2) as totalorder
        |from(
        |select distinct
        |r1.user_id,r1.user_name,
        |r1.day as day1,
        |lead(r1.day) over(partition by r1.user_id,r1.user_name order by r1.day) as day2,
        |r1.order_count as order_count1,
        |lead(r1.order_count) over(partition by r1.user_id,r1.user_name order by r1.day) as order_count2,
        |r1.money_sum as money_sum1,
        |lead(r1.money_sum) over(partition by r1.user_id,r1.user_name order by r1.day) as money_sum2
        |from(
        |select distinct
        |o.user_id,
        |u.name as user_name,
        |to_date(o.create_time) as day,
        |count(*) over(partition by o.user_id,u.name,to_date(o.create_time)) as order_count,
        |sum(o.final_total_amount) over(partition by o.user_id,u.name,to_date(o.create_time)) as money_sum
        |from fact_order as o
        |join dim_user as u
        |on u.id=o.user_id
        |) as r1
        |) as r2
        |where datediff(r2.day2,r2.day1) = 1 and r2.money_sum2 > r2.money_sum1
        |""".stripMargin)
    // BUG FIX: the original filtered on datediff(day1, day2) = 1. Since day2
    // is the lead() (i.e. LATER) day, that difference is negative for
    // consecutive days, so the original query always returned zero rows.
    // datediff(later, earlier) = 1 is the intended "two consecutive days".

    result.show
    // TODO(review): the task statement requires persisting `result` into
    // MySQL shtd_result.usercontinueorder (e.g. result.write.format("jdbc")...);
    // currently the result is only printed.

    spark.close()
  }
}
