package ds_industry_2025.ds.YangJuan_2024.T3

import org.apache.spark.sql.SparkSession
/*
    根据 dwd 层表统计在两天内连续下单并且下单金额保持增长的用户，存入 MySQL 数
据库 shtd_result 的 usercontinueorder 表(表结构如下)中，然后在 Linux 的 MySQL 命令
行中根据订单总数、订单总金额、客户主键均为降序排序，查询出前 5 条，将 SQL 语
句复制粘贴至客户端桌面【Release\任务 B 提交结果.docx】中对应的任务序号下，将执
行结果截图粘贴至客户端桌面【Release\任务 B 提交结果.docx】中对应的任务序号下。
 */
object t4 {
  /**
   * Finds users who placed orders on two consecutive days with a strictly
   * increasing daily order total, based on the dwd-layer Hive tables.
   *
   * Output columns: userid, username, day ("yyyyMMdd_yyyyMMdd" for the two
   * consecutive days), totalconsumption (sum of both days' amounts),
   * totalorder (sum of both days' order counts).
   *
   * NOTE(review): the task text requires writing the result into MySQL table
   * shtd_result.usercontinueorder; this job currently only calls `show` —
   * a JDBC write still needs to be added (credentials/driver not visible here).
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("t4")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      // FIX: removed `.config("spark.sql.extensions", "org.apache.spark.sql.extension")`
      // — that class does not exist; the setting was at best a startup warning
      // and at worst a ClassNotFoundException, and provided no functionality.
      .enableHiveSupport()
      .getOrCreate()

    spark.table("dwd.dim_user_info")
      .createOrReplaceTempView("user_info")

    spark.table("dwd.fact_order_info")
      .createOrReplaceTempView("order_info")

    // Step 1 (innermost r1): per user per calendar day, compute the day's total
    //   amount and order count via windows (DISTINCT collapses to one row/day).
    // Step 2 (r2): pair each day with the user's next order day via lead().
    // Step 3 (outer): keep only pairs that are exactly one day apart with a
    //   strictly increasing amount, then aggregate the pair into one row.
    val result = spark.sql(
      """
        |select
        |userid,username,
        |concat(date_format(day,"yyyyMMdd"),"_",date_format(day2,"yyyyMMdd")) as day,
        |(today_money + money2) as totalconsumption ,
        |(today_count + count2) as totalorder
        |from(
        |select distinct
        |userid,username,
        |day,
        |lead(day) over(partition by userid,username order by day) as day2,
        |today_money,
        |lead(today_money) over(partition by userid,username order by day) money2,
        |today_count,
        |lead(today_count) over(partition by userid,username order by day) as count2
        |from(
        |select distinct
        |o.user_id as userid,
        |u.name as username,
        |to_date(o.create_time) as day,
        |sum(o.final_total_amount) over(partition by o.user_id,u.name,to_date(o.create_time)) as today_money,
        |count(*) over(partition by o.user_id,u.name,to_date(o.create_time)) as today_count
        |from  order_info as o
        |join user_info as u
        |on u.id=o.user_id
        |) as r1
        |) as r2
        |-- FIX: datediff(endDate, startDate) = end - start; day2 is the LATER
        |-- day, so the original datediff(day, day2) = -1 for consecutive days
        |-- and the filter never matched (the job always returned an empty set).
        |where datediff(day2, day) = 1 and money2 > today_money
        |""".stripMargin)

    result.show

    // TODO(review): write `result` to MySQL shtd_result.usercontinueorder via
    // JDBC as the task requires, before querying the top 5 from the mysql CLI.

    spark.close()
  }

}
