package ds_industry_2025.ds.ds01.T3

import org.apache.spark.sql.SparkSession

import java.util.Properties

/*
      4、根据dwd层表统计在两天内连续下单并且下单金额保持增长的用户，存入MySQL数据库shtd_result的usercontinueorder表(表结构
      如下)中，然后在Linux的MySQL命令行中根据订单总数、订单总金额、客户主键均为降序排序，查询出前5条，将SQL语句复制粘贴至客户端
      桌面【Release\任务B提交结果.docx】中对应的任务序号下，将执行结果截图粘贴至客户端桌面【Release\任务B提交结果.docx】中对
      应的任务序号下；
 */
object t5 {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("t4")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    val conn=new Properties()
    conn.setProperty("user","root")
    conn.setProperty("password","123456")
    conn.setProperty("driver","com.mysql.jdbc.Driver")

    spark.read
      .jdbc("jdbc:mysql://192.168.40.110:3306/shtd_store?useSSL=false","order_info",conn)
      .createOrReplaceTempView("order_info")

    spark.read.jdbc("jdbc:mysql://192.168.40.110:3306/shtd_store?useSSL=false","user_info",conn)
      .createOrReplaceTempView("user_info")

    val result = spark.sql(
      """
        |select
        |user_id,user_name,
        |concat_ws("_",date_format(day,"yyyyMMdd"),date_format(day2,"yyyyMMdd")) as day,
        |(money + money2) as  totalconsumption,
        |(count + count2) as totalorder
        |from(
        |select distinct
        |*,
        |lead(money,1,0) over(partition by user_id,user_name order by day) as money2,
        |lead(day,1,"1970-01-01 00:00:00") over(partition by user_id,user_name order by day) as day2,
        |lead(count,1,0) over(partition by user_id,user_name order by day) as count2
        |from(
        |select distinct
        |o.user_id,
        |u.name as user_name,
        |sum(o.final_total_amount) over(partition by o.user_id,u.name,to_date(o.create_time)) as money,
        |count(*) over(partition by o.user_id,u.name,to_date(o.create_time)) as count,
        |to_date(o.create_time) as day
        |from order_info as o
        |join user_info as u
        |on u.id=o.user_id
        |) as r1
        |) as r2
        |where datediff(day2,day)=1 and money2 > money
        |""".stripMargin)

    result.show(false)


    spark.close()
  }

}
