package ds_industry_2025.ds.ds_07.T3


import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

import java.util.Properties

/*
      3、根据dwd层的数据，请计算连续两天下单的用户与已下单用户的占比，将结果存入MySQL数据库shtd_result的userrepurchasedrate表
      中(表结构如下)，然后在Linux的MySQL命令行中查询结果数据，将SQL语句复制粘贴至客户端桌面【Release\任务B提交结果.docx】中对应
      的任务序号下，将执行结果截图粘贴至客户端桌面【Release\任务B提交结果.docx】中对应的任务序号下；
 */
object t6 {
  /**
   * Computes the ratio of users who placed orders on two consecutive days to
   * all users that have ever ordered, prints both counts, and persists the
   * single-row result to MySQL shtd_result.userrepurchasedrate (as the task
   * statement above requires).
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("t6")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // JDBC connection properties, shared by the source read and result write.
    val jdbcProps = new Properties()
    jdbcProps.setProperty("user", "root")
    jdbcProps.setProperty("password", "123456")
    jdbcProps.setProperty("driver", "com.mysql.jdbc.Driver")

    val data = spark.read
      .jdbc("jdbc:mysql://192.168.40.110:3306/shtd_store?useSSL=false", "order_info", jdbcProps)

    data.createOrReplaceTempView("data")

    // Denominator: distinct users that placed at least one order.
    val allUsers = data.select("user_id").distinct().count()
    println(s"下单过的用户数量:${allUsers}")

    // Numerator: users with orders on at least two consecutive calendar days.
    // Fixes vs the original query:
    //  * count(distinct user_id) — the original counted one row per
    //    consecutive-day PAIR, over-counting users with 3+ day streaks;
    //  * the "total_amount2 > total_amount" filter was removed — the task only
    //    asks for consecutive-day orders, not growing order amounts.
    val consecutiveUsers = spark.sql(
      """
        |select count(distinct user_id) as cnt
        |from (
        |  select
        |    user_id,
        |    dt,
        |    lead(dt) over(partition by user_id order by dt) as next_dt
        |  from (
        |    select distinct user_id, to_date(create_time) as dt
        |    from data
        |  ) as order_days
        |) as day_pairs
        |where datediff(next_dt, dt) = 1
        |""".stripMargin).first().getLong(0)

    println(s"连续两天下单的用户数量:${consecutiveUsers}")

    // Build the single-row result. Guard against division by zero when the
    // source table is empty, and round AFTER scaling to percent so the string
    // does not show floating-point noise (e.g. "42.900000000000006%").
    val result = spark.sql(
        s"""
           |select
           |${allUsers} as purchaseduser,
           |${consecutiveUsers} as repurchaseduser
           |""".stripMargin)
      .withColumn(
        "repurchaserate",
        concat(
          when(col("purchaseduser") === 0, lit(0.0))
            .otherwise(round(col("repurchaseduser") / col("purchaseduser") * 100, 1))
            .cast("string"),
          lit("%")
        )
      )

    result.show()

    // Persist to shtd_result.userrepurchasedrate, as the task requires.
    // NOTE(review): column names assumed to be purchaseduser / repurchaseduser /
    // repurchaserate — confirm against the actual table DDL in the task sheet.
    result.write
      .mode("append")
      .jdbc("jdbc:mysql://192.168.40.110:3306/shtd_result?useSSL=false",
        "userrepurchasedrate", jdbcProps)

    spark.close()
  }
}
