package ds_industry_2025.ds.ds_07.T3

import org.apache.spark.sql.{SparkSession, functions}
import org.apache.spark.sql.functions._

import java.util.Properties
/*
    3、根据dwd层的数据，请计算连续两天下单的用户与已下单用户的占比，将结果存入MySQL数据库shtd_result的userrepurchasedrate表中
    (表结构如下)，然后在Linux的MySQL命令行中查询结果数据，将SQL语句复制粘贴至客户端桌面【Release\任务B提交结果.docx】中对应的任
    务序号下，将执行结果截图粘贴至客户端桌面【Release\任务B提交结果.docx】中对应的任务序号下；
 */
object t3 {

  /**
   * Task 3: from the dwd layer, compute the ratio of users who placed orders
   * on two consecutive calendar days ("repurchased" users) to all users who
   * placed at least one order, and write the one-row result to the MySQL
   * table `shtd_result.userrepurchasedrate`
   * (columns: purchaseduser, repurchaseduser, repurchaserate).
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("t3")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // Only the most recent partition of the fact table is relevant.
    // NOTE(review): a scalar subquery inside Dataset.where(String) is not
    // accepted by every Spark version's expression parser — if this throws a
    // ParseException, compute max(etl_date) first and filter on the literal.
    val data = spark.table("dwd.fact_order_info")
      .where("etl_date=(select max(etl_date) from dwd.fact_order_info)")

    data.createOrReplaceTempView("data")

    // Denominator: number of distinct users with at least one order.
    val purchasedUserCount = data.select("user_id").distinct().count()

    // Numerator: users who ordered on two consecutive days. For each user,
    // pair every order date with the next order date via lead(); a user
    // qualifies if any gap is exactly one day. Remember that
    // datediff(a, b) computes a - b in days, so the condition is
    // datediff(day2, day1) = 1 with day2 being the later date.
    // (Duplicate same-day orders are harmless: they yield datediff = 0 rows,
    // which the filter discards, while the last order of a day still pairs
    // with the next day's first order.)
    val repurchasedUserCount = spark.sql(
      s"""
         |select distinct user_id
         |from (
         |  select
         |    user_id,
         |    to_date(create_time) as day1,
         |    lead(to_date(create_time)) over (partition by user_id order by create_time) as day2
         |  from data
         |) t
         |where datediff(day2, day1) = 1
         |""".stripMargin).count()

    // Assemble the single result row with typed columns instead of
    // interpolating the counts into SQL string literals and casting back.
    // The rate is rendered as a percentage string with one decimal, e.g. "12.5%".
    // If no user ever ordered, the division yields null (Spark's non-ANSI
    // divide-by-zero behavior) and repurchaserate is null, same as before.
    val result = spark.range(1)
      .select(
        lit(purchasedUserCount).cast("int").as("purchaseduser"),
        lit(repurchasedUserCount).cast("int").as("repurchaseduser")
      )
      .withColumn(
        "repurchaserate",
        functions.concat(
          functions.round(col("repurchaseduser") / col("purchaseduser") * 100, 1),
          lit("%")
        )
      )

    result.show

    // JDBC connection properties for the target MySQL instance.
    // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x
    // class; Connector/J 8.x renames it to "com.mysql.cj.jdbc.Driver" —
    // confirm which connector jar is on the classpath before changing.
    val conn = new Properties()
    conn.setProperty("user", "root")
    conn.setProperty("password", "123456")
    conn.setProperty("driver", "com.mysql.jdbc.Driver")

    // "overwrite" drops and recreates the target table on each run.
    result.write.mode("overwrite")
      .jdbc("jdbc:mysql://192.168.40.110:3306/shtd_result?useSSL=false", "userrepurchasedrate", conn)

    spark.close()
  }

}
