package DianShang_2024.ds_07.indicator

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col

import java.util.Properties

/**
 * Indicator 3: user repurchase rate.
 *
 * Reads the latest `etl_date` partition of `dwd07.fact_order_info`
 * (rows with a non-null `create_time` only), then computes:
 *   - purchaseduser:   distinct users that placed at least one order
 *   - repurchaseduser: distinct users that ordered on two consecutive
 *                      calendar days
 *   - repurchaserate:  repurchaseduser / purchaseduser as a percentage
 *                      string rounded to one decimal (e.g. "12.5%")
 * and overwrites the result into MySQL table
 * `shtd_result.userrepurchasedrate07`.
 */
object indicator03 {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("指标计算第三题")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
//      .config("spark.serializer","org.apache.spark.serializer.KryoSerializer")
//      .config("spark.sql.extensions","org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // Snapshot of the newest partition; rows without create_time cannot take
    // part in the consecutive-day window calculation, so drop them up front.
    spark.sql("use dwd07")
    spark.table("dwd07.fact_order_info")
      .where("etl_date=(select max(etl_date) from fact_order_info)")
      .where(col("create_time").isNotNull)
      .createOrReplaceTempView("order_info")

    // Number of distinct users that ever placed an order.
    val allUserCount = spark.sql(
      """
        |select distinct
        |user_id
        |from order_info
        |""".stripMargin).count()

    // NOTE: previous code used println(label, value) auto-tupling, which
    // printed "(label,value)"; interpolate into a single string instead.
    println(s"所有下过单的用户数量:$allUserCount")

    // Users with orders on two consecutive days: lag() yields each user's
    // previous order day; datediff == 1 marks a consecutive pair.
    val continuousUserCount = spark.sql(
      """
        |select distinct
        |user_id
        |from(
        |select
        |user_id,
        |to_date(create_time) as day,
        |lag(to_date(create_time),1,null) over(partition by user_id order by to_date(create_time)) as day2
        |from order_info
        |) as r1
        |where datediff(r1.day,r1.day2)=1
        |""".stripMargin).count()

    println(s"存在连续两天下单用户的数量:$continuousUserCount")

    // Both counts are already driver-side Longs, so select them as literals.
    // (The old "from order_info limit 1" row-generator produced ZERO rows —
    // and therefore wrote nothing to MySQL — whenever order_info was empty;
    // a FROM-less SELECT always yields exactly one row.)
    // When allUserCount is 0 the division yields NULL, so repurchaserate is
    // NULL rather than an error.
    val result = spark.sql(
      s"""
        |select
        |purchaseduser,
        |repurchaseduser,
        |concat(round((repurchaseduser / purchaseduser )*100,1),"%") as repurchaserate
        |from(
        |select
        |${allUserCount} as purchaseduser,
        |${continuousUserCount} as repurchaseduser
        |) as r1
        |""".stripMargin)

    // NOTE(review): hard-coded credentials and a plain-text password should
    // move to configuration/secrets management; left as-is to preserve
    // behavior. com.mysql.jdbc.Driver is the legacy Connector/J 5.x class —
    // confirm the driver jar on the classpath before renaming it.
    val connect = new Properties()
    connect.setProperty("user", "root")
    connect.setProperty("password", "123456")
    connect.setProperty("driver", "com.mysql.jdbc.Driver")

    // Overwrite so re-runs replace, not duplicate, the single result row.
    result.write.mode("overwrite")
      .jdbc("jdbc:mysql://192.168.40.110:3306/shtd_result?useSSL=false", "userrepurchasedrate07", connect)

    spark.close()
  }

}
