package com.liyueheng.app.dataWarehouseDetail.behavior

import com.liyueheng.util.{ConfigLoader, SaveAsTable, SparkConf}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

/**
 * Batch job that computes user retention rates (day-1 / day-3 / day-7) from the
 * DWD-layer activity table and writes the result to the DWS layer.
 *
 * Retention here is "strict" retention: a user counts as day-N retained only if
 * they were active exactly N days after their first active day.
 */
object UserRetention {

  /**
   * Runs the retention analysis end to end:
   * reads `<dwd>.user_act`, aggregates per cohort date, and overwrites
   * `<dws>.detail_user_retention` as a Hive table.
   *
   * Table names come from configuration (`databases.dwd` / `databases.dws`);
   * the SparkSession is always stopped, even if the job fails.
   */
  def analyzeRetention(): Unit = {
    println("------------------ 分析用户留存 -----------------")

    // 1. Initialization: load configured app name and database names, start Spark.
    val appName     = ConfigLoader.getString("app.name")
    val databaseDwd = ConfigLoader.getString("databases.dwd")
    val databaseDws = ConfigLoader.getString("databases.dws")
    val spark: SparkSession = SparkConf.createSparkSession(appName)

    try {
      // 2. Retention query.
      //    Inner subquery `a`: each user's first active date (cohort day).
      //    Self-joins b/c/d: match activity exactly 1/3/7 days after the first day,
      //    so COUNT(DISTINCT x.user) yields day-N retained users per cohort.
      //    Outer select: format retained/new ratios as percentage strings.
      //    NOTE: the source table uses the configured DWD database, not a
      //    hard-coded one, and the statement carries no trailing semicolon
      //    (Spark's SQL parser rejects it).
      val result = spark.sql(
        s"""
           |SELECT
           |date,
           |new_add_user,
           |CONCAT(ROUND((day1_retained_users / new_add_user) * 100, 2), "%") AS day1_retention_rate,
           |CONCAT(ROUND((day3_retained_users / new_add_user) * 100, 2), "%") AS day3_retention_rate,
           |CONCAT(ROUND((day7_retained_users / new_add_user) * 100, 2), "%") AS day7_retention_rate
           |FROM
           |(
           |SELECT
           |first_day AS date,
           |COUNT(DISTINCT a.user) AS new_add_user,
           |COUNT(DISTINCT b.user) AS day1_retained_users,
           |COUNT(DISTINCT c.user) AS day3_retained_users,
           |COUNT(DISTINCT d.user) AS day7_retained_users
           |FROM
           |(
           |SELECT
           |user,
           |MIN(act_date) AS first_day
           |FROM
           |$databaseDwd.user_act
           |GROUP BY user
           |) a
           |LEFT JOIN $databaseDwd.user_act b ON a.user = b.user AND DATEDIFF(b.act_date, a.first_day) = 1
           |LEFT JOIN $databaseDwd.user_act c ON a.user = c.user AND DATEDIFF(c.act_date, a.first_day) = 3
           |LEFT JOIN $databaseDwd.user_act d ON a.user = d.user AND DATEDIFF(d.act_date, a.first_day) = 7
           |GROUP BY a.first_day
           |) P
           |""".stripMargin)

      // 3. Persist: preview, then overwrite the DWS result table in Hive format.
      result.show()
      result.write.mode("overwrite").format("hive").saveAsTable(s"$databaseDws.detail_user_retention")
      println("✅ 用户留存分析完成，结果表： " + s"$databaseDws.detail_user_retention")
    } finally {
      // Always release the Spark session, even on failure.
      SparkConf.stopSparkSession(spark)
    }
  }

  /** Entry point: runs the retention analysis. */
  def main(args: Array[String]): Unit = {
    analyzeRetention()
  }
}
