package com.liyueheng.app.dataWarehouseDetail.rfm

import com.liyueheng.util.{ConfigLoader, SaveAsTable, SparkConf}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.SparkSession

object RfmScore {

  /**
   * Computes R/F/M (Recency / Frequency / Monetary) scores per user and
   * persists both the per-user score table and the score averages.
   *
   * Reads three DWS detail tables (`detail_rfm_last_purchase_date`,
   * `detail_rfm_frequency`, `detail_rfm_monetary`), buckets each metric
   * into a 1-5 score, inner-joins on `user`, and writes
   * `detail_rfm_score` and `detail_rfm_avg` via `SaveAsTable`.
   *
   * @param referenceDate the "as of" date (yyyy-MM-dd) used to compute
   *                      recency in days; defaults to "2018-04-16" to
   *                      preserve the original hard-coded behavior.
   */
  def calcRfmScores(referenceDate: String = "2018-04-16"): Unit = {
    // Moved inside the method: at object level this println ran at object
    // initialization, not at job execution time.
    println("------------------ 计算 R/F/M 分数 -----------------")

    val spark: SparkSession = SparkConf.createSparkSession("RFM-Score")
    val dws = ConfigLoader.getString("databases.dws")

    import spark.implicits._

    // ----------------- Read R/F/M source tables -------------------
    val lastPurchaseDF = spark.table(s"$dws.detail_rfm_last_purchase_date")
    val frequencyDF = spark.table(s"$dws.detail_rfm_frequency")
    val monetaryDF = spark.table(s"$dws.detail_rfm_monetary")

    // ----------------- R score: fewer days since last purchase => higher score
    val rScore = lastPurchaseDF
      .withColumn("days_since", datediff(lit(referenceDate), $"last_purchase_date"))
      .withColumn("r_score", when($"days_since" <= 15, 5)
        .when($"days_since" <= 30, 4)
        .when($"days_since" <= 45, 3)
        .when($"days_since" <= 60, 2)
        .otherwise(1))

    // ----------------- F score: more purchases => higher score
    val fScore = frequencyDF
      .withColumn("f_score", when($"frequency" <= 1, 1)
        .when($"frequency" <= 2, 2)
        .when($"frequency" <= 3, 3)
        .when($"frequency" <= 4, 4)
        .otherwise(5))

    // ----------------- M score: higher spend => higher score
    val mScore = monetaryDF
      .withColumn("m_score", when($"monetary" <= 200, 1)
        .when($"monetary" <= 399, 2)
        .when($"monetary" <= 599, 3)
        .when($"monetary" <= 799, 4)
        .otherwise(5))

    // ----------------- Combine the three scores on `user` (inner join:
    // only users present in all three tables receive a combined score)
    val rfmDF = rScore.select("user", "r_score")
      .join(fScore.select("user", "f_score"), Seq("user"))
      .join(mScore.select("user", "m_score"), Seq("user"))

    SaveAsTable.saveAsTable(rfmDF, s"$dws.detail_rfm_score")

    // ----------------- Average of each score across all scored users
    val avgScores = rfmDF.agg(
      avg("r_score").alias("r_avg"),
      avg("f_score").alias("f_avg"),
      avg("m_score").alias("m_avg")
    )

    SaveAsTable.saveAsTable(avgScores, s"$dws.detail_rfm_avg")

    SparkConf.stopSparkSession(spark)
  }
}
