package com.liyueheng.app.dataWarehouseDetail.rfm

import com.liyueheng.util.{ConfigLoader, SaveAsTable, SparkConf}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.SparkSession

object RfmSegmentSummary {

  /**
   * Classifies every customer into one of the eight standard RFM segments by
   * comparing the customer's R/F/M scores against the global averages, then
   * persists both the per-customer segment table and a per-segment count summary.
   *
   * Reads:  `dws.detail_rfm_score` (per-customer scores),
   *         `dws.detail_rfm_avg`   (expected to contain a single row of averages).
   * Writes: `dws.detail_rfm_segment`, `dws.detail_rfm_segment_summary`.
   */
  def statSegment(): Unit = {
    println("------------------ 计算RFM分数等级 -----------------")
    val spark: SparkSession = SparkConf.createSparkSession("RFM-Segment")
    import spark.implicits._
    val dws = ConfigLoader.getString("databases.dws")

    val rfmDF = spark.table(s"$dws.detail_rfm_score")
    // NOTE(review): assumes detail_rfm_avg holds exactly one row — first()
    // throws on an empty table; confirm upstream always produces it.
    val avgRow = spark.table(s"$dws.detail_rfm_avg").first()

    val r_avg = avgRow.getAs[Double]("r_avg")
    val f_avg = avgRow.getAs[Double]("f_avg")
    val m_avg = avgRow.getAs[Double]("m_avg")

    // "High" means strictly above average; everything else — including a score
    // exactly equal to the average — counts as "low". The previous version used
    // strict `<` for the low branches with no `.otherwise`, so rows with a score
    // equal to an average matched nothing and received a null segment.
    val rHigh = $"r_score" > r_avg
    val fHigh = $"f_score" > f_avg
    val mHigh = $"m_score" > m_avg

    val result = rfmDF.withColumn("segment",
      when(rHigh && fHigh && mHigh, "重要价值客户")
        .when(rHigh && !fHigh && mHigh, "重要发展客户")
        .when(rHigh && !fHigh && !mHigh, "一般发展客户")
        .when(!rHigh && fHigh && !mHigh, "一般保持客户") // fixed typo: was "一般保持客客户"
        .when(!rHigh && fHigh && mHigh, "重要保持客户")
        .when(rHigh && fHigh && !mHigh, "一般价值客户")
        .when(!rHigh && !fHigh && !mHigh, "一般挽留客户")
        // Only one combination remains (!rHigh && !fHigh && mHigh); using
        // otherwise makes the classification provably exhaustive.
        .otherwise("重要挽留客户")
    )

    // Persist the per-customer segment assignment.
    SaveAsTable.saveAsTable(result, s"$dws.detail_rfm_segment")

    // Count customers per segment and persist the summary.
    val summary = result.groupBy("segment").count()
    SaveAsTable.saveAsTable(summary, s"$dws.detail_rfm_segment_summary")

    SparkConf.stopSparkSession(spark)
  }
}
