package com.liyueheng.app.dataWarehouseDetail.behavior

import com.liyueheng.util.{ConfigLoader, SparkConf}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

object MonthlyActiveUser {

  /**
   * Computes monthly active users (MAU) for February through May 2018.
   *
   * Reads the DWD activity table `<dwd>.user_act`, buckets each activity
   * date into a Chinese month label ("2月" … "5月"), counts distinct users
   * per bucket, and overwrites the DWS result table `<dws>.detail_mau`
   * with columns (name, value). Dates outside the four windows are
   * excluded from the output.
   */
  def analyzeMAU(): Unit = {
    println("------------------ 分析月活跃用户 -----------------")
    val applicationName = ConfigLoader.getString("app.name")
    val dwdDb = ConfigLoader.getString("databases.dwd")
    val dwsDb = ConfigLoader.getString("databases.dws")
    val session: SparkSession = SparkConf.createSparkSession(applicationName)

    try {
      // (label, first day, last day) for each month in scope.
      val monthWindows = Seq(
        ("2月", "2018-02-01", "2018-02-28"),
        ("3月", "2018-03-01", "2018-03-31"),
        ("4月", "2018-04-01", "2018-04-30"),
        ("5月", "2018-05-01", "2018-05-31")
      )

      // Fold the windows into one chained `when` expression. The windows are
      // disjoint, so evaluation order does not affect the result; any date
      // outside every window resolves to null.
      val monthLabel = monthWindows.foldLeft(lit(null).cast("string")) {
        case (fallback, (label, firstDay, lastDay)) =>
          when(col("act_date").between(firstDay, lastDay), label).otherwise(fallback)
      }

      // Dropping null labels before the aggregation removes the same rows the
      // original post-aggregation filter would have removed as a single group.
      val mauByMonth = session
        .table(s"$dwdDb.user_act")
        .withColumn("name", monthLabel)
        .filter(col("name").isNotNull)
        .groupBy("name")
        .agg(countDistinct("user").alias("value"))

      mauByMonth.write.mode("overwrite").format("hive").saveAsTable(s"$dwsDb.detail_mau")
    } finally {
      // Always release the session, even when the job fails mid-way.
      SparkConf.stopSparkSession(session)
    }
  }
}
