package cn.itcast.model.base
import org.apache.spark.sql.{Column, DataFrame}

// Consumption-cycle (recency) tag model: tags members by days since their last completed order.
object CycleModel extends BaseModel {
  override def setAppName(): String = "CycleModel"

  // NOTE(review): still unimplemented (???) — calling it throws NotImplementedError.
  override def importSpark(): Unit = ???

  import spark.implicits._
  import org.apache.spark.sql.functions._

  // NOTE(review): still unimplemented (???) — calling it throws NotImplementedError.
  override def setTagId(): Int = ???

  /**
   * Computes the consumption-cycle tag for each member.
   *
   * @param fiveRuleDF  five-level rule rows with columns `id` (Int) and `rule`
   *                    (String), where `rule` encodes a day range as "start-end"
   * @param hbaseSource order records with columns `memberId` and `finishTime`
   *                    (presumably a unix timestamp in seconds — from_unixtime
   *                    is applied below; confirm against the HBase schema)
   * @return DataFrame with columns `userId` and `tagIds`
   */
  override def computeTag(fiveRuleDF: DataFrame, hbaseSource: DataFrame): DataFrame = {
    // Parse each rule "start-end" into numeric bounds so the range check below
    // is an explicit numeric comparison rather than relying on Spark's implicit
    // coercion of string columns. No collect(): the rule set stays distributed.
    val fiveDF: DataFrame = fiveRuleDF.map(row => {
      val id: String = row.getAs[Int]("id").toString
      val bounds: Array[String] = row.getAs[String]("rule").split("-")
      (id, bounds(0).trim.toInt, bounds(1).trim.toInt)
    }).toDF("tagIds", "start", "end")

    // Most recent completed order per member.
    val maxDF: DataFrame = hbaseSource.groupBy($"memberId")
      .agg(max($"finishTime").as("finishTime"))

    // Days elapsed since that last order.
    val dayNum: Column = datediff(
      current_timestamp(),
      from_unixtime(maxDF.col("finishTime"))
    )
    val dayDF: DataFrame = maxDF.select($"memberId".as("userId"), dayNum.as("dayNum"))

    // Join with the range predicate as an explicit join condition instead of a
    // bare join + where, so Spark never plans an unconstrained cartesian product.
    dayDF.join(fiveDF, $"dayNum".between($"start", $"end"))
      .select($"userId", $"tagIds")
  }

  /** Entry point: wires the standard BaseModel pipeline together. */
  def main(args: Array[String]): Unit = {
    //1. Spark session comes from BaseModel; 2. implicits imported above.
    //3. Load the MySQL rule source.
    val mysqlSource: DataFrame = loadMysqlSource()
    //4. Four-level rules tell us which HBase table/columns to read.
    val fourRuleMap: Map[String, String] = getFourRuleData(mysqlSource)
    //5. Five-level rules define the tag id per day range.
    val fiveRuleDF: DataFrame = getFiveRuleData(mysqlSource)
    //6. Load the order data from HBase.
    val hbaseSource: DataFrame = loadHBaseSource(fourRuleMap)
    //7. Compute the new tags.
    val newDF: DataFrame = computeTag(fiveRuleDF, hbaseSource)
    //8. Merge with previously persisted tags and save.
    val oldDF: DataFrame = loadOldDF()
    val result: DataFrame = mergeTag(newDF, oldDF)
    saveResult(result)
  }
}
