package cn.itcast.model.base
import org.apache.spark.sql.{Column, DataFrame, Dataset, Row}

/**
 * Tags users by birthday range ("星座"/generation style rules).
 *
 * For every user row from HBase, normalizes the `birthday` column to a
 * digits-only string and matches it against the five-level rule ranges
 * ("start-end") loaded from MySQL, emitting (userId, tagIds) pairs.
 */
object BirthdayModel extends BaseModel {
  override def setAppName(): String = "BirthdayModel"

  // TODO(review): unimplemented — throws NotImplementedError if the BaseModel
  // template ever invokes it. Provide a real body before running this model.
  override def importSpark(): Unit = ???
  import spark.implicits._
  import org.apache.spark.sql.functions._
  // TODO(review): unimplemented — the tag id must come from the tag metadata
  // (presumably the MySQL four-level rule table); fill in before running.
  override def setTagId(): Int = ???

  /**
   * Computes the birthday tag for every user.
   *
   * @param fiveRuleDF  five-level rules with columns `id` (tag id) and
   *                    `rule` in the form "start-end" (e.g. "19900101-19991231")
   * @param hbaseSource user data with columns `id` and `birthday`
   *                    (assumed "yyyy-MM-dd" — TODO confirm against HBase schema)
   * @return DataFrame with columns `userId` and `tagIds`
   */
  override def computeTag(fiveRuleDF: DataFrame, hbaseSource: DataFrame): DataFrame = {
    // Strip the dashes so "1990-01-01" becomes "19900101" and compares
    // correctly as a string against the rule bounds.
    val birthday: Column = regexp_replace(hbaseSource.col("birthday"), "-", "")
    val birthdayDF: DataFrame = hbaseSource.select($"id".as("userId"), birthday.as("birthday"))

    // Parse each rule "start-end" into (tagId, start, end). Stays distributed:
    // the original collect()-to-driver round trip was unnecessary, map + toDF
    // produces the same small DataFrame without leaving the cluster.
    val fiveDF: DataFrame = fiveRuleDF.map(row => {
      // NOTE(review): `id` is read as String directly; the original's extra
      // .toString on an already-String value was redundant.
      val id: String = row.getAs[String]("id")
      val arr: Array[String] = row.getAs[String]("rule").split("-")
      (id, arr(0), arr(1))
    }).toDF("tagIds", "start", "end")

    // Explicit inner-join condition instead of cross join + where: same rows,
    // but it does not rely on predicate pushdown (nor on
    // spark.sql.crossJoin.enabled in older Spark versions).
    birthdayDF
      .join(fiveDF, $"birthday".between($"start", $"end"))
      .select($"userId", $"tagIds")
  }

  /** Entry point: load sources, compute the tag, merge with existing tags, save. */
  def main(args: Array[String]): Unit = {
    val mysqlSource: DataFrame = loadMysqlSource()
    val fourRuleMap: Map[String, String] = getFourRuleData(mysqlSource)
    val fiveRuleDF: Dataset[Row] = getFiveRuleData(mysqlSource)
    val hbaseSource: DataFrame = loadHBaseSource(fourRuleMap)
    val newDF: DataFrame = computeTag(fiveRuleDF, hbaseSource)
    val oldDF: DataFrame = loadOldDF()
    val result: DataFrame = mergeTag(newDF, oldDF)
    saveResult(result)
  }
}
