package cn.itcast.model.matchtag

import java.util.Properties

import cn.itcast.model.bean.{HBaseMeta, TagRule}
import org.apache.commons.lang3.StringUtils
import org.apache.spark.sql.expressions.UserDefinedFunction
import org.apache.spark.sql.{DataFrame, SparkSession}

object JobModel {

  /**
   * Batch job that computes the "job" (occupation) match tag for every user
   * and merges it into the existing user-profile tag table in HBase.
   *
   * Flow:
   *   1. Read tag definitions from MySQL (`tbl_basic_tag`).
   *   2. Row id = 610 holds the 4-level tag whose `rule` column encodes the
   *      HBase source metadata as "k1=v1##k2=v2##...".
   *   3. Rows with pid = 610 are the 5-level tags: `rule` is the raw job
   *      value, `id` is the tag id to assign.
   *   4. Map each user's `job` column to a tag id, full-join with the
   *      existing profile table, merge tag-id lists, and write back.
   */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession.builder()
      .appName("JobModel")
      .master("local[*]")
      .getOrCreate()
    import spark.implicits._
    import org.apache.spark.sql.functions._

    try {
      // Load the MySQL tag-definition table.
      // NOTE(review): credentials are embedded in the JDBC URL — move them to
      // configuration / a secret store before this goes anywhere shared.
      val url = "jdbc:mysql://bd001:3306/tags_new?useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC&user=root&password=123456"
      val table = "tbl_basic_tag"
      val mysqlSource: DataFrame = spark.read.jdbc(url, table, new Properties())

      // Parse the 4-level rule (id = 610) into HBase connection metadata.
      // Rule format: "k1=v1##k2=v2##...". split("=", 2) keeps any '=' inside
      // the value intact and tolerates empty values; malformed pairs are dropped.
      val metaRows: Array[HBaseMeta] = mysqlSource.select('rule).where("id = 610")
        .map(row => {
          val kvPairs: Map[String, String] = row.getAs[String]("rule")
            .split("##")
            .map(_.split("=", 2))
            .collect { case Array(k, v) => k -> v }
            .toMap
          HBaseMeta(kvPairs)
        }).collect()
      // Fail with a clear message instead of ArrayIndexOutOfBoundsException.
      require(metaRows.nonEmpty, "No 4-level tag rule found in tbl_basic_tag for id = 610")
      val meta: HBaseMeta = metaRows(0)

      // Load the 5-level tag rules: raw job value -> tag id.
      val fiveRuleList: List[TagRule] = mysqlSource
        .select('id, 'rule)
        .where("pid=610")
        .map(row => {
          val id: String = row.getAs[Int]("id").toString
          val rule: String = row.getAs[String]("rule")
          TagRule(id, rule)
        }).collect().toList

      // Read the source user data (id + job) from HBase.
      val hbaseSource: DataFrame = spark.read
        .format("cn.itcast.model.util.HbaseSource")
        .option(HBaseMeta.ZKHOSTS, meta.zkHosts)
        .option(HBaseMeta.ZKPORT, meta.zkPort)
        .option(HBaseMeta.HBASETABLE, meta.hbaseTable)
        .option(HBaseMeta.FAMILY, meta.family)
        .option(HBaseMeta.SELECTFIELDS, meta.selectFields)
        .load()

      // Map a job value to its tag id; "" when no rule matches.
      // filter + lastOption keeps the original last-match-wins semantics
      // in case the same rule value appears more than once.
      val getTag: UserDefinedFunction = udf((job: String) => {
        fiveRuleList.filter(_.rule == job).lastOption.map(_.id).getOrElse("")
      })

      val newDF: DataFrame = hbaseSource.select('id.as("userId"), getTag('job).as("tagIds"))

      // Load the existing user-profile table ("test34") so new tags can be
      // merged with previously assigned ones.
      val oldDF: DataFrame = spark.read
        .format("cn.itcast.model.util.HbaseSource")
        .option(HBaseMeta.ZKHOSTS, meta.zkHosts)
        .option(HBaseMeta.ZKPORT, meta.zkPort)
        .option(HBaseMeta.HBASETABLE, "test34")
        .option(HBaseMeta.FAMILY, meta.family)
        .option(HBaseMeta.SELECTFIELDS, "userId,tagIds")
        .load()

      // Full outer join: keep users present in either the new or the old table.
      val joinDF: DataFrame = newDF.join(oldDF, newDF.col("userId") === oldDF.col("userId"), "full")

      // Merge comma-separated tag-id lists, de-duplicating.
      // `.distinct` (unlike the previous Set round-trip) preserves insertion
      // order, so the merged string is deterministic across runs.
      val mergeTag: UserDefinedFunction = udf((newTag: String, oldTag: String) => {
        if (StringUtils.isBlank(newTag)) oldTag
        else if (StringUtils.isBlank(oldTag)) newTag
        else (oldTag.split(",") ++ newTag.split(",")).distinct.mkString(",")
      })

      // coalesce picks the non-null userId from either side of the full join.
      val result: DataFrame = joinDF.select(
        coalesce(newDF.col("userId"), oldDF.col("userId")).as("userId"),
        mergeTag(newDF.col("tagIds"), oldDF.col("tagIds")).as("tagIds")
      )
      result.show()

      // Write the merged tags back to the profile table.
      result.write
        .format("cn.itcast.model.util.HbaseSource")
        .option(HBaseMeta.ZKHOSTS, meta.zkHosts)
        .option(HBaseMeta.ZKPORT, meta.zkPort)
        .option(HBaseMeta.HBASETABLE, "test34")
        .option(HBaseMeta.FAMILY, "detail")
        .option(HBaseMeta.SELECTFIELDS, "userId,tagIds")
        .save()
    } finally {
      // Always release the SparkSession, even if the job fails.
      spark.stop()
    }
  }
}
