package cn.itcast.model.matchtag

import java.util.Properties

import cn.itcast.model.bean.{HBaseMeta, TagRule}
import org.apache.commons.lang3.StringUtils
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

import scala.tools.scalap.scalax.util.StringUtil

/**
 * Computes the "job" (occupation) tag for every user profile.
 *
 * Pipeline:
 *   1. Read tag metadata from MySQL (`tbl_basic_tag`): the 4th-level rule
 *      (id=388) describes the HBase source; the 5th-level rules (pid=388)
 *      map raw job values to tag ids.
 *   2. Read user data from HBase via the custom data source.
 *   3. Compute each user's job tag, merge with previously stored tags,
 *      and write the result back to HBase.
 */
object JobModel {
  def main(args: Array[String]): Unit = {
    // Spark session (local mode is for development; make master configurable for prod).
    val spark = SparkSession.builder()
      .appName("JobModel")
      .master("local[*]")
      .getOrCreate()
    import spark.implicits._
    import org.apache.spark.sql.functions._

    try {
      // MySQL source holding the tag rule metadata.
      // SECURITY NOTE(review): credentials are hard-coded in the JDBC URL;
      // move user/password into `properties` populated from external config.
      val url: String = "jdbc:mysql://bd001:3306/tags_new?useUnicode=true&characterEncoding=UTF-8" +
        "&serverTimezone=UTC&user=root&password=123456"
      val table = "tbl_basic_tag"
      val properties = new Properties
      val mysqlDF: DataFrame = spark.read.jdbc(url, table, properties)

      // 4th-level rule (id=388): "##"-separated key=value pairs describing the
      // HBase source (zkHosts, zkPort, hbaseTable, family, selectFields).
      val metaRows: Array[HBaseMeta] = mysqlDF.select('rule).where("id=388")
        .map(row => {
          val kvPairs = row.getAs[String]("rule")
            .split("##")
            .flatMap { entry =>
              // limit=2 keeps '=' characters inside the value intact;
              // malformed entries without '=' are skipped instead of throwing.
              entry.split("=", 2) match {
                case Array(k, v) => Some(k -> v)
                case _           => None
              }
            }
          HBaseMeta(kvPairs.toMap)
        }).collect()
      // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException.
      require(metaRows.nonEmpty, "no 4th-level rule row found in tbl_basic_tag for id=388")
      val meta: HBaseMeta = metaRows(0)
      println(meta)

      // 5th-level rules (pid=388): each maps a raw job value (rule) to a tag id.
      val fiveRuleList: List[TagRule] = mysqlDF.select('id, 'rule).where("pid=388")
        .map(row => {
          val id = row.getAs[Int]("id").toString
          val rule = row.getAs[String]("rule")
          TagRule(id, rule)
        }).collect().toList
      println(fiveRuleList)

      // User data from HBase via the custom data source, driven by the 4th-level rule.
      val hbaseSource: DataFrame = spark.read
        .format("cn.itcast.model.utils.HBaseSource")
        .option(HBaseMeta.ZKHOSTS, meta.zkHosts)
        .option(HBaseMeta.ZKPORT, meta.zkPort)
        .option(HBaseMeta.HBASETABLE, meta.hbaseTable)
        .option(HBaseMeta.FAMILY, meta.family)
        .option(HBaseMeta.SELECTFIELDS, meta.selectFields)
        .load()

      // UDF: look up the tag id for a raw job value; "" when no rule matches.
      // lastOption preserves the original last-match-wins semantics should
      // duplicate rule values ever exist.
      val getTag = udf((job: String) =>
        fiveRuleList.filter(tagRule => job == tagRule.rule).lastOption
          .map(_.id).getOrElse(""))

      val newDF: DataFrame = hbaseSource.select('id.as("userId"), getTag('job).as("tagIds"))

      // Previously stored tags (read-modify-write against the "test_bs" table).
      val oldDF: DataFrame = spark.read
        .format("cn.itcast.model.utils.HBaseSource")
        .option(HBaseMeta.ZKHOSTS, meta.zkHosts)
        .option(HBaseMeta.ZKPORT, meta.zkPort)
        .option(HBaseMeta.HBASETABLE, "test_bs")
        .option(HBaseMeta.FAMILY, meta.family)
        .option(HBaseMeta.SELECTFIELDS, "userId,tagIds")
        .load()

      // Full outer join keeps users that appear in either the new or the old set.
      val joinDF: DataFrame = newDF.join(oldDF, newDF.col("userId") === oldDF.col("userId"), "full")

      // UDF: merge old and new comma-separated tag ids, de-duplicated.
      // StringUtils.isBlank handles the nulls produced by the outer join.
      val merge = udf((newTag: String, oldTag: String) =>
        if (StringUtils.isBlank(newTag)) oldTag
        else if (StringUtils.isBlank(oldTag)) newTag
        else (oldTag + "," + newTag).split(",").toSet.mkString(","))

      val result: DataFrame = joinDF.select(
        // coalesce picks the new userId when present, otherwise the old one —
        // equivalent to the previous chained when(...).when(...) expression.
        coalesce(newDF.col("userId"), oldDF.col("userId")).as("userId"),
        merge(newDF.col("tagIds"), oldDF.col("tagIds")).as("tagIds")
      )
      result.show()

      // Persist the merged tags back to HBase.
      result.write
        .format("cn.itcast.model.utils.HBaseSource")
        .option(HBaseMeta.ZKHOSTS, meta.zkHosts)
        .option(HBaseMeta.ZKPORT, meta.zkPort)
        .option(HBaseMeta.HBASETABLE, "test_bs")
        .option(HBaseMeta.FAMILY, "detail")
        .option(HBaseMeta.SELECTFIELDS, "userId,tagIds")
        .save()
    } finally {
      // Always release the SparkSession, even when the job fails part-way.
      spark.stop()
    }
  }
}
