package cn.itcast.czxy.BD18

import java.util.Properties

import cn.itcast.czxy.BD18.bean.{TagesRule, Tagsfour}
import org.apache.spark.sql.{DataFrame, DataFrameReader, SparkSession}

object NationalityTag {
  /**
   * Spark job that computes the "nationality" profile tag for every user:
   *   1. reads the four-level tag rule (HBase connection meta) and the
   *      five-level tag rules (nationality value -> tag id) from MySQL,
   *   2. reads user data from HBase and maps each user's nationality to a tag id,
   *   3. merges the new tag ids with the historical tag ids already in HBase,
   *   4. writes the merged result back to HBase.
   *
   * @param args optional first argument: the four-level tag id to process
   *             (defaults to 72, the original hard-coded value)
   */
  def main(args: Array[String]): Unit = {
    // Generalized: allow the four-level tag id to be passed on the command line;
    // a missing or non-numeric argument falls back to the original value 72.
    val tagId: Int = args.headOption.flatMap(a => scala.util.Try(a.toInt).toOption).getOrElse(72)

    // Create the SparkSession (local mode, as in the original job).
    val spark = SparkSession.builder().master("local[*]").appName("NationalityTag").getOrCreate()

    // Read the tag-rule table from MySQL over JDBC.
    // FIX: the original URL said "userUnicode=true" — the MySQL Connector/J
    // property is "useUnicode"; the typo made the charset options ineffective.
    val url = "jdbc:mysql://bd001:3306/tags_new?useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC&user=root&password=123456"
    val tableName = "tbl_basic_tag"
    val properties = new Properties()
    val mysqlBD: DataFrame = spark.read.jdbc(url, tableName, properties)

    // Implicit conversions / SQL functions used below.
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // Fetch the four-level tag's rule string ("k1=v1##k2=v2##...") and parse
    // it into a Map. split("=", 2) keeps values that themselves contain '='
    // intact; headOption gives a clear error when the tag id does not exist
    // (the original collectAsList().get(0) threw an opaque index error).
    val sijibiaoqian: Map[String, String] = mysqlBD.select("rule").where(s"id=$tagId").map(row => {
      row.getAs("rule").toString.split("##").map(kv => {
        val parts: Array[String] = kv.split("=", 2)
        (parts(0), parts(1))
      })
    }).collect().headOption
      .getOrElse(throw new IllegalStateException(s"no rule row found for tag id=$tagId"))
      .toMap

    // Wrap the parsed rule map in a Tagsfour meta object (HBase connection info).
    val tagsfour: Tagsfour = toHbaseMeta(sijibiaoqian)

    // Five-level tags: the children of the four-level tag (pid = tagId);
    // each maps a nationality rule value to its tag id.
    val wujibiaoqian: List[TagesRule] = mysqlBD.select("id", "rule").where(s"pid=$tagId").map(row => {
      TagesRule(
        row.getAs("id").toString.toInt,
        row.getAs("rule").toString
      )
    }).collect().toList

    // Read the user data from HBase via the custom data source,
    // configured from the four-level tag meta.
    val hbaseBD = spark.read.format("cn.itcast.czxy.BD18.tools.HBaseDataSource")
      .option(Tagsfour.ZKHOSTS, tagsfour.zkHosts)
      .option(Tagsfour.ZKPORT, tagsfour.zkPort)
      .option(Tagsfour.HBASETABLE, tagsfour.hbaseTable)
      .option(Tagsfour.FAMILY, tagsfour.family)
      .option(Tagsfour.SELECTFIELDS, tagsfour.selectFields)
      .load()

    // UDF: map a user's nationality value to the matching five-level tag id
    // (0 when no rule matches, preserving the original sentinel). Null-safe:
    // a null nationality simply matches no rule.
    val gitId = udf((nationality: String) => {
      wujibiaoqian.find(_.rule == nationality).map(_.id).getOrElse(0)
    })

    // New tag per user: (userId, tagsId).
    val newTag: DataFrame = hbaseBD.select('id.as("userId"), gitId('nationality).as("tagsId"))

    // Read the historical user tags from HBase.
    val oidTag = spark.read.format("cn.itcast.czxy.BD18.tools.HBaseDataSource")
      .option(Tagsfour.ZKHOSTS, tagsfour.zkHosts)
      .option(Tagsfour.ZKPORT, tagsfour.zkPort)
      .option(Tagsfour.HBASETABLE, "test")
      .option(Tagsfour.FAMILY, "detail")
      .option(Tagsfour.SELECTFIELDS, "userId,tagsId")
      .load()

    // FIX: use a FULL OUTER join. The original inner join silently dropped
    // users present in only one of the two datasets — yet the isNotNull
    // selection and the empty-side handling in addTagId below were clearly
    // written to support exactly that case.
    val allTag = oidTag.join(newTag, oidTag("userId") === newTag("userId"), "full")

    // UDF: merge the old and new comma-separated tag-id lists, de-duplicating.
    // FIX: after an outer join the missing side is null, not "" — the original
    // `== ""` checks never fired for null and string concatenation produced a
    // literal "null,..." value. Option(...) normalizes null/empty on either side.
    val addTagId = udf((oidTagId: String, newTagId: String) => {
      val oldIds = Option(oidTagId).filter(_.nonEmpty)
      val newIds = Option(newTagId).filter(_.nonEmpty)
      (oldIds, newIds) match {
        case (None, None)       => ""
        case (Some(o), None)    => o
        case (None, Some(n))    => n
        case (Some(o), Some(n)) => (o + "," + n).split(",").distinct.mkString(",")
      }
    })

    // Pick whichever side's userId is present and merge the tag-id lists.
    val userTag = allTag.select(
      when(oidTag("userId").isNotNull, oidTag("userId"))
        .when(newTag("userId").isNotNull, newTag("userId"))
        .as("userId"),
      addTagId(oidTag("tagsId"), newTag("tagsId")).as("tagsId")
    )

    // Write the merged tags back to HBase (overwriting the previous state).
    userTag.write.format("cn.itcast.czxy.BD18.tools.HBaseDataSource")
      .option(Tagsfour.ZKHOSTS, tagsfour.zkHosts)
      .option(Tagsfour.ZKPORT, tagsfour.zkPort)
      .option(Tagsfour.HBASETABLE, "test")
      .option(Tagsfour.FAMILY, "detail")
      .option(Tagsfour.SELECTFIELDS, "userId,tagsId")
      .save()

    spark.stop()
  }

  /**
   * Builds a [[Tagsfour]] meta object from the parsed four-level rule map.
   * Missing keys default to the empty string, as in the original.
   *
   * @param sijibiaoqian the four-level tag's rule parsed into key/value pairs
   * @return the HBase connection/selection metadata
   */
  def toHbaseMeta(sijibiaoqian: Map[String, String]): Tagsfour = {
    Tagsfour(
      sijibiaoqian.getOrElse(Tagsfour.INTYPE, ""),
      sijibiaoqian.getOrElse(Tagsfour.ZKHOSTS, ""),
      sijibiaoqian.getOrElse(Tagsfour.ZKPORT, ""),
      sijibiaoqian.getOrElse(Tagsfour.HBASETABLE, ""),
      sijibiaoqian.getOrElse(Tagsfour.FAMILY, ""),
      sijibiaoqian.getOrElse(Tagsfour.SELECTFIELDS, ""),
      sijibiaoqian.getOrElse(Tagsfour.ROWKEY, "")
    )
  }
}
