package cn.itcast.czxy

import java.util.Properties

import bean.{HBaseMeta, TagRule}
import org.apache.spark.SparkContext
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

object JobTag {

  /**
   * Computes the "job" profile tag for every user.
   *
   * Flow:
   *   1. Read tag definitions from MySQL table `tbl_basic_tag`.
   *   2. The level-4 tag (id = 66) describes where the source data lives in HBase.
   *   3. The level-5 tags (pid = 66) each map a raw job value to a tag id.
   *   4. Load the user data from HBase, translate each user's job into a tag id,
   *      merge with previously computed tags, and write the result back to HBase.
   */
  def main(args: Array[String]): Unit = {

    // 1. Create the SparkSession in local mode and reduce log noise.
    val spark: SparkSession = SparkSession.builder.appName("JobTag").master("local[*]").getOrCreate
    val sc: SparkContext = spark.sparkContext
    sc.setLogLevel("WARN")

    // 2. Connect to the MySQL database holding the tag definitions.
    //    FIX: the option was misspelled "userUnicode"; the MySQL Connector/J
    //    property is "useUnicode", so the misspelled key was silently ignored.
    val url = "jdbc:mysql://bd001:3306/tags_new?useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC&user=root&password=123456"
    val table = "tbl_basic_tag"
    val properties = new Properties()
    val mysqlConn: DataFrame = spark.read.jdbc(url, table, properties)

    // Implicit conversions: Dataset encoders, Java<->Scala collections,
    // and Spark SQL built-in functions (udf, when, ...).
    import spark.implicits._
    import scala.collection.JavaConverters._
    import org.apache.spark.sql.functions._

    // 3. Read the level-4 tag rule (id = 66). Its "rule" column is a
    //    "key1=value1##key2=value2" string describing the HBase source;
    //    parse it into a Map for easy lookup.
    val fourDS: Dataset[Row] = mysqlConn.select('rule).where("id=66")
    val fourMap: Map[String, String] = fourDS.map(row => {
      // Split entries on "##", then each entry on "=" into (key, value).
      row.getAs("rule").toString.split("##")
        .map(line => {
          val arr: Array[String] = line.split("=")
          (arr(0), arr(1))
        })
    }).collectAsList().get(0).toMap

    // Convert the rule map into the HBaseMeta case class.
    // (val + lowerCamelCase: it is never reassigned, and the old name
    // "HbaseMeta" shadowed the HBaseMeta companion object confusingly.)
    val hbaseMeta: HBaseMeta = getHBaseMeta(fourMap)

    // 4. Read the level-5 tags (pid = 66): each row maps a raw job value
    //    ("rule") to the tag id to assign.
    val fiveDS: Dataset[Row] = mysqlConn.select('id, 'rule).where("pid=66")
    val fiveList: List[TagRule] = fiveDS.map(row => {
      val id: Int = row.getAs("id").toString.toInt
      val rule: String = row.getAs("rule").toString
      TagRule(id, rule)
    }).collectAsList().asScala.toList

    // 5. Load the user source data from HBase via the custom data source.
    val HBaseDF: DataFrame = spark.read.format("tools.HBaseDataSource")
      .option(HBaseMeta.ZKHOSTS, hbaseMeta.zkHosts)
      .option(HBaseMeta.ZKPORT, hbaseMeta.zkPort)
      .option(HBaseMeta.HBASETABLE, hbaseMeta.hbaseTable)
      .option(HBaseMeta.FAMILY, hbaseMeta.family)
      .option(HBaseMeta.SELECTFIELDS, hbaseMeta.selectFields)
      .load()

    // UDF: translate a raw job value into its level-5 tag id
    // (0 when no rule matches).
    val getTags = udf((hJob: String) => {
      var id = 0
      for (ftr <- fiveList) {
        if (ftr.rule == hJob) {
          id = ftr.id
        }
      }
      id
    })

    // 6. Apply the rule match: one row per user with the newly computed tag.
    val JobTagResult: DataFrame = HBaseDF.select('id.as("userId"), getTags('job).as("tagsId"))
    JobTagResult.show(20)

    // UDF: merge the historical tag string with the newly computed one,
    // de-duplicating ids so repeated runs do not accumulate duplicates.
    // FIX: with an outer join either side can be null, and the original
    // "== empty-string" checks never matched null; the combined "both empty"
    // branch was also unreachable because the single-empty branches were
    // tested first. Null-safe checks, both-empty tested first.
    val getAllTags = udf((historyTagId: String, newTagId: String) => {
      val historyEmpty = historyTagId == null || historyTagId.isEmpty
      val newEmpty = newTagId == null || newTagId.isEmpty
      if (historyEmpty && newEmpty) {
        ""
      } else if (historyEmpty) {
        newTagId
      } else if (newEmpty) {
        historyTagId
      } else {
        // Concatenate history + new (repeated runs may repeat ids),
        // then split on "," and de-duplicate.
        val allTags: String = historyTagId + "," + newTagId
        allTags.split(",").distinct.mkString(",")
      }
    })

    // 7. Solve the tag-overwrite problem: merge with previously written tags.
    // a) Read the already-computed tags from the "test" result table.
    val historyTag: DataFrame = spark.read.format("tools.HBaseDataSource")
      .option(HBaseMeta.ZKHOSTS, hbaseMeta.zkHosts)
      .option(HBaseMeta.ZKPORT, hbaseMeta.zkPort)
      .option(HBaseMeta.HBASETABLE, "test")
      .option(HBaseMeta.FAMILY, "detail")
      .option(HBaseMeta.SELECTFIELDS, "userId,tagsId")
      .load()

    // b) Merge the new tags into the history.
    //    FIX: this must be a FULL OUTER join — the default inner join silently
    //    dropped users present on only one side (new users never got written,
    //    historical users absent from this batch were erased), which is exactly
    //    the case the isNotNull/when selection below is written to handle.
    val JoinTags: DataFrame = historyTag.join(
      JobTagResult, historyTag("userId") === JobTagResult("userId"), "full")
    val updateTags: DataFrame = JoinTags.select(
      // Take the userId from whichever side of the outer join is present.
      when(historyTag.col("userId").isNotNull, historyTag.col("userId"))
        .when(JobTagResult.col("userId").isNotNull, JobTagResult.col("userId"))
        .as("userId"),
      // Merge the two tagsId columns via the UDF above.
      getAllTags(historyTag.col("tagsId"), JobTagResult.col("tagsId")).as("tagsId")
    )

    // 8. c) Overwrite-write the merged result back to HBase.
    updateTags.write.format("tools.HBaseDataSource")
      .option(HBaseMeta.ZKHOSTS, hbaseMeta.zkHosts)
      .option(HBaseMeta.ZKPORT, hbaseMeta.zkPort)
      .option(HBaseMeta.HBASETABLE, "test")
      .option(HBaseMeta.FAMILY, "detail")
      .option(HBaseMeta.SELECTFIELDS, "userId,tagsId")
      .save()
  }

  /**
   * Builds an [[HBaseMeta]] from the parsed level-4 rule map.
   *
   * @param fourMap key/value pairs parsed from the level-4 tag's rule string
   * @return an HBaseMeta whose missing fields default to the empty string
   */
  def getHBaseMeta(fourMap: Map[String, String]): HBaseMeta = {
    // Local helper: look up a meta key, defaulting to "" when absent.
    def field(key: String): String = fourMap.getOrElse(key, "")
    HBaseMeta(
      field(HBaseMeta.INTYPE),
      field(HBaseMeta.ZKHOSTS),
      field(HBaseMeta.ZKPORT),
      field(HBaseMeta.HBASETABLE),
      field(HBaseMeta.FAMILY),
      field(HBaseMeta.SELECTFIELDS),
      field(HBaseMeta.ROWKEY)
    )
  }

}
