package cn.itcast.czxy

import java.util.Properties

import bean.{HBaseMeta}
import org.apache.spark.SparkContext

import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

object JobTagTwo {

  /**
   * Batch job that assigns a "job/occupation" tag to every user.
   *
   * Flow:
   *   1. Read tag rules from MySQL (`tbl_basic_tag`).
   *   2. The four-level tag (id=66) describes WHERE the source data lives
   *      (an HBase table/family/fields spec), parsed into an [[HBaseMeta]].
   *   3. The five-level tags (pid=66) map a raw job value -> tag id.
   *   4. Read the user data from HBase, translate each user's job into a
   *      tag id, merge with the historical tags already stored in HBase
   *      (append + de-duplicate), and write the result back.
   */
  def main(args: Array[String]): Unit = {
    // 1. SparkSession used for both the MySQL (JDBC) and HBase reads.
    val spark: SparkSession = SparkSession.builder()
      .appName("JobTagTwo")
      .master("local[*]")
      .getOrCreate()
    val sc: SparkContext = spark.sparkContext
    sc.setLogLevel("WARN")

    // 2. MySQL connection.
    // FIX: the JDBC parameter is "useUnicode" (was misspelled "userUnicode",
    // which MySQL Connector/J silently ignores, so Unicode was never enabled).
    val url = "jdbc:mysql://bd001:3306/tags_new?useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC&user=root&password=123456"
    val table = "tbl_basic_tag"
    val properties = new Properties()
    val mysqlConn: DataFrame = spark.read.jdbc(url, table, properties)

    // Implicit conversions: Dataset encoders, Java<->Scala collections, SQL functions.
    import spark.implicits._
    import scala.collection.JavaConverters._
    import org.apache.spark.sql.functions._

    // 3. Four-level tag (id=66): a single "rule" string of the form
    // inType=HBase##zkHosts=192.168.10.20##zkPort=2181##hbaseTable=tbl_users##family=detail##selectFields=id,job
    // Split on "##" into key=value pairs and collect into a Map.
    val fourDS: Dataset[Row] = mysqlConn.select('rule).where("id=66")
    val fourMap: Map[String, String] = fourDS.map { row =>
      row.getAs("rule").toString
        .split("##")
        .map { pair =>
          val kv: Array[String] = pair.split("=")
          (kv(0), kv(1))
        }
    }.collectAsList().get(0).toMap

    // Wrap the rule map in the HBaseMeta case class for the HBase reader.
    val HBM: HBaseMeta = getHBaseMeta(fourMap)

    // 4. Five-level tags (pid=66): map rule (raw job value) -> tag id.
    val fiveDS: Dataset[Row] = mysqlConn.select('id, 'rule).where("pid=66")
    val fiveMap: Map[String, String] = fiveDS.map { row =>
      (row.getAs("rule").toString, row.getAs("id").toString)
    }.collectAsList().asScala.toMap

    // 5. Read the user data from HBase using the four-level rule.
    val HBaseDF: DataFrame = spark.read.format("tools.HBaseDataSource")
      .option(HBaseMeta.ZKHOSTS, HBM.zkHosts)
      .option(HBaseMeta.ZKPORT, HBM.zkPort)
      .option(HBaseMeta.HBASETABLE, HBM.hbaseTable)
      .option(HBaseMeta.FAMILY, HBM.family)
      .option(HBaseMeta.SELECTFIELDS, HBM.selectFields)
      .option(HBaseMeta.ROWKEY, HBM.rowKey)
      .load()

    // 6. udf: translate a raw job value into its tag id.
    // Single lookup (the original called getOrElse twice); unknown or empty
    // values map to the sentinel "error", as before.
    val GetTags = udf((job: String) =>
      fiveMap.get(job).filter(_.nonEmpty).getOrElse("error")
    )

    val newDataDF: DataFrame = HBaseDF.select('id as "userId", GetTags('job) as "tagsId")

    // udf: append the new tag id to the historical list and de-duplicate, so
    // re-running the job does not accumulate repeated ids.
    // FIX: removed the unreachable "both empty" branch (the first check already
    // covered it) and guarded against nulls, which would otherwise be
    // concatenated as the literal string "null".
    val getTagsId = udf((historyId: String, newId: String) => {
      val history = Option(historyId).getOrElse("")
      val fresh = Option(newId).getOrElse("")
      if (history.isEmpty) fresh
      else if (fresh.isEmpty) history
      else (history + "," + fresh).split(",").distinct.mkString(",")
    })

    // 7. Merge with the historical tags already stored in HBase.
    val historyDF: DataFrame = spark.read.format("tools.HBaseDataSource")
      .option(HBaseMeta.ZKHOSTS, HBM.zkHosts)
      .option(HBaseMeta.ZKPORT, HBM.zkPort)
      .option(HBaseMeta.HBASETABLE, "mytest")
      .option(HBaseMeta.FAMILY, "info")
      .option(HBaseMeta.SELECTFIELDS, "userId,tagsId")
      .load()

    // NOTE(review): this is an INNER join, so users present on only one side
    // are dropped; if new users must also be written, a full outer join with
    // coalesce on userId is needed — confirm intent before changing.
    val joinTagsDF: DataFrame = historyDF.join(
      newDataDF, historyDF.col("userId") === newDataDF.col("userId"))

    val updateTags: DataFrame = joinTagsDF.select(
      // After an inner join the key is never null, so the original
      // when(isNotNull) wrapper was dead logic and is dropped.
      historyDF("userId").as("userId"),
      getTagsId(historyDF("tagsId"), newDataDF("tagsId")).as("tagsId")
    )

    // 8. Write the merged tags back to HBase.
    updateTags.write.format("tools.HBaseDataSource")
      .option(HBaseMeta.ZKHOSTS, HBM.zkHosts)
      .option(HBaseMeta.ZKPORT, HBM.zkPort)
      .option(HBaseMeta.HBASETABLE, "mytest")
      .option(HBaseMeta.FAMILY, "info")
      .option(HBaseMeta.SELECTFIELDS, "userId,tagsId")
      .save()
  }

  /**
   * Builds an [[HBaseMeta]] from the parsed four-level rule map.
   * Missing keys default to the empty string.
   */
  def getHBaseMeta(fourMap: Map[String, String]): HBaseMeta = {
    def field(key: String): String = fourMap.getOrElse(key, "")
    HBaseMeta(
      field(HBaseMeta.INTYPE),
      field(HBaseMeta.ZKHOSTS),
      field(HBaseMeta.ZKPORT),
      field(HBaseMeta.HBASETABLE),
      field(HBaseMeta.FAMILY),
      field(HBaseMeta.SELECTFIELDS),
      field(HBaseMeta.ROWKEY)
    )
  }

}
