package test


import java.util.Properties

import bean.{HBaseMeta, TagRule}
import org.apache.spark.SparkContext
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

object sexTag {

  /**
   * Gender-tag matching job.
   *
   * Reads the level-4 tag rule (id = 63) from MySQL to learn where the user data
   * lives in HBase, reads the level-5 tags (pid = 63) that map a gender value to a
   * tag id, loads the users from HBase, matches each user's gender against the
   * level-5 rules, and writes the resulting (userId, tagsId) pairs back to HBase.
   */
  def main(args: Array[String]): Unit = {
    // 1. Create a SparkSession used to read from both HBase and MySQL.
    val spark: SparkSession = SparkSession.builder().appName("sexTag").master("local[*]").getOrCreate()
    val sc: SparkContext = spark.sparkContext
    sc.setLogLevel("WARN")

    // 2. Connect to MySQL (url, table, properties).
    // NOTE: fixed the misspelled JDBC property "userUnicode" -> "useUnicode";
    // unknown keys are silently ignored by the driver, so the UTF-8 setting was not applied.
    val url = "jdbc:mysql://bd001:3306/tags_new?useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC&user=root&password=123456"
    val table = "tbl_basic_tag"
    val properties = new Properties()
    val mysqlConn: DataFrame = spark.read.jdbc(url, table, properties)

    // Implicit encoders and the ' column syntax.
    import spark.implicits._
    // Java <-> Scala collection conversions (for collectAsList results).
    import scala.collection.JavaConverters._
    // Spark SQL built-in functions (udf, ...).
    import org.apache.spark.sql.functions._

    // 3. Read the level-4 tag from MySQL; its rule describes how to reach HBase, e.g.:
    // inType=HBase##zkHosts=192.168.10.20##zkPort=2181##hbaseTable=tbl_users##family=detail##selectFields=id,gender
    val fourTag: Dataset[Row] = mysqlConn.select("id", "rule").where("id=63")
    val kvMap: Map[String, String] = fourTag.map(row => {
      // Split "k1=v1##k2=v2##..." into (key, value) pairs.
      row.getAs[String]("rule")
        .split("##")
        .map(entry => {
          val kv: Array[String] = entry.split("=")
          (kv(0), kv(1))
        })
    }).collect()
      // Fail with a clear message instead of an IndexOutOfBoundsException when the tag is missing.
      .headOption
      .getOrElse(throw new IllegalStateException("No level-4 tag rule found in tbl_basic_tag for id=63"))
      .toMap

    // Wrap the parsed rule into the HBaseMeta case class.
    val hMeta: HBaseMeta = HBaseMeta(
      kvMap.getOrElse("inType", ""),
      kvMap.getOrElse("zkHosts", ""),
      kvMap.getOrElse("zkPort", ""),
      kvMap.getOrElse("hbaseTable", ""),
      kvMap.getOrElse("family", ""),
      kvMap.getOrElse("selectFields", ""),
      kvMap.getOrElse("rowKey", "")
    )

    // 4. Read the level-5 tags (the gender-value -> tag-id mapping) from MySQL.
    val fiveTags: Dataset[Row] = mysqlConn.select('id, 'rule).where("pid=63")
    val fiveTagsList: List[TagRule] = fiveTags.map(row => {
      val id: Int = row.getAs("id").toString.toInt
      val rule: String = row.getAs("rule").toString
      // Wrap each level-5 tag in the TagRule case class.
      TagRule(id, rule)
    }).collectAsList() // java.util.List — convert so plain Scala code can traverse it
      .asScala.toList

    // 5. Load the user data from HBase as described by the level-4 rule.
    val hData: DataFrame = spark.read.format("tools.HBaseDataSource")
      .option(HBaseMeta.ZKHOSTS, hMeta.zkHosts)
      .option(HBaseMeta.ZKPORT, hMeta.zkPort)
      .option(HBaseMeta.HBASETABLE, hMeta.hbaseTable)
      .option(HBaseMeta.FAMILY, hMeta.family)
      .option(HBaseMeta.SELECTFIELDS, hMeta.selectFields)
      .load()

    // 6. Match the HBase gender column against the level-5 rules (e.g. 1 -> 82, 2 -> 83).
    // Returns 0 when no rule matches; if duplicate rules exist the last one wins
    // (same semantics as the original imperative loop, without the var).
    val genderToTagId = udf((gender: String) => {
      fiveTagsList.foldLeft(0)((acc, tag) => if (gender == tag.rule) tag.id else acc)
    })
    val resultTags: DataFrame = hData.select('id.as("userId"), genderToTagId('gender).as("tagsId"))

    // 7. Write the final (userId, tagsId) tags back to HBase.
    resultTags.write.format("tools.HBaseDataSource")
      .option(HBaseMeta.ZKHOSTS, hMeta.zkHosts)
      .option(HBaseMeta.ZKPORT, hMeta.zkPort)
      .option(HBaseMeta.HBASETABLE, "mytest")
      .option(HBaseMeta.FAMILY, "info")
      .option(HBaseMeta.SELECTFIELDS, "userId,tagsId")
      .save()

    // Release Spark resources before the JVM exits.
    spark.stop()
  }

}
