package cn.itcast.czxy.BD18

import java.util.Properties

import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}
import cn.itcast.czxy.BD18.bean.{TagesRule, Tagsfour}
object GenderTag {

  /**
   * Gender-tagging job.
   *
   * Reads the four-level tag rule (id = `uid`) from MySQL to locate the HBase
   * source table, reads the five-level tag rules (pid = `uid`) that map a
   * gender value to a tag id, matches each user's `gender` column against
   * those rules, and writes the resulting (userId, tagsId) pairs back to HBase.
   */
  def main(args: Array[String]): Unit = {
    // Id of the four-level tag whose rule describes the HBase source data.
    val uid = "4"

    val spark: SparkSession = SparkSession.builder()
      .master("local[*]")
      .appName("GenderTag")
      .getOrCreate()

    // NOTE: fixed JDBC parameter name "userUnicode" -> "useUnicode"
    // (the Connector/J property is useUnicode; the typo was silently ignored).
    val url = "jdbc:mysql://bd001:3306/tags_new?useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC&user=root&password=123456"
    val tableName = "tbl_basic_tag"
    val mysqlBD: DataFrame = spark.read.jdbc(url, tableName, new Properties())

    // Encoders / column syntax ('id, 'rule) for Dataset operations.
    import spark.implicits._
    // Spark SQL built-in functions (udf below).
    import org.apache.spark.sql.functions._

    // 3. Read the four-level tag rule to prepare the HBase read.
    //    Rule format: "k1=v1##k2=v2##..." -> Map(k1 -> v1, k2 -> v2, ...)
    val fourTagsDS: Dataset[Row] = mysqlBD.select("rule").where(s"id=${uid}")
    val fourMap: Map[String, String] = fourTagsDS
      .map { row =>
        row.getAs[String]("rule")
          .split("##")
          .map { kv =>
            val parts = kv.split("=")
            (parts(0), parts(1))
          }
      }
      .collect()
      .headOption // avoid IndexOutOfBoundsException when no rule row exists
      .getOrElse(throw new IllegalStateException(s"no four-level tag rule found for id=$uid"))
      .toMap

    val fourTagsRule: Tagsfour = new Tagsfour(
      fourMap.getOrElse(Tagsfour.INTYPE, ""),
      fourMap.getOrElse(Tagsfour.ZKHOSTS, ""),
      fourMap.getOrElse(Tagsfour.ZKPORT, ""),
      fourMap.getOrElse(Tagsfour.HBASETABLE, ""),
      fourMap.getOrElse(Tagsfour.FAMILY, ""),
      fourMap.getOrElse(Tagsfour.SELECTFIELDS, ""),
      fourMap.getOrElse(Tagsfour.ROWKEY, "")
    )
    println(fourTagsRule)

    // 4. Read the five-level tags (children of uid) used to match gender:
    //    each row is (tag id, rule value to compare against the gender column).
    val fiveTagsDS: Dataset[Row] = mysqlBD.select('id, 'rule).where(s"pid=${uid}")
    val fiveTageList: List[TagesRule] = fiveTagsDS
      .map { row =>
        TagesRule(row.getAs[Any]("id").toString.toInt, row.getAs[Any]("rule").toString)
      }
      .collect()
      .toList

    // 5. Read the user data from HBase as described by the four-level rule.
    //    Option keys use the same Tagsfour constants as the write path below,
    //    keeping both sides of the HBaseDataSource contract consistent.
    val oidUserTags: DataFrame = spark.read.format("cn.itcast.czxy.BD18.tools.HBaseDataSource")
      .option(Tagsfour.ZKHOSTS, fourTagsRule.zkHosts)
      .option(Tagsfour.ZKPORT, fourTagsRule.zkPort)
      .option(Tagsfour.HBASETABLE, fourTagsRule.hbaseTable)
      .option(Tagsfour.FAMILY, fourTagsRule.family)
      .option(Tagsfour.SELECTFIELDS, fourTagsRule.selectFields)
      .load()

    // UDF: map a gender value to the id of the matching five-level rule;
    // 0 when no rule matches (unknown gender). Rules are expected to be
    // distinct per gender value, so first-match equals last-match.
    val getGenTag = udf { (gen: String) =>
      fiveTageList.find(_.rule == gen).map(_.id).getOrElse(0)
    }

    // 6. Tag matching: (userId, tagsId).
    val userTags: DataFrame = oidUserTags.select('id.as("userId"), getGenTag('gender).as("tagsId"))

    userTags.show()

    // 7. Write the final tags back to HBase (test table, "detail" family).
    userTags.write.format("cn.itcast.czxy.BD18.tools.HBaseDataSource")
      .option(Tagsfour.ZKHOSTS, fourTagsRule.zkHosts)
      .option(Tagsfour.ZKPORT, fourTagsRule.zkPort)
      .option(Tagsfour.HBASETABLE, "test")
      .option(Tagsfour.FAMILY, "detail")
      .option(Tagsfour.SELECTFIELDS, "userId,tagsId")
      .save()

    // Release Spark resources before exiting (was previously commented out,
    // leaking the local SparkSession).
    spark.stop()
  }
}
