package cn.itcast.czxy.BD12.black

import java.util.Properties

import cn.itcast.czxy.BD12.black.bean.{HBaseMeta, TagRule}
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}



/**
 * 开发黑名单标签
 */
/**
 * Blacklist tag job.
 *
 * Pipeline:
 *   1. Read tag metadata from MySQL (`tbl_basic_tag`):
 *      - the level-4 tag (id = 61) whose `rule` field describes the HBase source,
 *      - the level-5 tags (pid = 61) mapping a rule value to a tag id.
 *   2. Read user data from HBase via the custom data source, compute each
 *      user's blacklist tag id, merge it with the historical tag ids already
 *      stored in HBase, and write the merged result back.
 */
object BlackTages {

  def main(args: Array[String]): Unit = {

    // 1. SparkSession used for both the MySQL metadata and the HBase user data.
    val spark: SparkSession = SparkSession.builder()
      .appName("BlackTages")
      .master("local[*]")
      .getOrCreate()

    // 2. MySQL connection settings.
    // NOTE(review): credentials are embedded in the JDBC URL — move them to
    // external configuration / a secrets store instead of source code.
    val url = "jdbc:mysql://bd001:3306/tags_new?useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC&user=root&password=123456"
    val table = "tbl_basic_tag" // MySQL table holding the tag definitions
    val properties = new Properties()
    val mysqlConn: DataFrame = spark.read.jdbc(url, table, properties)

    // 3. Level-4 tag rule (id = 61): describes where the HBase data lives.
    val fourDS: Dataset[Row] = mysqlConn.select("rule").where("id=61")

    // Implicit conversions for Dataset encoders / column symbols.
    import spark.implicits._
    // Java <-> Scala collection conversions (for collectAsList).
    import scala.collection.JavaConverters._
    // Spark SQL built-in functions (udf, when, ...).
    import org.apache.spark.sql.functions._

    // UDF merging historical tag ids with newly computed ones into a
    // de-duplicated comma-separated string. The full outer join below can
    // produce a null on either side, so nulls are normalized to "" first.
    // (The original code compared only against "" and had an unreachable
    // "both empty" branch; this version is null-safe and equivalent otherwise.)
    val getAllTags = udf((oldTagsId: String, newTagsId: String) => {
      val oldTags = Option(oldTagsId).getOrElse("")
      val newTags = Option(newTagsId).getOrElse("")
      if (oldTags.isEmpty) newTags
      else if (newTags.isEmpty) oldTags
      else (oldTags.split(",") ++ newTags.split(",")).distinct.mkString(",")
    })

    // Parse the rule string "k1=v1##k2=v2##..." into a Map.
    val fourMap: Map[String, String] = fourDS.map(row => {
      row.getAs("rule").toString.split("##")
        .map(kv => {
          val arr: Array[String] = kv.split("=")
          (arr(0), arr(1))
        })
    }).collectAsList().get(0).toMap

    // Wrap the rule map in the HBaseMeta case class.
    val hbaseMeta: HBaseMeta = getHBaseMeta(fourMap)
    println(hbaseMeta.selectFields)

    // 4. Level-5 tags: each row maps a rule value (e.g. "1") to a tag id (e.g. 82).
    val fiveTagsDS: Dataset[Row] = mysqlConn.select('id, 'rule).where("pid=61")

    // Collect the level-5 tags into a local List[TagRule] for UDF lookup.
    val fiveTageList: List[TagRule] = fiveTagsDS.map(row => {
      val id: Int = row.getAs("id").toString.toInt
      val rule: String = row.getAs("rule").toString
      TagRule(id, rule)
    }).collectAsList() // java.util.List[TagRule]
      .asScala.toList  // to scala List (needs scala.collection.JavaConverters._)

    // 5. Read the user data from HBase through the custom data source.
    val hBaseDatas: DataFrame = spark.read.format("cn.itcast.czxy.BD12.black.tools.HBaseDataSource")
      // hbaseMeta.zkHosts e.g. 192.168.10.20
      .option("zkHosts", hbaseMeta.zkHosts)
      // HBaseMeta.ZKPORT => "zkPort", hbaseMeta.zkPort e.g. 2181
      .option(HBaseMeta.ZKPORT, hbaseMeta.zkPort)
      .option(HBaseMeta.HBASETABLE, hbaseMeta.hbaseTable)
      .option(HBaseMeta.FAMILY, hbaseMeta.family)
      .option(HBaseMeta.SELECTFIELDS, hbaseMeta.selectFields)
      .load()

    hBaseDatas.show()

    // 6. Tag matching: UDF mapping a rule value (e.g. "1") to its tag id
    // (e.g. 82); returns 0 when no level-5 rule matches.
    val getTagId = udf((jobTag: String) =>
      fiveTageList.find(_.rule == jobTag).map(_.id).getOrElse(0)
    )

    // Compute the new blacklist tag per user.
    val blackTags: DataFrame = hBaseDatas.select('id.as("userId"), getTagId('is_blackList).as("tagsId"))
    blackTags.show()

    // Read the historical tags so new results merge with (not overwrite) them.
    val oldTag: DataFrame = spark.read.format("cn.itcast.czxy.BD12.black.tools.HBaseDataSource")
      .option("zkHosts", hbaseMeta.zkHosts)
      .option(HBaseMeta.ZKPORT, hbaseMeta.zkPort)
      .option(HBaseMeta.HBASETABLE, "test")
      .option(HBaseMeta.FAMILY, "detail")
      .option(HBaseMeta.SELECTFIELDS, "userId,tagsId")
      .load()

    oldTag.show()

    // Full outer join so users that exist on only one side are kept.
    // (The original inner join silently dropped users missing from either
    // side, which contradicted the isNotNull handling in the select below.)
    val allTags = blackTags.join(oldTag, blackTags("userId") === oldTag("userId"), "full")

    val fromw: DataFrame = allTags.select(
      // Both sides carry a userId column; keep whichever is non-null.
      when(oldTag.col("userId").isNotNull, oldTag.col("userId"))
        .otherwise(blackTags.col("userId"))
        .as("userId"),
      // Merge old and new tag id lists via the UDF defined above.
      getAllTags(oldTag.col("tagsId"), blackTags.col("tagsId")).as("tagsId")
    )

    // 7. Write the merged result back to HBase.
    fromw.write.format("cn.itcast.czxy.BD12.black.tools.HBaseDataSource")
      .option("zkHosts", hbaseMeta.zkHosts)
      .option(HBaseMeta.ZKPORT, hbaseMeta.zkPort)
      .option(HBaseMeta.HBASETABLE, "test")
      .option(HBaseMeta.FAMILY, "detail")
      .option(HBaseMeta.SELECTFIELDS, "userId,tagsId")
      .save()
  }

  /**
   * Build an [[HBaseMeta]] from the parsed level-4 rule map, defaulting every
   * missing key to the empty string.
   */
  def getHBaseMeta(fourMap: Map[String, String]): HBaseMeta = {
    HBaseMeta(
      fourMap.getOrElse(HBaseMeta.INTYPE, ""),
      fourMap.getOrElse(HBaseMeta.ZKHOSTS, ""),
      fourMap.getOrElse(HBaseMeta.ZKPORT, ""),
      fourMap.getOrElse(HBaseMeta.HBASETABLE, ""),
      fourMap.getOrElse(HBaseMeta.FAMILY, ""),
      fourMap.getOrElse(HBaseMeta.SELECTFIELDS, ""),
      fourMap.getOrElse(HBaseMeta.ROWKEY, "")
    )
  }
}
