package cn.itcast.czxy.BD18

import java.util.Properties

import cn.itcast.czxy.BD18.bean.{TagesRule, Tagsfour}
import org.apache.spark.sql.expressions.UserDefinedFunction
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

object Job {

  /**
   * Entry point for the job-tag batch: reads tag metadata from MySQL,
   * user data from HBase, matches each user's `job` field against the
   * level-5 tag rules, merges the result with previously stored tags,
   * and writes the union back to HBase.
   */
  def main(args: Array[String]): Unit = {
    // 1. SparkSession used for both the MySQL (JDBC) and HBase reads.
    val spark = SparkSession.builder().appName("job").master("local[*]").getOrCreate()

    // 2. MySQL tag-metadata source.
    // FIX: "userUnicode" was a typo for the Connector/J property "useUnicode",
    // so the UTF-8 settings were silently ignored.
    val url = "jdbc:mysql://bd001:3306/tags_new?useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC&user=root&password=123456"
    val tableName = "tbl_basic_tag"
    // Credentials travel in the URL, so an empty Properties is sufficient.
    val mysqlConn: DataFrame = spark.read.jdbc(url, tableName, new Properties())

    import spark.implicits._
    import org.apache.spark.sql.functions._

    // 3. Level-4 tag (id = 65): its `rule` column encodes the HBase
    // connection metadata as "k1=v1##k2=v2##...". Parse it into a Map.
    val fourDS: Dataset[Row] = mysqlConn.select("rule").where("id=65")
    val fourMap: Map[String, String] = fourDS.map(row => {
      row.getAs("rule").toString.split("##").map(kv => {
        val srr: Array[String] = kv.split("=")
        (srr(0), srr(1))
      })
    }).collectAsList().get(0).toMap
    // FIX: reuse getHbaseMeta instead of duplicating the bean construction inline.
    val tagsfour: Tagsfour = getHbaseMeta(fourMap)

    // 4. Level-5 tags (children of tag 65): each carries the tag id and the
    // rule value a user's `job` field must equal to receive that tag.
    val fivetaglist: List[TagesRule] = mysqlConn.select("id", "rule").where("pid=65").map(row => {
      val id: Int = row.getAs("id").toString.toInt
      val rule: String = row.getAs("rule").toString
      TagesRule(id, rule)
    }).collect().toList

    // 5. Read user data from HBase using the level-4 metadata.
    // FIX: the format class name had a duplicated package prefix
    // ("cn.itcast.czxy.BD18.cn.itcast.czxy.BD18.tools...") and could never
    // class-load; the write path below already used the correct name.
    val userDF: DataFrame = spark.read.format("cn.itcast.czxy.BD18.tools.HBaseDataSource")
      .option("zkHosts", tagsfour.zkHosts)
      .option("zkPort", tagsfour.zkPort)
      .option("hbaseTable", tagsfour.hbaseTable)
      .option("family", tagsfour.family)
      .option("selectFields", tagsfour.selectFields)
      .load()

    // Map a job value to its level-5 tag id; 0 when no rule matches.
    val getid = udf((job: String) =>
      fivetaglist.find(_.rule == job).map(_.id).getOrElse(0)
    )

    val userTags = userDF.select('id.as("userId"), getid('job).as("tagsId"))

    // Previously stored user tags (the table this job writes back to).
    val oidUserTags: DataFrame = spark.read.format("cn.itcast.czxy.BD18.tools.HBaseDataSource")
      .option("zkHosts", tagsfour.zkHosts)
      .option("zkPort", tagsfour.zkPort)
      .option("hbaseTable", "test")
      .option("family", "detail")
      .option("selectFields", "userId,tagsId")
      .load()

    // Merge old and new comma-separated tag-id lists, de-duplicating ids.
    // FIX: null-safe now (the outer join below yields nulls for one-sided
    // rows) and the unreachable `neww == "" && oid == ""` branch is gone —
    // the individual empty-string cases already covered it.
    val addtag = udf((oid: String, neww: String) => {
      val oldTags = Option(oid).getOrElse("")
      val newTags = Option(neww).getOrElse("")
      if (oldTags.isEmpty) newTags
      else if (newTags.isEmpty) oldTags
      else (oldTags.split(",") ++ newTags.split(",")).distinct.mkString(",")
    })

    // FIX: full outer join. The original inner join made the isNotNull/when
    // selection below dead code and silently dropped users that existed on
    // only one side (new users with no stored tags, and vice versa).
    val alltages: DataFrame =
      oidUserTags.join(userTags, oidUserTags("userId") === userTags("userId"), "full")
    val upTag: DataFrame = alltages.select(
      // Take the userId from whichever side of the outer join is present.
      when(oidUserTags.col("userId").isNotNull, oidUserTags("userId"))
        .when(userTags.col("userId").isNotNull, userTags("userId"))
        .as("userId"),
      addtag(oidUserTags("tagsId"), userTags("tagsId")).as("tagsId")
    )

    // 6. Write the merged tags back to HBase.
    upTag.write.format("cn.itcast.czxy.BD18.tools.HBaseDataSource")
      .option("zkHosts", tagsfour.zkHosts)
      .option("zkPort", tagsfour.zkPort)
      .option("hbaseTable", "test")
      .option("family", "detail")
      .option("selectFields", "userId,tagsId")
      .save()

    spark.stop()
  }

  /**
   * Builds the level-4 HBase metadata bean from the parsed rule map.
   * Missing keys default to the empty string.
   */
  def getHbaseMeta(fourMap: Map[String, String]): Tagsfour = {
    Tagsfour(
      fourMap.getOrElse(Tagsfour.INTYPE, ""),
      fourMap.getOrElse(Tagsfour.ZKHOSTS, ""),
      fourMap.getOrElse(Tagsfour.ZKPORT, ""),
      fourMap.getOrElse(Tagsfour.HBASETABLE, ""),
      fourMap.getOrElse(Tagsfour.FAMILY, ""),
      fourMap.getOrElse(Tagsfour.SELECTFIELDS, ""),
      fourMap.getOrElse(Tagsfour.ROWKEY, "")
    )
  }
}
