package cn.itcast.czxy

import java.util.Properties

import bean.HBaseMeta
import org.apache.spark.SparkContext
import org.apache.spark.sql.expressions.UserDefinedFunction
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

object payTypeTwo {

  /**
   * Batch job: for every member, find the most frequently used payment type,
   * match it against the five-level tag rules stored in MySQL (pid = 97),
   * merge the resulting tag with the member's historical tags in HBase,
   * and write the merged tag list back to HBase.
   */
  def main(args: Array[String]): Unit = {
    // 1. Create the SparkSession used to read both MySQL and HBase.
    val spark: SparkSession = SparkSession.builder()
      .appName("payType")
      .master("local[*]")
      .getOrCreate()
    val sc: SparkContext = spark.sparkContext
    sc.setLogLevel("WARN")

    // 2. Connect to MySQL.
    // FIX: the original URL used "userUnicode"; the Connector/J property is
    // "useUnicode" — the typo'd parameter was silently ignored.
    val url = "jdbc:mysql://bd001:3306/tags_new?useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC&user=root&password=123456"
    val table = "tbl_basic_tag"
    val properties = new Properties()
    val mysqlConn: DataFrame = spark.read.jdbc(url, table, properties)

    // Implicit conversions for Dataset operations and column syntax ('col).
    import spark.implicits._
    // Built-in SQL functions (udf, when, ...).
    import org.apache.spark.sql.functions._

    // 3. Read the four-level tag rule (id = 97) and parse it into a Map.
    //    Rule format example:
    //    inType=HBase##zkHosts=192.168.10.20##zkPort=2181##hbaseTable=tbl_orders##family=detail##selectFields=memberId,paymentCode
    val fourRow: Dataset[Row] = mysqlConn.select('rule).where("id=97")
    val fourMap: Map[String, String] = fourRow.map(row => {
      row.getAs("rule").toString
        // split into "key=value" fragments
        .split("##")
        .map(kv => {
          // split each fragment into (key, value)
          val arr: Array[String] = kv.split("=")
          (arr(0), arr(1))
        })
    }).collectAsList().get(0).toMap

    // Wrap the parsed rule map into a typed HBase connection descriptor.
    val hBaseMeta: HBaseMeta = getHBaseMeta(fourMap)

    // 4. Read the five-level tag rows (pid = 97): each row maps a
    //    paymentCode rule to its tag id.
    val fiveRow: Dataset[Row] = mysqlConn.select('id, 'rule).where("pid=97")
    val fiveDF: DataFrame = fiveRow.map(row => {
      val id: String = row.getAs("id").toString
      val rule: String = row.getAs("rule").toString
      (rule, id)
    }).toDF("rule", "id")

    // 5. Load the order data from HBase via the custom data source.
    val hBaseData: DataFrame = spark.read.format("tools.HBaseDataSource")
      .option(HBaseMeta.ZKHOSTS, hBaseMeta.zkHosts)
      .option(HBaseMeta.ZKPORT, hBaseMeta.zkPort)
      .option(HBaseMeta.HBASETABLE, hBaseMeta.hbaseTable)
      .option(HBaseMeta.FAMILY, hBaseMeta.family)
      .option(HBaseMeta.SELECTFIELDS, hBaseMeta.selectFields)
      .load()

    // Count payments per (memberId, paymentCode), then keep each member's
    // top-ranked payment code (row_number = 1 by descending count).
    hBaseData.createOrReplaceTempView("HBaseData")

    val hBasePayRk: DataFrame = spark.sql(
      """
        |SELECT rank_tmp.memberId,rank_tmp.paymentCode
        |FROM
        |(SELECT pay_count.memberId,pay_count.paymentCode,
        |row_number() over(PARTITION BY pay_count.memberId ORDER BY pay_count.counts DESC) rk
        |FROM
        |(SELECT memberId,paymentCode,COUNT(paymentCode) AS counts
        |FROM HBaseData
        |GROUP BY memberId,paymentCode)pay_count)rank_tmp
        |WHERE rank_tmp.rk=1
        |""".stripMargin).toDF("memberId", "paymentCode")

    // 6. Match each member's dominant payment code against the five-level
    //    rules to obtain the new tag per user.
    val newPayTypeTagDF: DataFrame = hBasePayRk.join(fiveDF, fiveDF("rule") === hBasePayRk("paymentCode"))
      .select('memberId as "userId", 'id as "tagsId")

    // 7. Merge with historical tags and write the result back to HBase.
    //    Read the history table first so old tags can be preserved.
    val historyDF: DataFrame = spark.read.format("tools.HBaseDataSource")
      .option(HBaseMeta.ZKHOSTS, hBaseMeta.zkHosts)
      .option(HBaseMeta.ZKPORT, hBaseMeta.zkPort)
      .option(HBaseMeta.HBASETABLE, "test")
      .option(HBaseMeta.FAMILY, "detail")
      .option(HBaseMeta.SELECTFIELDS, "userId,tagsId")
      .load()

    // FIX: the original joined on newPayTypeTagDF("userID") (wrong case),
    // which only resolved because Spark is case-insensitive by default.
    // NOTE(review): this is an inner join, so users present in only one of
    // the two frames are dropped — confirm whether a full outer join with
    // coalesce on userId is the intended semantics.
    val joinData: DataFrame = historyDF.join(newPayTypeTagDF, historyDF("userId") === newPayTypeTagDF("userId"))

    // Merge old and new comma-separated tag id lists, removing duplicates.
    // FIX: the original tested the "both empty" case AFTER the single-empty
    // cases, making that branch unreachable; also guard against nulls that
    // can surface from the join.
    val get_add_distinct: UserDefinedFunction = udf((historyTagsId: String, newPayTypeTagsId: String) => {
      val oldTags: String = Option(historyTagsId).getOrElse("")
      val newTags: String = Option(newPayTypeTagsId).getOrElse("")
      if (oldTags.isEmpty) {
        newTags
      } else if (newTags.isEmpty) {
        oldTags
      } else {
        (oldTags + "," + newTags).split(",").distinct.mkString(",")
      }
    })

    val updateTags: DataFrame = joinData.select(
      when(historyDF("userId").isNotNull, historyDF("userId")) as "userId",
      get_add_distinct(historyDF("tagsId"), newPayTypeTagDF("tagsId")) as "tagsId")

    // Write the merged tags back to HBase.
    // FIX: the original set FAMILY twice ("detail" then "userId,tagsId") and
    // never set SELECTFIELDS, so the writer saw a bogus column family and no
    // field list; the second option is clearly meant to be SELECTFIELDS.
    updateTags.write.format("tools.HBaseDataSource")
      .option(HBaseMeta.ZKHOSTS, hBaseMeta.zkHosts)
      .option(HBaseMeta.ZKPORT, hBaseMeta.zkPort)
      .option(HBaseMeta.HBASETABLE, "test")
      .option(HBaseMeta.FAMILY, "detail")
      .option(HBaseMeta.SELECTFIELDS, "userId,tagsId")
      .save()
  }

  /**
   * Wraps the parsed four-level rule map into an [[HBaseMeta]] descriptor,
   * defaulting any missing key to the empty string.
   *
   * @param fourMap key/value pairs parsed from the four-level tag rule
   * @return a populated [[HBaseMeta]]
   */
  def getHBaseMeta(fourMap: Map[String, String]): HBaseMeta = {
    HBaseMeta(
      fourMap.getOrElse(HBaseMeta.INTYPE, ""),
      fourMap.getOrElse(HBaseMeta.ZKHOSTS, ""),
      fourMap.getOrElse(HBaseMeta.ZKPORT, ""),
      fourMap.getOrElse(HBaseMeta.HBASETABLE, ""),
      fourMap.getOrElse(HBaseMeta.FAMILY, ""),
      fourMap.getOrElse(HBaseMeta.SELECTFIELDS, ""),
      fourMap.getOrElse(HBaseMeta.ROWKEY, "")
    )
  }

}
