package cn.itcasst.czxy

import java.text.SimpleDateFormat
import java.util.Properties

import bean.HBaseMeta
import org.apache.spark.SparkContext
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

/**
 * Age-segment tagging job.
 *
 * Reads tag rules (level-4 = HBase source metadata, level-5 = age ranges) from
 * MySQL, loads user birthdays from HBase via a custom data source, and matches
 * each user's birthday (normalized to yyyyMMdd) against the level-5 ranges to
 * produce (userId, tagsId) pairs. Step 7 (merging with historical tags) is not
 * implemented yet.
 */
object ageTagTwo {
  def main(args: Array[String]): Unit = {
    // 1. Create a SparkSession for reading from MySQL and HBase.
    val spark: SparkSession = SparkSession.builder.appName("JobTag").master("local[*]").getOrCreate
    val sc: SparkContext = spark.sparkContext
    sc.setLogLevel("WARN")

    // 2. Connect to MySQL.
    // FIX: the JDBC property was misspelled "userUnicode"; MySQL Connector/J
    // silently ignores unknown properties, so the correct name is "useUnicode".
    val url = "jdbc:mysql://bd001:3306/tags_new?useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC&user=root&password=123456"
    val table = "tbl_basic_tag"
    val properties = new Properties()
    val mysqlCoon: DataFrame = spark.read.jdbc(url, table, properties)

    // Implicit conversions for Dataset encoders and the 'col symbol syntax.
    import spark.implicits._
    // Spark SQL built-in functions (regexp_replace, etc.).
    import org.apache.spark.sql.functions._

    // 3. Read the level-4 tag rule (id=81) from MySQL; it describes how to
    // reach the HBase data, encoded as "k1=v1##k2=v2##...".
    val fourDS: Dataset[Row] = mysqlCoon.select('rule).where("id=81")
    val fourMap: Map[String, String] = fourDS.map(row => {
      // Split entries on "##", then each entry on "=" into (key, value).
      // NOTE(review): assumes every entry contains exactly one "=" — values
      // containing "=" would be truncated by arr(1); confirm rule format.
      row.getAs("rule").toString.split("##")
        .map(line => {
          val arr: Array[String] = line.split("=")
          (arr(0), arr(1))
        })
    }).collectAsList().get(0).toMap

    // Convert the rule map into the HBaseMeta case class.
    // FIX: was `var HbaseMeta` — immutable, and the UpperCamelCase name
    // shadowed the HBaseMeta companion object used just below.
    val hbaseMeta: HBaseMeta = getHBaseMeta(fourMap)

    // 4. Read level-5 tags (pid=81) from MySQL; each rule is "start-end"
    // (yyyyMMdd bounds) used to match an age segment.
    val fiveRow: Dataset[Row] = mysqlCoon.select('id, 'rule).where("pid=81")
    val fiveDF: DataFrame = fiveRow.map(row => {
      val id: String = row.getAs("id").toString
      val rule: String = row.getAs("rule").toString
      val arr: Array[String] = rule.split("-")
      var start = ""
      var end = ""
      // Malformed rules yield empty bounds rather than throwing.
      if (arr != null && arr.length == 2) {
        start = arr(0)
        end = arr(1)
      }
      (id, start, end)
    }).toDF("id", "start", "end")

    // 5. Load the user data from HBase according to the level-4 rule.
    val HBaseDatas: DataFrame = spark.read.format("tools.HBaseDataSource")
      .option(HBaseMeta.ZKHOSTS, hbaseMeta.zkHosts)
      .option(HBaseMeta.ZKPORT, hbaseMeta.zkPort)
      .option(HBaseMeta.HBASETABLE, hbaseMeta.hbaseTable)
      .option(HBaseMeta.FAMILY, hbaseMeta.family)
      .option(HBaseMeta.SELECTFIELDS, hbaseMeta.selectFields)
      .load()

    // Normalize birthday "yyyy-MM-dd" -> "yyyyMMdd" so string comparison
    // against the level-5 bounds is well-defined (fixed-width lexicographic).
    val HBaseDF: DataFrame = HBaseDatas.select('id as ("userId")
      , regexp_replace('birthday, "-", "") as ("birthday"))
    //HBaseDF.show(20)

    // 6. Match HBase users against the level-5 ranges to derive the new tag.
    fiveDF.createOrReplaceTempView("fiveDF")
    HBaseDF.createOrReplaceTempView("HBaseDF")
    // FIX: `f.id=NULL` is never true in SQL (NULL comparison with `=` yields
    // NULL); use IS NULL so unmatched users actually fall back to tag '86'.
    spark.sql(
      """
        |SELECT
        |h.userId,
        |CASE WHEN f.id IS NULL OR f.id=''
        |THEN '86'  ELSE f.id END AS tagsId
        |FROM
        |HBaseDF h
        |LEFT JOIN
        |fiveDF f
        |ON
        |h.birthday BETWEEN f.start AND f.end
        |""".stripMargin).show(1000)

    // DataFrame-API equivalent of the matching join (inner join: users with no
    // matching range are dropped here). Currently unused; kept for step 7.
    val newAgeTag: DataFrame = HBaseDF.join(fiveDF,
      HBaseDF.col("birthday")
      .between(fiveDF.col("start"), fiveDF.col("end")))
      .select('userId.as("userId"), 'id.as("tagsId"))

    // 7. TODO: de-duplicate across runs:
    //    a. read historical tags from HBase;
    //    b. join history with the new tags and select the merged fields.

  }

  /**
   * Builds an [[HBaseMeta]] from the parsed level-4 rule map, defaulting any
   * missing key to the empty string.
   *
   * @param fourMap key/value pairs parsed from the level-4 tag rule
   * @return the HBase connection/read metadata
   */
  def getHBaseMeta(fourMap: Map[String, String]): HBaseMeta = {
    HBaseMeta(
      fourMap.getOrElse(HBaseMeta.INTYPE, ""),
      fourMap.getOrElse(HBaseMeta.ZKHOSTS, ""),
      fourMap.getOrElse(HBaseMeta.ZKPORT, ""),
      fourMap.getOrElse(HBaseMeta.HBASETABLE, ""),
      fourMap.getOrElse(HBaseMeta.FAMILY, ""),
      fourMap.getOrElse(HBaseMeta.SELECTFIELDS, ""),
      fourMap.getOrElse(HBaseMeta.ROWKEY, "")
    )
  }
}
