package cn.itcast.tags.models.rule

import cn.itcast.tags.meta.HBaseMeta
import cn.itcast.tags.tools.HBaseTools
import org.apache.hadoop.hbase.client.{Put, Result}
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.sql.expressions.UserDefinedFunction
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.StringType
import org.apache.spark.storage.StorageLevel

object GenderModel extends Logging {

  /**
   * Rule-based "gender" tag model.
   *
   * Pipeline:
   *   1. Load tag metadata (business tag id = 318 and its child attribute tags)
   *      from MySQL via JDBC.
   *   2. Parse the business tag's `rule` field (key=value lines) into a Map and
   *      read the business data from the source it describes (HBase only, here).
   *   3. Join business data against the level-5 attribute-tag rules to assign
   *      a tagId per user.
   *   4. Merge the new tags with the existing `tbl_profile` HBase table and
   *      write the combined profile back.
   */
  def main(args: Array[String]): Unit = {

    // TODO: 1. Build the SparkSession instance
    val spark: SparkSession = {
      // 1.a. SparkConf with application settings
      val sparkConf = new SparkConf()
        .setAppName(this.getClass.getSimpleName.stripSuffix("$"))
        .setMaster("local[4]")
        .set("spark.sql.shuffle.partitions", "4")
        // Kryo serialization because HBase row types are shuffled/cached
        .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
        .registerKryoClasses(
          Array(classOf[ImmutableBytesWritable], classOf[Result], classOf[Put])
        )
      // 1.b. Builder pattern; reuse an existing session if one is active
      SparkSession.builder()
        .config(sparkConf)
        .getOrCreate()
    }

    import spark.implicits._

    // Sub-query (aliased as a table for the JDBC source) selecting the
    // business tag (id = 318) together with its child attribute tags (pid = 318).
    val tagTable: String =
      """
        |(
        |SELECT `id`,
        |       `name`,
        |       `rule`,
        |       `level`
        |FROM `profile_tags`.`tbl_basic_tag`
        |WHERE id = 318
        |UNION
        |SELECT `id`,
        |       `name`,
        |       `rule`,
        |       `level`
        |FROM `profile_tags`.`tbl_basic_tag`
        |WHERE pid = 318
        |ORDER BY `level` ASC, `id` ASC
        |) AS basic_tag
        |""".stripMargin

    val basicTagDF: DataFrame = spark.read
      .format("jdbc")
      .option("driver", "com.mysql.jdbc.Driver")
      .option("url", "jdbc:mysql://bigdata-cdh01.itcast.cn:3306/?useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC")
      .option("dbtable", tagTable)
      .option("user", "root")
      .option("password", "123456")
      .load()

    // Cached because it is scanned twice (level 4 rule, level 5 attribute tags).
    basicTagDF.persist(StorageLevel.MEMORY_AND_DISK)

    /*
    root
    |-- id: long (nullable = false)
    |-- name: string (nullable = true)
    |-- rule: string (nullable = true)
    |-- level: integer (nullable = true)
    */

    // Level 4 row is the business tag; its rule describes the data source.
    // NOTE(review): head() throws if no level-4 row exists — acceptable here,
    // the job cannot proceed without it.
    val tagRule: String = basicTagDF.filter($"level" === 4)
      .head()
      .getAs[String]("rule")

    /*
      inType=hbase
      zkHosts=bigdata-cdh01.itcast.cn
      zkPort=2181
      hbaseTable=tbl_tag_users
      family=detail
      selectFieldNames=id,gender
     */
    logInfo(s"================业务标签规则${tagRule}=======================")

    // Parse "key=value" lines. Blank lines are skipped; split with a limit of 2
    // so values containing '=' (e.g. URLs) are preserved intact.
    val ruleMap: Map[String, String] = tagRule.split("\\n")
      .map(_.trim)
      .filter(line => line.nonEmpty && line.contains("="))
      .map { line =>
        val Array(attrName, attrValue) = line.split("=", 2)
        (attrName, attrValue)
      }
      .toMap

    logWarning(s"============ { ${ruleMap.mkString(", ")} } ===========")

    // Read business data from the source described by the rule.
    // Only HBase is supported; anything else is a fatal configuration error.
    // (Fixes original bug: the RuntimeException was constructed but never thrown,
    // which left businessDF null and caused a misleading NPE downstream.)
    val businessDF: DataFrame = ruleMap("inType").toLowerCase match {
      case "hbase" =>
        val hbaseMeta: HBaseMeta = HBaseMeta.getHBaseMeta(ruleMap)
        HBaseTools.read(
          spark, hbaseMeta.zkHosts, hbaseMeta.zkPort,
          hbaseMeta.hbaseTable, hbaseMeta.family,
          hbaseMeta.selectFieldNames.split(",").toSeq
        )
      case _ =>
        throw new RuntimeException("业务标签未提供数据源信息，获取不到业务数据，无法 计算标签")
    }

    /*业务数据
      +---+------+
      |id |gender|
      +---+------+
      |1  |2     |
      |10 |2     |
      |100|2     |
     */
    businessDF.printSchema()
    businessDF.show(20, truncate = false)

    // Level 5 rows are attribute tags: rule value -> tagId mapping.
    val attrTagDF: DataFrame = basicTagDF
      .filter($"level" === 5)
      .select(
        $"id".as("tagId"),
        $"rule"
      )

    /**
     * 属性标签数据
     * +-----+----+
     * |tagId|rule|
     * +-----+----+
     * |319  |1   |
     * |320  |2   |
     * +-----+----+
     */
    attrTagDF.printSchema()
    attrTagDF.show(10, truncate = false)

    // Inner join: each user's gender value matched against the attribute rules.
    val joinDF: DataFrame = businessDF.join(
      attrTagDF, businessDF("gender") === attrTagDF("rule")
    )

    /**业务数据和属性标签相关联
     * +---+------+-----+----+
     * |id |gender|tagId|rule|
     * +---+------+-----+----+
     * |101|1     |319  |1   |
     * |103|1     |319  |1   |
     * |104|1     |319  |1   |
     */
    joinDF.printSchema()
    joinDF.show(10, truncate = false)

    // Keep only (uid, tagId); tagId cast to string for profile merging.
    val modelDF: DataFrame = joinDF.select(
      $"id".as("uid"),
      $"tagId".cast(StringType)
    )

    /**
     * +---+-----+
     * |uid|tagId|
     * +---+-----+
     * |101|319  |
     * |103|319  |
     */
    modelDF.printSchema()
    modelDF.show(10, truncate = false)

    // Tag metadata no longer needed: release the cache.
    basicTagDF.unpersist()

    // Existing user profiles (userId -> comma-separated tagIds).
    val profileDF: DataFrame = HBaseTools.read(
      spark, "bigdata-cdh01.itcast.cn", "2181", //
      "tbl_profile", "user", Seq("userId", "tagIds")
    )

    /**
     * +------+------+
     * |userId|tagIds|
     * +------+------+
     * +------+------+
     */
    profileDF.printSchema()
    profileDF.show(10, truncate = false)

    // Left join so users without an existing profile are kept (tagIds null).
    val mergeDF: DataFrame = modelDF.join(
      profileDF, modelDF("uid") === profileDF("userId"), "left"
    )
    /*
        +---+-----+------+------+
      |uid|tagId|userId|tagIds|
      +---+-----+------+------+
      |1  |320  |null  |null  |
      |102|320  |null  |null  |
      root
       |-- uid: string (nullable = true)
       |-- tagId: string (nullable = false)
       |-- userId: string (nullable = true)
       |-- tagIds: string (nullable = true)
     */
    mergeDF.printSchema()
    mergeDF.show(10, truncate = false)

    // UDF: append the new tagId to an existing tagIds CSV, de-duplicated.
    // Only invoked when tagIds is non-null (guarded by `when` below).
    val merge_tags_udf: UserDefinedFunction = udf(
      (tagId: String, tagIds: String) => {
        tagIds.split(",").:+(tagId).distinct.mkString(",")
      }
    )

    val newProfileDF: DataFrame = mergeDF.select(
      $"uid".as("userId"),
      when($"tagIds".isNull, $"tagId")
        .otherwise(merge_tags_udf($"tagId", $"tagIds")).as("tagIds")
    )

    /**
     * +------+------+
     * |userId|tagIds|
     * +------+------+
     * |1     |320   |
     * |102   |320   |
     * |107   |319   |
     * |110   |320   |
     * |111   |319   |
     */
    newProfileDF.printSchema()
    newProfileDF.show(10, truncate = false)

    // Persist the merged profiles back to HBase, keyed by userId.
    HBaseTools.write(
      newProfileDF, "bigdata-cdh01.itcast.cn", "2181", //
      "tbl_profile", "user", "userId"
    )

    spark.stop()
  }
}
