package cn.itcast.czxy.BD18.bean

import java.util.Properties

import cn.itcast.czxy.BD18.test._
import com.typesafe.config.{Config, ConfigFactory}
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}


trait BaseMode {

  /** Name used for the Spark application (supplied by the concrete model). */
  def setAppName: String

  /** Primary key of this model's level-4 tag rule row in the MySQL rule table. */
  def setLeven4Id: Int

  // Local-mode session; subclasses only provide the application name.
  val spark: SparkSession = SparkSession.builder().master("local[*]").appName(setAppName).getOrCreate()

  // JDBC connection settings loaded from application.conf.
  // NOTE: vals, not vars — these are never reassigned.
  private val config: Config = ConfigFactory.load()
  private val url = config.getString("jdbc.mysql.url")
  private val tableName = config.getString("jdbc.mysql.tableName")

  // Implicit encoders and the 'col symbol syntax for Datasets.
  import spark.implicits._
  // Java <-> Scala collection conversions.
  import scala.collection.JavaConverters._
  // Spark SQL built-in functions (udf, when, ...).
  import org.apache.spark.sql.functions._

  /**
   * Template method: load the rule table from MySQL, resolve the level-4
   * (connection/meta) and level-5 (value) rules, read the source data from
   * HBase, then let the concrete model compute the new tags.
   */
  def exec() = {
    val mysqlDF: DataFrame = getMysqlDF()
    val leve4: Tagsfour = getLeve4(mysqlDF)
    val leve5: DataFrame = getLeve5(mysqlDF)

    val hbaseDF: DataFrame = getHbaseDF(leve4)
    val newTag: DataFrame = getNewTag(leve5, hbaseDF)

//    getAllTags(leve4,newTag)
  }

  /** Read the whole tag-rule table from MySQL over JDBC. */
  def getMysqlDF() = {
    spark.read.jdbc(url, tableName, new Properties())
  }

  /**
   * Parse the level-4 rule row (id == setLeven4Id) into a [[Tagsfour]].
   *
   * The "rule" column is encoded as "k1=v1##k2=v2##..."; entries are split
   * on "##" and each entry on "=" to build a key/value map. Missing keys
   * default to the empty string.
   */
  def getLeve4(mysqlBD: DataFrame) = {
    val fourTagsDS: Dataset[Row] = mysqlBD.select("rule").where(s"id=${setLeven4Id}")
    // Split on "##" first, then each piece on "=".
    val fourMap: Map[String, String] = fourTagsDS.map(row => {
      row.getAs("rule").toString.split("##")
        .map(kv => {
          val pair: Array[String] = kv.split("=")
          (pair(0), pair(1))
        })
    }).collectAsList().get(0).toMap

    val fourTagsRule: Tagsfour = new Tagsfour(
      fourMap.getOrElse(Tagsfour.INTYPE, ""),
      fourMap.getOrElse(Tagsfour.ZKHOSTS, ""),
      fourMap.getOrElse(Tagsfour.ZKPORT, ""),
      fourMap.getOrElse(Tagsfour.HBASETABLE, ""),
      fourMap.getOrElse(Tagsfour.FAMILY, ""),
      fourMap.getOrElse(Tagsfour.SELECTFIELDS, ""),
      fourMap.getOrElse(Tagsfour.ROWKEY, "")
    )
    fourTagsRule
  }

  /** Level-5 rules: all child rows of the level-4 rule (pid == setLeven4Id). */
  def getLeve5(mysqlBD: DataFrame) = {
    mysqlBD.select('rule, 'id).where(s"pid=${setLeven4Id}").toDF()
  }

  /** Read the source data from HBase using the level-4 rule's connection info. */
  def getHbaseDF(fourTagsRule: Tagsfour) = {
    val oidUserTags: DataFrame = spark.read.format("cn.itcast.czxy.BD18.tools.HBaseDataSource")
      .option("zkHosts", fourTagsRule.zkHosts)
      .option("zkPort", fourTagsRule.zkPort)
      // source table
      .option("hbaseTable", fourTagsRule.hbaseTable)
      .option("family", fourTagsRule.family)
      .option("selectFields", fourTagsRule.selectFields)
      .load()
    oidUserTags
  }

  /** Compute the new tag per user; implemented by each concrete model. */
  def getNewTag(leve5: DataFrame, hbaseDF: DataFrame): DataFrame

  /**
   * Merge newly computed tags with the tags already stored in the HBase
   * "test" table, write the merged result back, and stop the session.
   */
  def getAllTags(tagsfour: Tagsfour, userTags: DataFrame) = {
    val oidUserTags: DataFrame = spark.read.format("cn.itcast.czxy.BD18.tools.HBaseDataSource")
      .option("zkHosts", tagsfour.zkHosts)
      .option("zkPort", tagsfour.zkPort)
      // table holding the previously stored tags
      .option("hbaseTable", "test")
      .option("family", "detail")
      .option("selectFields", "userId,tagsId")
      .load()

    // Merge the old and new comma-separated tag-id lists, de-duplicating.
    // FIX: the original had an unreachable "both empty" branch (oid == ""
    // matched first) and was not null-safe — a null column value would have
    // leaked "null" into the merged string via concatenation.
    val addtag = udf((oid: String, neww: String) => {
      val oldTags = Option(oid).getOrElse("")
      val newTags = Option(neww).getOrElse("")
      if (oldTags.isEmpty) newTags                   // covers both-empty -> ""
      else if (newTags.isEmpty) oldTags
      else (oldTags.split(",") ++ newTags.split(",")).distinct.mkString(",")
    })

    // NOTE(review): this is an inner join, so the isNotNull cascade below is
    // redundant; an outer join may have been intended — confirm with caller.
    val alltages: DataFrame = oidUserTags.join(userTags, oidUserTags("userId") === userTags("userId"))
    val upTag: DataFrame = alltages.select(
      when(oidUserTags.col("userId").isNotNull, oidUserTags("userId"))
        .when(userTags.col("userId").isNotNull, userTags("userId"))
        .as("userId"),
      addtag(oidUserTags("tagsId"), userTags("tagsId")).as("tagsId")
    )
    upTag.repartition(4).write.format("cn.itcast.czxy.BD18.tools.HBaseDataSource")
      .option("zkHosts", tagsfour.zkHosts)
      .option("zkPort", tagsfour.zkPort)
      // destination table for the merged tags
      .option("hbaseTable", "test")
      .option("family", "detail")
      .option("selectFields", "userId,tagsId")
      .save()
    spark.stop()
  }
}
