package cn.itcast.czxy.BD18

import java.util.{Date, Properties}

import cn.itcast.czxy.BD18.bean.Tagsfour
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

object LastTimeTag {

  /**
   * Spark job that tags each member with a "days since last order" label and
   * merges it into the existing user-tag table in HBase.
   *
   * Flow:
   *   1. Read tag metadata from MySQL table `tbl_basic_tag`.
   *   2. Parse the level-4 tag rule (id = 78) into HBase connection metadata ([[Tagsfour]]).
   *   3. Parse the level-5 tag rules (pid = 78) into (tagId, start, stop) day ranges.
   *   4. Read orders from HBase, keep each member's latest `finishTime`,
   *      convert it to "days ago" and match it against a level-5 range.
   *   5. Union the matched tag id with the user's existing tag ids
   *      (comma-separated, de-duplicated) and write the result back to HBase.
   */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession.builder().master("local[*]").appName("LastTimeTag").getOrCreate()
    // NOTE: fixed typo — the MySQL Connector/J property is "useUnicode", not "userUnicode";
    // the misspelled key was silently ignored by the driver.
    val url = "jdbc:mysql://bd001:3306/tags_new?useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC&user=root&password=123456"
    val tableName = "tbl_basic_tag"
    val properties = new Properties()
    val mysqlBD: DataFrame = spark.read.jdbc(url, tableName, properties)

    import spark.implicits._
    import org.apache.spark.sql.functions._

    // Level-4 tag rule (id = 78): "k1=v1##k2=v2##..." → Map(k1 -> v1, k2 -> v2, ...)
    // describing how to reach the HBase source table.
    val sijibq: Map[String, String] = mysqlBD.select("rule").where("id=78").map(row => {
      row.getAs("rule").toString.split("##").map(kv => {
        val strings = kv.split("=")
        (strings(0), strings(1))
      })
    }).collect().head.toMap

    val tagsfour: Tagsfour = getHbaseMeta(sijibq)

    // Level-5 tag rules (pid = 78): rule is a day range "start-stop".
    // A rule without '-' degenerates to the range [0, 0].
    val wujibq: DataFrame = mysqlBD.select("id", "rule").where("pid=78").map(row => {
      val id: String = row.getAs("id").toString
      val rule: String = row.getAs("rule").toString
      val parts = rule.split("-")
      if (parts.length > 1) {
        (id, parts(0).toInt, parts(1).toInt)
      } else {
        (id, 0, 0)
      }
    }).toDF("id", "start", "stop")

    // Order data from HBase: one (memberId, finishTime-epoch-seconds) row per order.
    val hbaseDF: DataFrame = spark.read.format("cn.itcast.czxy.BD18.tools.HBaseDataSource")
      .option(Tagsfour.ZKHOSTS, tagsfour.zkHosts)
      .option(Tagsfour.ZKPORT, tagsfour.zkPort)
      .option(Tagsfour.HBASETABLE, tagsfour.hbaseTable)
      .option(Tagsfour.FAMILY, tagsfour.family)
      .option(Tagsfour.SELECTFIELDS, tagsfour.selectFields)
      .load().map(row => {
        (row.getAs("memberId").toString, row.getAs("finishTime").toString.toLong)
      }).toDF("memberId", "finishTime")

    // Latest order time per member.
    val frame: DataFrame = hbaseDF.groupBy('memberId).agg(max('finishTime).as("finishTime"))

    // Days elapsed since the member's last order.
    // NOTE(review): the "- 280" offset presumably shifts stale test-data timestamps
    // into the configured tag ranges — confirm against the rule data before changing.
    val usersj: DataFrame = frame.select(
      'memberId.as("userId"),
      (datediff(current_timestamp(), from_unixtime('finishTime)) - 280).as("daytime")
    )

    // Range join: a user gets the level-5 tag whose [start, stop] contains daytime.
    val newUserDayTime: DataFrame = usersj
      .join(wujibq, usersj("daytime").between(wujibq("start"), wujibq("stop")))
      .select('userId, 'id.as("tagsId"))

    // Previously stored tags for each user.
    val oidUserTags: DataFrame = spark.read.format("cn.itcast.czxy.BD18.tools.HBaseDataSource")
      .option("zkHosts", tagsfour.zkHosts)
      .option("zkPort", tagsfour.zkPort)
      // target storage table
      .option("hbaseTable", "test")
      .option("family", "detail")
      .option("selectFields", "userId,tagsId")
      .load()

    val alltages = oidUserTags.join(newUserDayTime, oidUserTags("userId") === newUserDayTime("userId"))

    // Merge old and new comma-separated tag-id strings, de-duplicating entries.
    // Spark passes SQL NULL as Java null (never ""), so normalise nulls first
    // to avoid NullPointerException inside the UDF.
    val addtag = udf((oldTags: String, newTags: String) => {
      val oid = Option(oldTags).getOrElse("")
      val neww = Option(newTags).getOrElse("")
      if (oid.isEmpty && neww.isEmpty) ""
      else if (oid.isEmpty) neww
      else if (neww.isEmpty) oid
      else (oid + "," + neww).split(",").distinct.mkString(",")
    })

    val upTag: DataFrame = alltages.select(
      // Prefer the existing userId column; fall back to the newly-computed one.
      when(oidUserTags.col("userId").isNotNull, oidUserTags("userId"))
        .when(newUserDayTime.col("userId").isNotNull, newUserDayTime("userId"))
        .as("userId"),
      addtag(oidUserTags("tagsId"), newUserDayTime("tagsId")).as("tagsId")
    )

    val value: Dataset[Row] = upTag.repartition(2)

    value.write.format("cn.itcast.czxy.BD18.tools.HBaseDataSource")
      .option("zkHosts", tagsfour.zkHosts)
      .option("zkPort", tagsfour.zkPort)
      // target storage table
      .option("hbaseTable", "test")
      .option("family", "detail")
      .option("selectFields", "userId,tagsId")
      .save()

    spark.stop()
  }

  /**
   * Builds the HBase source metadata from the parsed level-4 rule map.
   * Missing keys default to "" so a partial rule still yields a usable object.
   *
   * @param sijibq key/value pairs parsed from the level-4 tag rule
   * @return [[Tagsfour]] describing the HBase table to read
   */
  def getHbaseMeta(sijibq: Map[String, String]): Tagsfour = {
    Tagsfour(
      sijibq.getOrElse(Tagsfour.INTYPE, ""),
      sijibq.getOrElse(Tagsfour.ZKHOSTS, ""),
      sijibq.getOrElse(Tagsfour.ZKPORT, ""),
      sijibq.getOrElse(Tagsfour.HBASETABLE, ""),
      sijibq.getOrElse(Tagsfour.FAMILY, ""),
      sijibq.getOrElse(Tagsfour.SELECTFIELDS, ""),
      sijibq.getOrElse(Tagsfour.ROWKEY, "")
    )
  }
}
