package com.atguigu.gmall.realtime.apps

import java.{lang, util}
import java.time.{Instant, LocalDate, LocalDateTime, ZoneId}
import java.time.format.DateTimeFormatter

import com.alibaba.fastjson.JSON
import com.atguigu.gmall.contants.GmallConstants
import com.atguigu.gmall.realtime.bean.StartUpLog
import com.atguigu.gmall.realtime.utils.{MykafkaUtil, RedisUtil}
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.log4j.Logger
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import redis.clients.jedis.Jedis
import org.apache.phoenix.spark._

/**
 * Date:2021/3/18
 * Author:csw
 * Description:
 */
/**
 * Date:2021/3/18
 * Author:csw
 * Description: Daily-Active-User (DAU) streaming job.
 *
 * Per micro-batch pipeline:
 *   Kafka startup topic -> parse JSON into [[StartUpLog]] -> stamp logDate/logHour
 *   from the event timestamp (Asia/Shanghai) -> cross-batch dedup against a Redis
 *   set -> intra-batch dedup (earliest event per (mid, logDate)) -> record the
 *   surviving mids back into Redis -> persist rows to HBase through Phoenix.
 */
object DauApp extends BaseApp {
  override val appName: String = "dau"
  // Micro-batch interval in seconds. NOTE(review): name is misspelled ("seound")
  // but it overrides a BaseApp member declared outside this file — cannot rename here.
  override val seound: Int = 10

  // Hoisted out of the per-record closure: the original allocated a new formatter
  // for every record. Object members are re-initialized per executor JVM, so the
  // closure does not need to serialize them (DateTimeFormatter is not Serializable).
  private val dateFormatter: DateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd")
  private val businessZone: ZoneId = ZoneId.of("Asia/Shanghai")

  /** Redis set key holding the mids already counted for the given log date. */
  private def dauKey(logDate: String): String = "DAU" + logDate

  def main(args: Array[String]): Unit = {
    runApp {
      val sourceStream: InputDStream[ConsumerRecord[String, String]] =
        MykafkaUtil.getKafkaStream(GmallConstants.KAFKA_TOPIC_STARTUP, ssc)
      ssc.sparkContext.setLogLevel("warn")

      // Parse each Kafka record and derive logDate / logHour from the event's own
      // timestamp (ts, epoch millis) in the business time zone.
      val startLogStream: DStream[StartUpLog] = sourceStream.map(record => {
        val log: StartUpLog = JSON.parseObject(record.value(), classOf[StartUpLog])
        val eventTime: LocalDateTime =
          LocalDateTime.ofInstant(Instant.ofEpochMilli(log.ts), businessZone)
        log.logDate = eventTime.format(dateFormatter)
        log.logHour = eventTime.getHour + ""
        log
      })
      startLogStream.count().print()

      // Cross-batch dedup against Redis.
      // FIX 1: key by the record's own logDate (event time, Asia/Shanghai) instead of
      //   LocalDate.now() (system zone, wall-clock), so events straddling midnight are
      //   checked against the correct day's set — consistent with the sadd below.
      // FIX 2: the original closed the Jedis client BEFORE the lazy filter iterator was
      //   consumed, so sismember ran on a closed/recycled connection. Materialize the
      //   partition first, then release the client in a finally block.
      val newMidStream: DStream[StartUpLog] = startLogStream.mapPartitions(partition => {
        val client: Jedis = RedisUtil.getJedisClient
        try {
          partition
            .filter(log => !client.sismember(dauKey(log.logDate), log.mid))
            .toList
            .iterator
        } finally {
          client.close()
        }
      })
      newMidStream.count().print()
      //val newMidStream: DStream[StartUpLog] = broadcastDistinct(startLogStream, ssc)

      // Intra-batch dedup: keep only the earliest event per (mid, logDate).
      val firstVisitStream: DStream[StartUpLog] = newMidStream.transform(batch => {
        val deduped: RDD[StartUpLog] = batch
          .map(log => ((log.mid, log.logDate), log))
          .groupByKey()
          .map { case (_, logs) => logs.minBy(_.ts) }
        deduped
      })
      firstVisitStream.count().print()

      // Record the surviving mids in Redis so later batches can dedup against them.
      // Keyed by logDate to stay consistent with the filter above.
      // TODO(review): consider client.expire on the set so old day-keys don't pile up.
      firstVisitStream.foreachRDD(rdd => {
        rdd.foreachPartition(partition => {
          val client: Jedis = RedisUtil.getJedisClient
          try {
            partition.foreach(log => client.sadd(dauKey(log.logDate), log.mid))
          } finally {
            client.close()
          }
        })
      })

      // Persist the deduplicated logs to HBase via Phoenix.
      writeToHbase(firstVisitStream)
    }
  }

  /**
   * Alternative cross-batch dedup: snapshot today's Redis set on the driver each
   * batch and broadcast it to executors, filtering locally instead of issuing one
   * sismember per record. Currently unused (see the commented call in main).
   *
   * NOTE(review): this reads only the set for LocalDate.now(), so records whose
   * logDate differs from the driver's current date are not filtered correctly;
   * also the broadcast is never unpersisted, so per-batch broadcasts accumulate
   * over a long run — confirm before re-enabling.
   *
   * @param d2  stream of parsed startup logs
   * @param ssc streaming context used to create the broadcast
   * @return    stream with already-seen mids (per today's Redis set) removed
   */
  def broadcastDistinct(d2: DStream[StartUpLog], ssc: StreamingContext): DStream[StartUpLog] = {
    val d3: DStream[StartUpLog] = d2.transform(rdd => {
      val client: Jedis = RedisUtil.getJedisClient
      // smembers is eager, so closing the client right after is safe here
      // (unlike the lazy mapPartitions filter variant).
      val seenMids: util.Set[String] = client.smembers("DAU" + LocalDate.now())
      client.close()
      val broadcastSeen: Broadcast[util.Set[String]] = ssc.sparkContext.broadcast(seenMids)
      rdd.filter(log => !broadcastSeen.value.contains(log.mid))
    })
    d3
  }

  /**
   * Write every batch of the stream into the Phoenix-managed HBase table
   * GMALL2020_DAU, mapping StartUpLog fields onto the listed columns.
   * Connects through ZooKeeper at hadoop102:2181.
   */
  def writeToHbase(d2: DStream[StartUpLog]): Unit = {
    d2.foreachRDD(rdd => {
      rdd.saveToPhoenix("GMALL2020_DAU",
        Seq("MID", "UID", "APPID", "AREA", "OS", "CH", "TYPE", "VS", "LOGDATE", "LOGHOUR", "TS"),
        HBaseConfiguration.create(),
        Some("hadoop102:2181")
      )
    })
  }
}
