package com.atbeijing.handler

import java.text.SimpleDateFormat
import java.util
import java.util.Date
import com.atbeijing.bean.StartUpLog
import com.atbeijing.utils.RedisUtil
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.DStream
import redis.clients.jedis.Jedis

object DauHandler {

  /**
   * 向redis写数据
   * @param myGroupByKey
   */
  /**
   * Persists the deduplicated stream to Redis.
   *
   * Each record's mid is added to the set keyed by "DAU" + logDate, so the
   * set for a given day holds every active device id seen that day.
   *
   * @param myGroupByKey stream of one-record-per-user-per-day startup logs
   */
  def writeRedis(myGroupByKey: DStream[StartUpLog]) = {
    myGroupByKey.foreachRDD { rdd =>
      // NOTE: do NOT obtain the Jedis client here — this scope runs on the
      // driver and the connection is not serializable.
      rdd.foreachPartition { logs =>
        // Executor side: one connection shared by the whole partition.
        val client: Jedis = RedisUtil.getJedisClient
        for (log <- logs) {
          // key: "DAU" + day, member: device id
          client.sadd("DAU" + log.logDate, log.mid)
        }
        client.close()
      }
    }
  }

  /**
   * 批次内去重
   *
   * @param filterByRedisDStream
   */
  /**
   * Intra-batch deduplication: keeps only the earliest startup record per
   * (mid, logDate) within each micro-batch.
   *
   * @param filterByRedisDStream stream already filtered against Redis
   * @return stream with at most one record per user per day per batch
   */
  def myGroupByKey(filterByRedisDStream: DStream[StartUpLog]) = {
    // Key every record by mid + date so one user's records for a day collide.
    val keyed: DStream[(String, StartUpLog)] = filterByRedisDStream.map(log => {
      (log.mid + log.logDate, log)
    })
    // Group all records of the same user/day together.
    val grouped: DStream[(String, Iterable[StartUpLog])] = keyed.groupByKey()
    // Business rule: keep the first login of the day. minBy is O(n), unlike
    // the sort-then-take-one approach which is O(n log n); groupByKey never
    // yields an empty Iterable, so minBy is safe here.
    val earliest: DStream[(String, List[StartUpLog])] =
      grouped.mapValues(logs => List(logs.minBy(_.ts)))
    // Drop the synthetic key and flatten back to a stream of records.
    earliest.flatMap(_._2)
  }


  /**
   * 利用redis跨批次去重,取出当天的mid,然后filter过滤
   */
  /**
   * Cross-batch deduplication: drops records whose mid is already registered
   * in today's Redis "DAU" set.
   *
   * The Redis set is fetched once per batch on the driver (inside transform),
   * broadcast to the executors, and used there to filter the RDD.
   *
   * @param startUpLogDStream raw startup-log stream
   * @param ssc               streaming context, used to create the broadcast
   * @return stream containing only mids not yet seen today
   */
  def filterByRedis(startUpLogDStream: DStream[StartUpLog], ssc:StreamingContext ): DStream[StartUpLog] = {
    val sdf = new SimpleDateFormat("yyyy-MM-dd")
    startUpLogDStream.transform { rdd =>
      // Driver side, once per batch: read today's mids from Redis.
      val client: Jedis = RedisUtil.getJedisClient
      val redisKey: String = "DAU" + sdf.format(new Date(System.currentTimeMillis()))
      val todayMids: util.Set[String] = client.smembers(redisKey)
      client.close()

      // Ship the mid set to every executor via a broadcast variable.
      val midBC: Broadcast[util.Set[String]] = ssc.sparkContext.broadcast(todayMids)

      // Executor side: keep only records whose mid is not already recorded.
      rdd.filter(log => !midBC.value.contains(log.mid))
    }
  }










}
