package com.shellyan.gmall.rt.app

import java.lang

import com.alibaba.fastjson.JSON
import com.atguigu.realtime.gmall.common.Constant
import com.shellyan.gmall.rt.bean.StartupLog
import com.shellyan.gmall.rt.util.RedisUtil
import org.apache.spark.streaming.dstream.DStream
import redis.clients.jedis.Jedis

/**
 * @author Shelly An
 * @create 2020/9/4 9:18
 */
object DauApp extends BaseApp {
  override val master: String = "local[2]"
  override val appName: String = "DauApp"
  override val batchTime: Int = 3
  override val topics: Set[String] = Set(Constant.STARTUP_TOPIC)
  override val groupId: String = "DauApp"

  /**
   * Deduplicates startup logs to one record per device (mid) per day using a
   * Redis set keyed by log date, then writes the first-visit records to the
   * Phoenix table GMALL_DAU.
   *
   * @param sourceStream raw startup-log records as JSON strings (from Kafka)
   */
  override def run(sourceStream: DStream[String]): Unit = {
    val filterStartupLogStream: DStream[StartupLog] = sourceStream
      .map(json => JSON.parseObject(json, classOf[StartupLog]))
      .mapPartitions { startupLogIt =>
        // One Jedis connection per partition to avoid per-record connect cost.
        val client: Jedis = RedisUtil.getClient
        try {
          // BUG FIX: Iterator.filter is lazy, so the original code closed the
          // Jedis client *before* Spark consumed the iterator, making every
          // sadd/expire call hit a closed connection. Materialize eagerly
          // (toList) while the client is still open, then hand back an
          // iterator over the materialized results.
          startupLogIt.filter { startupLog =>
            val key = s"dau:uids:${startupLog.logDate}"
            // sadd returns 1 only the first time this mid is seen today.
            val added: lang.Long = client.sadd(key, startupLog.mid)
            // Refresh the 24h retention on every write; could alternatively
            // be set only when the key is first created.
            client.expire(key, 24 * 60 * 60)
            added == 1L
          }.toList.iterator
        } finally {
          // Always release the connection, even if parsing/Redis throws.
          client.close()
        }
      }

    filterStartupLogStream.print(100)
    filterStartupLogStream.foreachRDD(rdd => {
      import org.apache.phoenix.spark._
      // Values are taken from the RDD rows positionally in this Seq's order,
      // then matched to the Phoenix table's columns by name.
      rdd.saveToPhoenix("GMALL_DAU",
        Seq("MID", "UID", "APPID", "AREA", "OS", "CHANNEL", "LOGTYPE", "VERSION", "TS", "LOGDATE", "LOGHOUR"),
        zkUrl = Option("hadoop102,hadoop103,hadoop104:2181")
      )
    })
  }
}
