package com.pw.study.realtime.app

import com.pw.study.common.constants.TopicConstant
import com.pw.study.realtime.handle.{KafkaHandler, RDDHandler, RedisHandler}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{CanCommitOffsets, HasOffsetRanges}
import org.apache.spark.streaming.{Seconds, StreamingContext}

object DAUApp extends BaseAPP {
  appName = "DAUApp"
  groupName = "DAUApp"

  /**
   * Entry point: consumes startup-log events from Kafka, de-duplicates them
   * per device per day (first within the batch, then against Redis for the
   * current day), persists the daily-active-user records to HBase (via a
   * pre-created Phoenix table) and Redis, and finally commits the Kafka
   * consumer offsets — giving at-least-once delivery to the sinks.
   */
  def main(args: Array[String]): Unit = {
    conf.setAppName(appName)
    context = new StreamingContext(conf, Seconds(batchDuration))
    runApp({
      // Pull the raw startup-log stream from Kafka.
      val ds: InputDStream[ConsumerRecord[String, String]] =
        KafkaHandler.getKafkaStream(Array(TopicConstant.STARTUP_LOG), context, groupName)

      ds.foreachRDD(rdd => {
        if (!rdd.isEmpty()) {
          // Capture this batch's offset ranges up front (must be read from the
          // direct-stream RDD before any transformation) so they can be
          // committed only after the batch has been fully processed.
          // NOTE(review): offsets are only committed for non-empty batches —
          // presumably acceptable here, but confirm against the group's
          // offset-retention settings.
          val ranges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges

          // Parse Kafka record values into bean objects.
          val rddBean = RDDHandler.rddToBean(rdd)
          // In-batch dedup: group by (device, date), order by ts, keep earliest.
          val rddETL = RDDHandler.rddToDataETL(rddBean)
          // Cross-batch dedup: drop devices already recorded today in Redis.
          val rddRedis = RedisHandler.rddToFilter(rddETL)

          // Cache BEFORE the first action: this RDD is consumed three times
          // (count, HBase save, Redis save). The original code cached after
          // count(), so the whole lineage was recomputed for the first action.
          rddRedis.cache()
          println("---------------------------------------")
          println(rddRedis.count())

          // Persist to HBase (the Phoenix table must already exist).
          RDDHandler.saveDataToHbase(rddRedis)
          // Record the devices in Redis so subsequent batches filter them out.
          RDDHandler.saveDataToRedis(rddRedis)

          // Release executor memory — this is a long-running streaming job and
          // the cached batch is no longer needed.
          rddRedis.unpersist()

          // Commit consumed offsets only after all sinks succeeded.
          ds.asInstanceOf[CanCommitOffsets].commitAsync(ranges)
        }
      })

    })
  }
}
