package cn.doit.realanalys

import cn.doit.bean.LogBean
import cn.doit.commons.{PVAnalys, UVAnalys}
import cn.doit.utils.ConfigUtils
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010._
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.slf4j.LoggerFactory


/**
  * Created by Administrator on 2018/11/16.
  */
/**
  * Real-time site analysis streaming job.
  *
  * Consumes raw log lines from Kafka via a Spark direct stream, parses each
  * micro-batch into [[LogBean]] records, computes real-time PV and UV
  * metrics, then commits the consumed offsets back to Kafka asynchronously.
  *
  * Offset handling is manual (`enable.auto.commit = false`) and the commit
  * happens only after the batch has been processed, giving at-least-once
  * semantics: a batch may be re-processed (duplicate consumption) if the job
  * dies between processing and the async commit, but data is never lost.
  */
object SiteRealAnalys {
  val logger = LoggerFactory.getLogger("SiteRealAnalys")

  // Job configuration, all sourced from the external config file.
  val batchintal = ConfigUtils.betch_intral    // micro-batch interval, in seconds
  val kafkaBrokers = ConfigUtils.kafka_brokers // Kafka bootstrap.servers list
  val kafkatopic = ConfigUtils.kafka_topic     // topics to subscribe to
  val kafkagroupid = ConfigUtils.kafka_groupid // consumer group id


  def main(args: Array[String]): Unit = {

    // NOTE(review): local[*] is suitable for development only; in production
    // the master should come from spark-submit --master instead.
    val conf: SparkConf = new SparkConf().setAppName("SiteRealAnalys").setMaster("local[*]")
    val ssc = new StreamingContext(conf, Seconds(batchintal))

    // Let Spark distribute Kafka partitions evenly across executors.
    val preferredHosts = LocationStrategies.PreferConsistent

    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> kafkaBrokers,

      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      //"auto.commit.interval.ms" -> (30000L:java.lang.Long),
      // Max time the broker waits for a heartbeat before evicting this
      // consumer from the group.
      "session.timeout.ms" -> "300000",
      // Max socket request timeout (broker default is 30s); kept larger
      // than session.timeout.ms as the Kafka client requires.
      "request.timeout.ms" -> "500000",
      // Cap records per poll so one slow batch cannot trip the session timeout.
      "max.poll.records" -> (300: Integer),
      "group.id" -> kafkagroupid,
      // Start from the latest offset when no committed offset exists.
      "auto.offset.reset" -> "latest",
      //      "auto.offset.reset" -> "earliest",
      // Offsets are committed manually after each processed batch (below).
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    // BUGFIX: was `kafkatopic(0)`, which throws IndexOutOfBoundsException
    // when the configured topic list is empty; render the whole list instead.
    logger.info(s"start consumer kafka ---${kafkatopic.mkString(",")}")


    val stream: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream[String, String](
      ssc,
      preferredHosts,
      ConsumerStrategies.Subscribe[String, String](kafkatopic, kafkaParams)
    )

    stream.foreachRDD(fr => {

      // Capture this batch's offset ranges BEFORE any transformation so we
      // commit exactly the range that was consumed.
      val offsetRanges = fr.asInstanceOf[HasOffsetRanges].offsetRanges

      // Parse raw records and keep only events with type "0".
      val beanRdd: RDD[LogBean] = LogBean.parseRdd2LogBeanRdd(fr).filter(_.getEventType.equals("0"))

      // Real-time visitor metrics.
      // PV metric
      PVAnalys.realFlowPV(beanRdd)
      // UV metric
      UVAnalys.realFlowUV(beanRdd)

      // Manually commit offsets back to Kafka. Committing after processing
      // yields at-least-once delivery: a crash in between may cause
      // duplicate consumption, but never data loss.
      stream.asInstanceOf[CanCommitOffsets].commitAsync(offsetRanges)

    })

    // Start the job and block this thread until the context is terminated.
    ssc.start()
    ssc.awaitTermination()
  }

}
