package com.guchenbo.spark.stream

import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.{Seconds, StreamingContext}

import java.util.Date
import scala.util.Random

/**
 * @author guchenbo
 * @date 2022/4/15
 */
object KafkaStream {

  /**
   * Demo entry point: consumes a Kafka topic with a Spark Streaming direct
   * stream and prints the record count of each 3-second micro-batch.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    // Local master using all available cores; 3-second batch interval.
    val conf: SparkConf = new SparkConf().setAppName("kafka stream").setMaster("local[*]")
    val ssc: StreamingContext = new StreamingContext(conf, Seconds(3))
    ssc.sparkContext.setLogLevel("WARN")

    //    val server = "izuf6c3gcnqmp26nhnx9qoz:9092"
    val server = "10.57.16.13:9092"
    val topic = "holmes-api-default-holmes-call-log"

    /**
     * auto.offset.reset semantics:
     *
     * earliest
     * When a partition has committed offsets for this group, consumption
     * resumes from them; when there are none, consumption starts from the
     * beginning of the partition.
     * Committed offsets are tracked per consumer group: switching to a
     * different group uses that group's commits, and a brand-new group has
     * no committed offsets at all.
     *
     * latest
     * When a partition has committed offsets for this group, consumption
     * resumes from them; when there are none, only records produced after
     * the consumer starts are consumed.
     */
    // Never reassigned, so a val (was declared as a var).
    val offset = "earliest"

    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> server,
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "spark-kafka2222",
      "auto.offset.reset" -> offset,
      // This demo never commits offsets; every restart re-reads per the
      // auto.offset.reset policy above.
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    val topics = Array(topic)
    // Direct stream: one RDD partition per Kafka partition. PreferConsistent
    // distributes partitions evenly across the available executors.
    val stream = KafkaUtils.createDirectStream[String, String](
      ssc,
      PreferConsistent,
      Subscribe[String, String](topics, kafkaParams)
    )

    // For each micro-batch, count the received records and print the count
    // on the driver. count() is an action, so it triggers the batch's job.
    stream.foreachRDD { rdd =>
      val jsonData = rdd.map(_.value())
      println(jsonData.count())
    }

    ssc.start()
    ssc.awaitTermination()
  }
}
