package com.offset

import kafka.serializer.StringDecoder
import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
  * Demonstrates the offset-management problem with the Kafka direct stream:
  * consumed offsets are kept only in memory, so on restart the job replays
  * from "smallest" (the earliest available offset).
  * How to solve it?
  *   One option: checkpointing (ssc.checkpoint(...)).
  */
object OffsetApp01 {

  def main(args: Array[String]): Unit = {

    // Give the application an identifiable name (was "" — invisible in the
    // Spark UI / history server).
    val conf = new SparkConf()
      .setAppName("OffsetApp01")
      .setMaster("local[6]")
      // Workaround so Spark's local-mode memory check passes on small machines.
      .set("spark.testing.memory", "500000000")

    // 10-second micro-batch interval.
    val ssc = new StreamingContext(conf, Seconds(10))

    // Kafka 0.8 direct-API parameters: broker list, and where to start when
    // no offset is available ("smallest" = earliest retained offset).
    val kafkaParams = Map[String, String](
      "metadata.broker.list" -> "shizhonghao:9092",
      "auto.offset.reset" -> "smallest"
    )

    // Comma-separated topic list -> Set[String], as createDirectStream requires.
    val topics = "TestTopic".split(",").toSet

    // Direct stream: no receiver; offsets are tracked by the DStream itself
    // but persisted nowhere — a restart replays from "smallest". That is the
    // problem this demo illustrates; checkpointing is one way to fix it.
    val messages = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](ssc, kafkaParams, topics)

    // Only act on non-empty batches; rdd.count() triggers the actual consume.
    messages.foreachRDD { rdd =>
      if (!rdd.isEmpty()) {
        println("rddCount: " + rdd.count())
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }

}
