import com.typesafe.config.{Config, ConfigFactory}
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming.kafka010.{KafkaUtils, OffsetRange}
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.JavaConverters._
object BasicRdd {

  /** Demo entry point: reads a fixed batch of Kafka records as a Spark RDD
    * and prints them to stdout.
    *
    * Configuration (via Typesafe Config, e.g. application.conf):
    *   - kafka.brokers : bootstrap servers
    *   - kafka.groupid : consumer group id
    *   - kafka.topics  : comma-separated topic list (only the first is read)
    */
  def main(args: Array[String]): Unit = {

    val conf: Config = ConfigFactory.load()

    // Consumer settings; KafkaUtils.createRDD expects a java.util.Map.
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> conf.getString("kafka.brokers"),
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> conf.getString("kafka.groupid")
    ).asJava

    val sparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(this.getClass.getSimpleName)
    val sc = new SparkContext(sparkConf)

    try {
      // First topic in configured order. (The original `.toSet.head` picked an
      // arbitrary element of a Set, which depends on hash iteration order.)
      val topic = conf.getString("kafka.topics").split(",").head.trim

      // NOTE(review): offsets 0..100 on partitions 0..2 are hard-coded demo
      // values; a partition id beyond the topic's actual count fails at runtime.
      val offsetRanges = (0 until 3).map(p => OffsetRange(topic, p, 0L, 100L)).toArray

      val rdd = KafkaUtils.createRDD[String, String](sc, kafkaParams, offsetRanges, PreferConsistent)
      rdd.collect().foreach(println)
    } finally {
      // Always release the SparkContext, even if the Kafka read/collect throws;
      // the original skipped stop() on failure and leaked the context.
      sc.stop()
    }
  }

}
