package ApplicationTest.Example.KafKa

import ApplicationStart.SparkService.SparkService
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010.{KafkaUtils, LocationStrategy, OffsetRange}
import org.springframework.beans.factory.annotation.Autowired



object CreateRDDByStream {

  // NOTE(review): @Autowired field injection does not work on a Scala `object` —
  // Spring only injects into beans it instantiates, and a singleton object is
  // initialized by the Scala runtime, not the Spring container. As written,
  // sparkService will be null when main runs unless it is populated by some
  // mechanism not visible in this file. TODO confirm how this is wired.
  @Autowired
  var sparkService : SparkService = _

  /** Offset ranges to consume, one per partition of topic "test":
    * (topic, partition, inclusive starting offset, exclusive ending offset).
    */
  val offsetRanges = Array(
    OffsetRange("test", 0, 0, 100),
    OffsetRange("test", 1, 0, 100)
  )

  /** Kafka consumer configuration.
    *
    * `KafkaUtils.createRDD` requires a `java.util.Map`, so this Scala map is
    * converted explicitly with `.asJava` at the call site in [[main]].
    */
  val kafkaParams: Map[String, Object] = Map[String, Object](
    "bootstrap.servers" -> "localhost:9092,anotherhost:9092",
    "key.deserializer" -> classOf[StringDeserializer],
    "value.deserializer" -> classOf[StringDeserializer],
    "group.id" -> "example",
    "auto.offset.reset" -> "latest",
    "enable.auto.commit" -> (false: java.lang.Boolean)
  )

  import java.{util => ju} // renamed to avoid shadowing scala.util

  /** Thin wrapper around [[KafkaUtils.createRDD]] that builds a batch RDD of
    * Kafka records for the given offset ranges.
    *
    * @param sc               active SparkContext
    * @param kafkaParams      Kafka consumer config as a java.util.Map
    * @param offsetRanges     per-partition (start, end) offsets to read
    * @param locationStrategy executor placement strategy (e.g. PreferConsistent)
    * @return RDD of consumer records keyed/valued as K/V
    */
  def CreateRDDSupportImplict[K, V](sc: SparkContext,
                                    kafkaParams:  ju.Map[String, Object],
                                    offsetRanges: Array[OffsetRange],
                                    locationStrategy: LocationStrategy) : RDD[ConsumerRecord[K, V]] =
    KafkaUtils.createRDD[K, V](sc, kafkaParams, offsetRanges, locationStrategy)

  def main(args: Array[String]): Unit = {

    // Use the explicit JavaConverters API (.asJava) rather than the deprecated
    // implicit scala.collection.JavaConversions, which converted collections
    // silently and was removed in Scala 2.13.
    import scala.collection.JavaConverters._

    val rdd = CreateRDDSupportImplict[String, String](
      sparkService.getSC,
      kafkaParams.asJava,
      offsetRanges,
      PreferConsistent
    )

  }

}
