package com.lvmama.rhino.common.utils.kafka

import kafka.serializer.StringDecoder
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka.KafkaUtils
//import org.apache.spark.streaming.kafka010.{KafkaUtils, LocationStrategies}
//import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe

/**
  * Created by yuanxiaofeng on 2016/6/23.
  */
class KafkaDStreamSource(config: Map[String, String]) {

  /**
    * Builds a direct (receiver-less) DStream over a single Kafka topic using
    * the Kafka 0.8 API, decoding both key and value as strings.
    *
    * Each incoming (key, value) message pair is wrapped into a [[KafkaPayload]];
    * the key is wrapped with `Option(...)` because Kafka message keys may be
    * null on the wire, and `Option(null)` maps that to `None`.
    *
    * @param ssc   active streaming context the stream is attached to
    * @param topic single Kafka topic to subscribe to
    * @return      stream of [[KafkaPayload]] records
    */
  def createSource(ssc: StreamingContext, topic: String): DStream[KafkaPayload] = {
    // Kafka 0.8 direct stream: consumer params come straight from the
    // constructor config, subscription is a single-topic set.
    val rawStream = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](
      ssc,
      config,
      Set(topic)
    )
    rawStream.map { case (key, value) => KafkaPayload(Option(key), value) }

    // Kafka 0.10 equivalent, kept for reference when upgrading:
    //    KafkaUtils.createDirectStream[String, String](
    //      ssc,
    //      LocationStrategies.PreferConsistent,
    //      Subscribe[String, String](Set(topic), config)
    //    ).map(record => KafkaPayload(Option(record.key()), record.value()))
  }
}

/** Companion object providing a concise factory for [[KafkaDStreamSource]]. */
object KafkaDStreamSource {

  /**
    * Creates a source configured with the given Kafka consumer parameters.
    *
    * @param config Kafka parameters forwarded verbatim to the constructor
    */
  def apply(config: Map[String, String]): KafkaDStreamSource =
    new KafkaDStreamSource(config)
}