package cn.edu360.streaming.utils

import kafka.message.MessageAndMetadata
import kafka.serializer.StringDecoder
import kafka.utils.ZkUtils
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka.KafkaUtils

/**
  * Utility for creating Kafka direct streams (with ZooKeeper-backed offset recovery).
  * wzxjava@126.com
  * Created by wangzhixuan on 2017/05/18 19:44
  */
object StreamingUtil {

  /**
    * Creates a Kafka direct stream, resuming from offsets previously stored in ZooKeeper
    * when available, and from the consumer's default offsets otherwise.
    *
    * NOTE: offset recovery currently supports a single topic only. When `topics` contains
    * more than one entry, one of them is picked (via `Set#last`, so the choice is
    * effectively arbitrary) for offset lookup, while the no-stored-offsets path still
    * subscribes to every topic in `topics`.
    *
    * @param ssc         the active StreamingContext
    * @param kafkaParams Kafka consumer configuration (e.g. "metadata.broker.list")
    * @param zkUtils     ZooKeeper client used to read stored offsets
    * @param zkPath      ZooKeeper path under which offsets are stored
    * @param topics      topics to subscribe to; must be non-empty
    * @return an InputDStream of (key, message) pairs
    * @throws IllegalArgumentException if `topics` is empty
    */
  def createCustomDirectKafkaStream(ssc: StreamingContext, kafkaParams: Map[String, String], zkUtils: ZkUtils, zkPath: String, topics: Set[String]): InputDStream[(String, String)] = {
    // Fail fast with a clear message instead of an opaque NoSuchElementException from Set#last.
    require(topics.nonEmpty, "topics must not be empty")
    // Offset recovery only supports one topic for now; see the note in the Scaladoc.
    val topic = topics.last
    val storedOffsets = ZooKeeperOffsetUtil.readOffsets(zkUtils, zkPath, kafkaParams, topic)
    val kafkaStream = storedOffsets match {
      // No stored offsets: start from the consumer's default position (auto.offset.reset).
      case None =>
        KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](ssc, kafkaParams, topics)
      case Some(fromOffsets) =>
        // Resume from the previously saved offsets, mapping each record to a (key, message) pair.
        val messageHandler = (mmd: MessageAndMetadata[String, String]) => (mmd.key, mmd.message)
        KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder, (String, String)](ssc, kafkaParams, fromOffsets, messageHandler)
    }
    kafkaStream
  }

}
