package com.gmall.utils

import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent


/**
 * <p>File: Kafka data source utility</p>
 * <p>Description: direct-connect (createDirectStream) consumer</p>
 * <p>Created: 2020/10/15</p>
 * @author <a href="mail to: 1034652894@qq.com" rel="nofollow">Demik Qin</a>
 * @version v1.0
 * @update [seq][date YYYY-MM-DD] [author][change description]
 */
object MyKafkaUtil {

    // Kafka bootstrap servers, loaded once from the project's properties file.
    private val kafkaServers: String = PropertiesUtil.getValue("kafka.bootstrap.servers")

    // Base consumer configuration shared by every stream. "group.id" is added
    // per call in getKafkaStream; this map itself is never mutated (kept as a
    // `var` only to preserve the existing public interface).
    // NOTE(review): enable.auto.commit = true means offsets are committed by the
    // consumer automatically; with a direct stream this weakens delivery
    // guarantees (records can be lost on failure before processing completes).
    // Confirm whether manual offset management was intended.
    var kafkaParams: Map[String, Object] = Map[String, Object](
        "bootstrap.servers" -> kafkaServers,
        "key.deserializer" -> classOf[StringDeserializer],
        "value.deserializer" -> classOf[StringDeserializer],
        "auto.offset.reset" -> "latest",
        "enable.auto.commit" -> (true: java.lang.Boolean)
    )


    /**
     * Creates a direct Kafka DStream for the given topic and consumer group,
     * mapped to the record values only.
     *
     * @param ssc     active StreamingContext to attach the stream to
     * @param groupId Kafka consumer group id for this stream
     * @param topic   topic to subscribe to
     * @return DStream of the String values of the consumed records
     */
    def getKafkaStream(ssc: StreamingContext, groupId: String, topic: String): DStream[String] = {
        // Build the per-stream config locally instead of mutating the shared map.
        // The old `kafkaParams += ...` leaked the previous call's group.id into
        // the shared state and raced when streams were created concurrently.
        val params = kafkaParams + ("group.id" -> groupId)
        KafkaUtils
          .createDirectStream[String, String](
              ssc,
              PreferConsistent,
              Subscribe[String, String](Set(topic), params)
          )
          .map(_.value())
    }
}
