package com.rainsoft.center.isec.stream.utils;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka010.ConsumerStrategies;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

/**
 * @Name spark.streaming.util.KafkaStreaming
 * @Description 自定义获取KafkaDStream工具类
 * @Author Elwyn
 * @Version 2017/11/15
 * @Copyright 上海云辰信息科技有限公司
 **/
public class CustomKafkaDStreamUtil<K, V> {

	/** Consumer group id used by the legacy no-arg-group overload; kept for backward compatibility. */
	private static final String DEFAULT_GROUP_ID = "group5";

	/**
	 * Creates a direct Kafka DStream subscribed to the given topics, using the
	 * historical default consumer group ({@code "group5"}).
	 *
	 * @param javaStreamingContext the streaming context to attach the stream to
	 * @param brokers              comma-separated Kafka bootstrap servers (host:port,...)
	 * @param topics               topics to subscribe to
	 * @return a direct input DStream of consumer records
	 */
	public JavaInputDStream<ConsumerRecord<K, V>> getKafkaStreaming(JavaStreamingContext javaStreamingContext, String brokers, Collection<String>
			topics) {
		return getKafkaStreaming(javaStreamingContext, brokers, topics, DEFAULT_GROUP_ID);
	}

	/**
	 * Creates a direct Kafka DStream subscribed to the given topics under an explicit
	 * consumer group id.
	 *
	 * @param javaStreamingContext the streaming context to attach the stream to
	 * @param brokers              comma-separated Kafka bootstrap servers (host:port,...)
	 * @param topics               topics to subscribe to
	 * @param groupId              Kafka consumer group id for offset tracking
	 * @return a direct input DStream of consumer records
	 */
	public JavaInputDStream<ConsumerRecord<K, V>> getKafkaStreaming(JavaStreamingContext javaStreamingContext, String brokers, Collection<String>
			topics, String groupId) {

		// Kafka consumer configuration.
		Map<String, Object> kafkaParams = new HashMap<>();
		kafkaParams.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers);
		kafkaParams.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
		kafkaParams.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "30000");
		kafkaParams.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "30000");
		// NOTE(review): the class is generic in <K, V> but both deserializers are pinned
		// to StringDeserializer, so K/V are effectively String — confirm against callers.
		kafkaParams.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
		kafkaParams.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");

		// PreferConsistent distributes partitions evenly across available executors and is
		// the recommended default; PreferBrokers (used previously) is only appropriate when
		// executors run on the same hosts as the Kafka brokers.
		return KafkaUtils.createDirectStream(
				javaStreamingContext, LocationStrategies.PreferConsistent(),
				ConsumerStrategies.Subscribe(topics, kafkaParams));
	}
}
