package org.shj.spark.streaming;

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaPairInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;

import kafka.serializer.StringDecoder;
import scala.Tuple2;


/**
 * Word count over a Kafka topic using the direct-stream API.
 *
 * <p>The direct approach reads each Kafka record exactly once, whereas the
 * receiver-based approach may re-read data. For failure recovery, enable
 * checkpointing — see the notes on {@code KafkaUtils.createDirectStream}.
 */
public class KafkaDirectWordCount {

	public static void main(String[] args) throws Exception {
		SparkConf conf = new SparkConf().setAppName("KafkaDirectWordCount");
		conf.setMaster("local[*]");

		// 10-second micro-batch interval.
		JavaStreamingContext jsc = new JavaStreamingContext(conf, Durations.seconds(10));
		jsc.sparkContext().setLogLevel("WARN");

		// Kafka consumer configuration: brokers to bootstrap metadata from.
		Map<String, String> params = new HashMap<>();
		params.put("metadata.broker.list", "ubuntu3:9092,ubuntu4:9092,ubuntu5:9092");

		Set<String> topics = new HashSet<>();
		topics.add("shjtest");

		JavaPairInputDStream<String, String> lines = KafkaUtils.createDirectStream(jsc, String.class, String.class, 
				StringDecoder.class, StringDecoder.class, params, topics);

		// Tuple2<String, String> is the record from Kafka: the first element is the
		// message key (ID), the second is the actual payload to tokenize.
		JavaDStream<String> words = lines.flatMap(new FlatMapFunction<Tuple2<String, String>, String>() {
			private static final long serialVersionUID = -5624870488125722641L;

			@Override
			public Iterator<String> call(Tuple2<String, String> line) throws Exception {
				return Arrays.asList(line._2.split("\\s+")).iterator();
			}
		});

		// Map each word to (word, 1) then sum counts per word within the batch.
		JavaPairDStream<String, Integer> result = words.mapToPair(new PairFunction<String, String, Integer>() {
			private static final long serialVersionUID = -3727931005625846885L;

			@Override
			public Tuple2<String, Integer> call(String word) throws Exception {
				return new Tuple2<>(word, 1);
			}
		}).reduceByKey(new Function2<Integer, Integer, Integer>() {
			private static final long serialVersionUID = -3169632339714592212L;

			@Override
			public Integer call(Integer v1, Integer v2) throws Exception {
				return v1 + v2;
			}
		});

		// Print the per-batch word counts to stdout.
		result.print();

		jsc.start();
		try {
			jsc.awaitTermination();
		} finally {
			// Guarantee shutdown even if awaitTermination exits exceptionally.
			jsc.stop();
		}
	}

}
