package Demo4;


import com.google.common.base.*;
import com.google.common.base.Optional;
import kafka.serializer.StringDecoder;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaPairInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;

import org.apache.spark.streaming.kafka.KafkaUtils;
import scala.Tuple2;

import java.util.*;

/**
 * Spark Streaming word count with Kafka as the data source.
 *
 * <p>Consumes lines from a Kafka topic via a direct (receiver-less) stream,
 * splits them into whitespace-separated words, and maintains a running
 * per-word count across batches with {@code updateStateByKey}.
 *
 * <p>Created by lenovo on 2017/12/18.
 */
public class Streaming_KafkaJava {

    /** Default Kafka broker list, used when no CLI argument is supplied. */
    private static final String DEFAULT_BROKERS = "hadoop1:9092,hadoop2:9092,hadoop3:9092";

    /** Default topic to consume, used when no CLI argument is supplied. */
    private static final String DEFAULT_TOPIC = "test2";

    /**
     * Entry point.
     *
     * @param args optional overrides: {@code args[0]} = comma-separated broker
     *             list, {@code args[1]} = topic name. Defaults match the
     *             previous hard-coded values, so existing invocations with no
     *             arguments behave exactly as before.
     */
    public static void main(String[] args) {
        // Backward-compatible generalization: fall back to the old constants.
        String brokers = args.length > 0 ? args[0] : DEFAULT_BROKERS;
        String topic = args.length > 1 ? args[1] : DEFAULT_TOPIC;

        SparkConf conf = new SparkConf()
                .setAppName("Streaming_KafkaJava")
                .setMaster("local[2]")
                .set("spark.testing.memory", "2147480000");

        JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(5));
        // updateStateByKey requires checkpointing; "." keeps state in the working dir.
        jssc.checkpoint(".");

        Map<String, String> kafkaParameter = new HashMap<String, String>();
        kafkaParameter.put("metadata.broker.list", brokers);

        Set<String> topicList = new HashSet<String>();
        topicList.add(topic);

        // Direct stream: each Kafka record arrives as a (key, message) pair.
        JavaPairInputDStream<String, String> linesDStream = KafkaUtils.createDirectStream(
                jssc, String.class, String.class, StringDecoder.class, StringDecoder.class,
                kafkaParameter, topicList);

        // Split each message body (tuple._2) into whitespace-separated words.
        JavaDStream<String> words = linesDStream.flatMap(
                new FlatMapFunction<Tuple2<String, String>, String>() {
                    @Override
                    public Iterable<String> call(Tuple2<String, String> line) throws Exception {
                        return Arrays.asList(line._2.split(" "));
                    }
                });

        // Map every word to a (word, 1) pair for counting.
        JavaPairDStream<String, Long> wordPair = words.mapToPair(
                new PairFunction<String, String, Long>() {
                    @Override
                    public Tuple2<String, Long> call(String s) throws Exception {
                        return new Tuple2<String, Long>(s, 1L);
                    }
                });

        // Fold each batch's counts into the checkpointed running total per word.
        JavaPairDStream<String, Long> word = wordPair.updateStateByKey(
                new Function2<List<Long>, Optional<Long>, Optional<Long>>() {
                    @Override
                    public Optional<Long> call(List<Long> v1, Optional<Long> v2) throws Exception {
                        // Seed from the previous state; 0 for a key seen for the first time.
                        Long newValue = v2.isPresent() ? v2.get() : 0L;
                        for (Long value : v1) {
                            newValue += value;
                        }
                        return Optional.of(newValue);
                    }
                });

        word.print();

        jssc.start();
        jssc.awaitTermination();
        jssc.close();
    }
}
