package SparkStreaming;

import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.spark.*;
import org.apache.spark.api.java.Optional;
import org.apache.spark.api.java.function.*;
import org.apache.spark.streaming.*;
import org.apache.spark.streaming.api.java.*;
import scala.Tuple2;


import java.util.Arrays;
import java.util.List;


/** Incremental (stateful) word count over a socket text stream using Spark Streaming.
 * @program: MySpark
 * @description: maintains running word counts across micro-batches via updateStateByKey
 * @author: tkk fendoukaoziji
 * @create: 2019-04-23 13:41
 **/
public class UpdateStateByKey {
    /**
     * Stateful word count: reads lines from a socket on node3:9993, splits them
     * into words, and maintains a running count per word across micro-batches
     * with {@code updateStateByKey}. Results are printed each batch interval (1s).
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        // Reduce Spark's console noise to warnings only.
        Logger.getLogger("org.apache.spark").setLevel(Level.WARN);

        SparkConf conf = new SparkConf()
                .setAppName("repeatSparkStreaming")
                .setMaster("local[*]");
        JavaStreamingContext ssc = new JavaStreamingContext(conf, Durations.seconds(1));

        // updateStateByKey requires a checkpoint directory to persist state.
        // NOTE(review): "text" is a relative path — use a dedicated, durable
        // checkpoint directory in a real deployment.
        ssc.checkpoint("text");

        JavaReceiverInputDStream<String> lines = ssc.socketTextStream("node3", 9993);
        JavaDStream<String> words = lines.flatMap(x -> Arrays.asList(x.split(" ")).iterator());
        JavaPairDStream<String, Integer> pairs = words.mapToPair(x -> new Tuple2<>(x, 1));

        // Fold this batch's per-word ones into the running total carried in state.
        JavaPairDStream<String, Integer> wordCounts = pairs.updateStateByKey(
                (Function2<List<Integer>, Optional<Integer>, Optional<Integer>>) (batchValues, state) -> {
                    Integer total = state.orElse(0); // previous running total, 0 for a new key
                    for (Integer value : batchValues) {
                        total += value;
                    }
                    return Optional.of(total); // new running total for this key
                });

        // FIX: the original chained a reduceByKey here, which is redundant —
        // updateStateByKey already emits exactly one (word, total) pair per
        // key per batch, so there is nothing left to reduce.
        wordCounts.print();

        ssc.start();
        try {
            ssc.awaitTermination();
        } catch (InterruptedException e) {
            // FIX: restore the interrupt flag instead of just printing the trace.
            Thread.currentThread().interrupt();
        } finally {
            // Ensure the streaming context releases its resources on exit.
            ssc.stop();
        }
    }
}
