package com.atguigu.bigdata.spark.streaming;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.Optional;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import scala.Tuple2;

import java.util.List;

/**
 * Stateful word counting over a socket text stream.
 *
 * <p>Reads lines from localhost:9999 in 3-second batches and keeps a running
 * per-key count across batches via {@code updateStateByKey}, which requires a
 * checkpoint directory to persist the accumulated state.
 */
public class SparkStreaming05_State_JAVA {
    public static void main(String[] args) throws InterruptedException {
        // Local streaming context with a 3-second batch interval.
        SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("SparkStreaming");
        JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(3));

        // Input DStream: text lines received from a local socket.
        JavaReceiverInputDStream<String> lines = jssc.socketTextStream("localhost", 9999);

        // Stateless operations only see the current batch. To accumulate
        // results across batches (stateful processing), a checkpoint
        // directory must be configured so the state can be persisted.
        jssc.checkpoint("cp");

        // Pair each incoming line with an initial count of 1.
        JavaPairDStream<String, Integer> pairs =
                lines.mapToPair(line -> new Tuple2<>(line, 1));

        // updateStateByKey: merge this batch's values for a key with the
        // previously buffered state for that key.
        //   - first argument:  the new values for the key in this batch
        //   - second argument: the buffered (previous) state for the key
        JavaPairDStream<String, Integer> state = pairs.updateStateByKey(
                (List<Integer> newValues, Optional<Integer> buffered) -> {
                    int total = buffered.orElse(0);
                    for (Integer value : newValues) {
                        total += value;
                    }
                    return Optional.of(total);
                });

        state.print();
        // 1. Start the receiver.
        jssc.start();
        // 2. Block until the streaming context is terminated.
        jssc.awaitTermination();

    }
}
