package org.huangrui.spark.java.streaming;

import org.apache.spark.SparkConf;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import scala.Tuple2;

import java.util.Arrays;

/**
 * @Author hr
 * @Create 2024-10-21 23:35
 */
/**
 * Sliding-window word count demonstrating the incremental (inverse-function)
 * form of {@code reduceByKeyAndWindow}.
 *
 * <p>Reads whitespace-separated words from a TCP socket on localhost:9999
 * (e.g. started with {@code nc -lk 9999}) and prints per-word counts over a
 * 9-second window sliding every 3 seconds.
 *
 * @Author hr
 * @Create 2024-10-21 23:35
 */
public class SparkStreaming06_State_Window_1 {
    public static void main(String[] args) throws InterruptedException {
        // Local mode using all available cores; 3-second batch interval.
        SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("SparkStreaming");
        JavaStreamingContext jsc = new JavaStreamingContext(conf, new Duration(3 * 1000L));

        // Checkpointing is required by the inverse-function form of
        // reduceByKeyAndWindow: the running window state must be recoverable.
        jsc.checkpoint("cp");

        // Receive raw text lines from the socket.
        final JavaReceiverInputDStream<String> socketDS = jsc.socketTextStream("localhost", 9999);

        // Split each line into individual words.
        final JavaDStream<String> flatDS = socketDS.flatMap(
                line -> Arrays.asList(line.split(" ")).iterator()
        );

        // Pair every word with an initial count of 1.
        final JavaPairDStream<String, Integer> wordDS = flatDS.mapToPair(
                word -> new Tuple2<>(word, 1)
        );

        // reduceByKeyAndWindow with an inverse function: when the window is
        // large and the slide is small, Spark updates the previous window's
        // result by adding the batches that entered and subtracting the
        // batches that left, instead of re-reducing the whole window — this
        // avoids repeated computation and improves performance.
        // Window = 9s, slide = 3s (both must be multiples of the batch interval).
        // NOTE(review): with the inverse form, keys whose count drops to 0
        // remain in the output; use the reduceByKeyAndWindow overload that
        // takes a filter function to evict them if that matters.
        final JavaPairDStream<String, Integer> wordCountDS = wordDS.reduceByKeyAndWindow(
                Integer::sum,        // reduce: fold counts entering the window
                (a, b) -> a - b,     // inverse reduce: remove counts leaving the window
                new Duration(9 * 1000L),
                new Duration(3 * 1000L));

        // Fix: the original chained an extra reduceByKey(Integer::sum) here.
        // reduceByKeyAndWindow already produces a single entry per key, so
        // that second by-key reduction was a redundant shuffle with no effect
        // on the values; it has been removed.
        wordCountDS.print();

        jsc.start();
        jsc.awaitTermination();
    }
}
