package com.neusoft.bd.myspark.spark01.ss;

import org.apache.spark.SparkConf;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import scala.Tuple2;

import java.util.Arrays;

/**
 * Spark Streaming window-operation demo: reads lines from a socket,
 * splits them into words, and counts word occurrences over a sliding
 * window rather than a single batch.
 *
 * <p>Run {@code nc -lk 9999} first, then start this program.
 */
public class SS06_Window {
    public static void main(String[] args) throws Exception {
        SparkConf conf = new SparkConf();
        conf.setMaster("local[*]").setAppName("SparkMaster");
        // Batch interval: one micro-batch every 3 seconds.
        final JavaStreamingContext jsc = new JavaStreamingContext(conf, Duration.apply(3000));
        final JavaReceiverInputDStream<String> socketDS = jsc.socketTextStream("localhost", 9999);
        // Split each incoming line into individual words.
        JavaDStream<String> stream = socketDS.flatMap(
                line -> Arrays.asList(line.split(" ")).iterator()
        );
        // Map each word to a (word, 1) pair for counting.
        JavaPairDStream<String, Integer> stringIntegerJavaPairDStream =
                stream.mapToPair(word -> new Tuple2<>(word, 1));
        /**
         * A window is like a cup and the stream data is the water: the window
         * covers a fixed time span, and "window data" is all data that arrived
         * within that span.
         *
         * Both the window duration and the slide duration MUST be integer
         * multiples of the batch interval (3000 ms), otherwise Spark throws
         * at runtime. Here: window = 9 s (3 batches), slide = 6 s (2 batches).
         */
        JavaPairDStream<String, Integer> window = stringIntegerJavaPairDStream.window(
                Duration.apply(9000),
                Duration.apply(6000));

        // Aggregate over the WINDOWED stream (not the raw per-batch stream),
        // so each output covers the full 9-second window.
        JavaPairDStream<String, Integer> wordsDS = window.reduceByKey(Integer::sum);

        /**
         * Output (action) operator — without at least one registered output
         * operation, Spark fails with:
         * "No output operations registered, so nothing to execute".
         */
        wordsDS.print();
        jsc.start();
        jsc.awaitTermination();
    }
}
