package TestSparkStreaming;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;

/**
 * Demonstrates the window() operator, which turns a source DStream into a
 * windowed DStream; syntactically the two look identical. This example
 * emits, every 6 seconds, the data received during the most recent 3 seconds.
 */
public class TestWindow {
    /** Directory where Spark Streaming persists its checkpoint data. */
    public static final String checkpointDir = "checkpoint_dir/";

    public static void main(String[] args) throws InterruptedException {
        // local[2]: at least two threads are required — one for the socket
        // receiver and one for batch processing.
        SparkConf conf = new SparkConf().setAppName("Test Window").setMaster("local[2]");
        JavaSparkContext sc = new JavaSparkContext(conf);

        // Micro-batch interval of 1 second.
        JavaStreamingContext ssc = new JavaStreamingContext(sc, new Duration(1000));
        ssc.checkpoint(checkpointDir);

        // Stream of text lines read from a local TCP socket (e.g. `nc -lk 9999`).
        JavaReceiverInputDStream<String> lines = ssc.socketTextStream("localhost", 9999);

        // window(windowLength, slideInterval): window length 3 s, slide 6 s —
        // every 6 seconds, output the data received in the most recent 3 seconds.
        // Both durations are multiples of the 1 s batch interval, as required.
        JavaDStream<String> windowed = lines.window(new Duration(1000 * 3), new Duration(1000 * 6));
        windowed.print();

        // Start receiving data and block until the streaming job terminates.
        ssc.start();
        ssc.awaitTermination();
    }
}
