package org.huangrui.spark.java.streaming;

import org.apache.spark.SparkConf;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import scala.Tuple2;

import java.util.Arrays;

/**
 * nc -lp 9999
 * nc -L -p 9999
 * @Author hr
 * @Create 2024-10-21 18:19
 */
/**
 * Demonstrates that every DStream pipeline needs at least one output operation,
 * and shows how to re-implement {@code print()} manually with {@code foreachRDD}.
 */
public class SparkStreaming04_Print {
    public static void main(String[] args) throws InterruptedException {
        // Local-mode streaming context with a 5-second batch interval.
        SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("SparkStreaming");
        JavaStreamingContext jsc = new JavaStreamingContext(conf, new Duration(5 * 1000L));

        // Read lines from a local socket; start a server first (see class Javadoc: `nc -lp 9999`).
        JavaReceiverInputDStream<String> socketTextStream = jsc.socketTextStream("localhost", 9999);

        // Classic word count: split each line on spaces, map each word to (word, 1),
        // then sum counts per word within each batch.
        JavaPairDStream<String, Integer> wordCountDS = socketTextStream
                .flatMap(line -> Arrays.asList(line.split(" ")).iterator())
                .mapToPair(word -> new Tuple2<>(word, 1))
                .reduceByKey(Integer::sum);

        // NOTE: DStream wraps RDDs, but not every RDD method is exposed on the
        // wrapper (e.g. sortBy, sortByKey). When such a method is needed, drop
        // down to the underlying RDD via transform/foreachRDD, as done below.

        // print() is the DStream analogue of an RDD action. Without at least one
        // registered output operation, start() fails with:
        //   IllegalArgumentException: No output operations registered, so nothing to execute
        // wordCountDS.print();

        // Manual re-implementation of print() via foreachRDD. Use the
        // (rdd, time) overload so the header shows the batch time — what
        // print() itself reports — rather than the driver's wall-clock time
        // at the moment this closure happens to run.
        wordCountDS.foreachRDD(
                (rdd, time) -> {
                    System.out.println("----------------------------------------");
                    System.out.println("Time :" + time.milliseconds() + " ms");
                    System.out.println("----------------------------------------");
                    // collect() pulls the whole batch to the driver — fine for a
                    // demo, unsafe for large batches.
                    rdd.collect().forEach(System.out::println);
                }
        );

        jsc.start();
        jsc.awaitTermination();
    }
}
