package org.huangrui.spark.java.streaming;

import org.apache.spark.SparkConf;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import scala.Tuple2;

import java.util.Arrays;

/**
 * Spark Streaming demo illustrating where each piece of code executes
 * (Driver vs. Executor).
 *
 * @author hr
 * Created 2024-10-22
 */
public class SparkStreaming07_Method {
    public static void main(String[] args) throws InterruptedException {
        // Local-mode word-count stream; illustrates where each piece of code
        // executes: Driver (graph construction / per-batch) vs. Executor (per-record).
        SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("SparkStreaming");
        // Micro-batch interval of 3 seconds (idiomatic factory instead of raw millis).
        JavaStreamingContext jsc = new JavaStreamingContext(conf, Durations.seconds(3));

        // Receives text lines from a local socket; feed it with e.g. `nc -lk 9999`.
        final JavaReceiverInputDStream<String> socketDS = jsc.socketTextStream("localhost", 9999);

        // Split each line on single spaces and pair every word with the count 1.
        JavaPairDStream<String, Integer> wordToOne = socketDS
                .flatMap(line -> Arrays.asList(line.split(" ")).iterator())
                .mapToPair(word -> new Tuple2<>(word, 1));

        // Code at this level runs ONCE on the Driver, while the DStream graph is built.
        // int i = 10; (Driver, once)
        wordToOne.foreachRDD(
                rdd -> {
                    // This lambda runs on the Driver, once per batch interval.
                    // int j = 20; (Driver, periodically)
                    // rdd.collect().forEach(System.out::println);
                    rdd.foreach(
                            num -> {
                                // This innermost lambda is serialized and shipped to
                                // Executors, where it runs once per record.
                                // int k = 30; (Executor N)
                                System.out.println(num);
                            }
                    );
                }
        );

        // NOTE(review): the result of this map() is discarded and no output
        // operation is attached to it, so this transformation is never actually
        // executed — it exists only to show that the lambda body WOULD run on
        // Executors if the lineage were materialized.
        wordToOne.map(
                tuple -> {
                    // Would run on Executors, once per record, if materialized.
                    return tuple;
                }
        );

        // Start the streaming computation and block until it terminates.
        jsc.start();
        jsc.awaitTermination();
    }
}
