package org.huangrui.spark.java.streaming;

import org.apache.spark.SparkConf;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.receiver.Receiver;

import java.util.Random;
import java.util.concurrent.TimeUnit;

/**
 * Spark Streaming example: defining and using a custom {@code Receiver}.
 *
 * @author hr
 * @since 2024-10-21
 */
/**
 * Entry point: builds a local StreamingContext with a 5-second batch interval,
 * attaches the custom {@link MyReceiver} as the input source, and prints each
 * batch to stdout until the job is terminated.
 */
public class SparkStreaming02_DIY {
    public static void main(String[] args) throws InterruptedException {
        // Run locally using all available cores.
        SparkConf sparkConf = new SparkConf()
                .setMaster("local[*]")
                .setAppName("SparkStreaming");

        // Batch interval: 5000 ms.
        Duration batchInterval = new Duration(5 * 1000L);
        JavaStreamingContext streamingContext = new JavaStreamingContext(sparkConf, batchInterval);

        // Ingest data through the custom receiver; received blocks kept in memory only.
        JavaReceiverInputDStream<String> lines =
                streamingContext.receiverStream(new MyReceiver(StorageLevel.MEMORY_ONLY()));
        lines.print();

        // Start the streaming job and block the driver until it stops.
        streamingContext.start();
        streamingContext.awaitTermination();
    }
}
/**
 * Custom Spark Streaming receiver that emits a random integer in [0, 100)
 * as a String roughly every 100 ms until stopped.
 *
 * <p>Thread-safety: {@code flag} must be {@code volatile} because
 * {@link #onStop()} is called from a different thread than the collection
 * loop started in {@link #onStart()}; without it the stop signal may never
 * become visible to the worker and the loop could run forever.
 */
class MyReceiver extends Receiver<String> {
    // Loop guard for the worker thread; volatile for cross-thread visibility.
    private volatile boolean flag = true;

    /**
     * @param storageLevel storage level Spark uses to persist received data
     */
    public MyReceiver(StorageLevel storageLevel) {
        super(storageLevel);
    }

    /**
     * Starts the data-generating worker thread. Per the Receiver contract,
     * onStart() must not block, so all work happens on the new thread.
     */
    @Override
    public void onStart() {
        new Thread(() -> {
            // Hoisted out of the loop: one RNG instead of one per iteration.
            Random random = new Random();
            // Also honor isStopped() so the loop exits if Spark stops the receiver.
            while (flag && !isStopped()) {
                store(random.nextInt(100) + "");
                // Pause 100 ms between records.
                try {
                    TimeUnit.MILLISECONDS.sleep(100);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt(); // restore interrupt status
                    break;                              // stop generating on interruption
                }
            }
        }, "MyReceiver-worker").start();
    }

    /** Signals the worker thread to stop generating data. */
    @Override
    public void onStop() {
        flag = false;
    }
}
