package com.atguigu.bigdata.spark.streaming;

import org.apache.spark.SparkConf;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.receiver.Receiver;
import scala.util.Random;

public class SparkStreaming03_DIY_JAVA {
    public static void main(String[] args) throws InterruptedException {
        // Set up the streaming environment with a 3-second batch interval.
        SparkConf sparkConf = new SparkConf()
                .setMaster("local[*]")
                .setAppName("SparkStreaming");
        JavaStreamingContext streamingContext =
                new JavaStreamingContext(sparkConf, Durations.seconds(3));

        // Ingest records from the custom receiver and print each batch.
        JavaReceiverInputDStream<String> lines =
                streamingContext.receiverStream(new MyReciver(StorageLevel.MEMORY_ONLY()));
        lines.print();

        // Start the computation and block until it is terminated.
        streamingContext.start();
        streamingContext.awaitTermination();
    }
}

/**
 * Custom Spark Streaming receiver that emits a random-number message roughly
 * every 500 ms until the streaming context stops it.
 *
 * <p>NOTE(review): the class name keeps the original spelling ("MyReciver")
 * because the driver references it by that name.
 */
class MyReciver extends Receiver<String> {
    // volatile: written by onStop() from a Spark-managed thread and read by
    // the collection thread started in onStart(); without volatile the
    // collection thread may never observe the stop signal.
    private volatile boolean flag = true;

    public MyReciver(StorageLevel storageLevel) {
        super(storageLevel);
    }

    /**
     * Starts a daemon-style collection thread that produces one message
     * every 500 ms and hands it to Spark via {@code store}.
     */
    @Override
    public void onStart() {
        new Thread(new Runnable() {
            @Override
            public void run() {
                // Create the RNG once instead of once per message.
                Random random = new Random();
                while (flag) {
                    try {
                        Thread.sleep(500);
                        String msg = "采集数据为" + random.nextInt(10);
                        store(msg);
                    } catch (InterruptedException e) {
                        // Restore the interrupt status and exit the loop
                        // cleanly instead of crashing the receiver thread.
                        Thread.currentThread().interrupt();
                        return;
                    }
                }
            }
        }).start();
    }

    /** Signals the collection thread to stop at its next loop check. */
    @Override
    public void onStop() {
        flag = false;
    }
}
