package TestSparkStreaming;

import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.StreamingContext;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.dstream.ReceiverInputDStream;

/**
 * Minimal Spark Streaming example: reads newline-delimited text from a local
 * socket (e.g. {@code nc -lk 9999}) in 10-second batches and prints each
 * batch's first elements to stdout.
 *
 * <p>Uses the Spark <em>Java</em> API ({@link JavaStreamingContext},
 * {@link JavaReceiverInputDStream}) rather than the Scala classes, so
 * subsequent transformations can take {@code org.apache.spark.api.java.function}
 * lambdas such as {@link FlatMapFunction} directly.
 */
public class TestSparkStreaming {
    /**
     * Entry point: builds the streaming context, wires the socket source,
     * starts the job and blocks until it is stopped or fails.
     *
     * @param args unused
     * @throws InterruptedException if the driver thread is interrupted while
     *         waiting for the streaming job to terminate
     */
    public static void main(String[] args) throws InterruptedException {
        // local[2]: at least two threads are required — the socket receiver
        // permanently occupies one, the other processes the batches.
        SparkConf sparkConf = new SparkConf()
                .setAppName(TestSparkStreaming.class.getName())
                .setMaster("local[2]");

        // Batch interval of 10 seconds; the JavaStreamingContext creates its
        // own SparkContext from the conf, no separate one is needed.
        JavaStreamingContext streamingContext =
                new JavaStreamingContext(sparkConf, new Duration(1000 * 10));

        // Receive text from 127.0.0.1:9999; MEMORY_AND_DISK spills received
        // blocks to disk when executor memory runs out instead of dropping them.
        JavaReceiverInputDStream<String> dStream =
                streamingContext.socketTextStream("127.0.0.1", 9999, StorageLevel.MEMORY_AND_DISK());

        // Print the first elements of every batch to stdout.
        dStream.print();

        // Start the computation and wait until it is terminated externally.
        streamingContext.start();
        streamingContext.awaitTermination();
    }
}
