package com.demo.spark.stream;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import scala.Tuple2;

import java.util.Arrays;
import java.util.Iterator;

public class StreamTemplate {

    /**
     * Minimal Spark Streaming word count over a TCP text source.
     *
     * <p>Connects to {@code localhost:9999} (feed it with e.g. {@code nc -lk 9999}),
     * splits each incoming line on single spaces, counts word occurrences per
     * 3-second micro-batch, and prints up to 20 (word, count) pairs per batch.
     * Runs until the streaming context is terminated externally.
     *
     * @param args unused
     * @throws InterruptedException if the awaiting thread is interrupted
     */
    public static void main(String[] args) throws InterruptedException {

        SparkConf sparkConf = new SparkConf().setMaster("local[*]").setAppName("data frame ");
        JavaSparkContext sparkContext = new JavaSparkContext(sparkConf);
        // Silence Spark's verbose INFO logging so the per-batch output stays readable.
        sparkContext.setLogLevel("ERROR");

        // Micro-batch interval: one RDD of received lines every 3 seconds.
        JavaStreamingContext javaStreamingContext = new JavaStreamingContext(sparkContext, Durations.seconds(3));
        JavaReceiverInputDStream<String> lines = javaStreamingContext.socketTextStream("localhost", 9999);

        // Tokenize on single spaces. A lambda replaces the original anonymous
        // FlatMapFunction for consistency with the lambdas used below.
        JavaDStream<String> words = lines.flatMap(line -> Arrays.asList(line.split(" ")).iterator());

        // Classic word count: map each word to (word, 1), then sum counts per key.
        JavaPairDStream<String, Integer> wordCounts =
                words.mapToPair(word -> new Tuple2<>(word, 1))
                     .reduceByKey(Integer::sum);

        // Print up to 20 entries of each batch's result to stdout.
        wordCounts.print(20);

        javaStreamingContext.start();
        javaStreamingContext.awaitTermination(); // blocks until stopped or interrupted

    }
}
