package com.stream;

import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.*;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.sql.sources.In;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import scala.Tuple2;
import scala.actors.threadpool.Arrays;

import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

//On Linux, run `nc -lk 9999` to open the port and type input data
/**
 * Spark Streaming demo: reads text lines from a socket ("server0":9999, fed
 * e.g. via {@code nc -lk 9999}), computes a per-batch word count, and drops
 * words found in a blacklist file that is shipped to executors as a broadcast
 * variable.
 */
public class StreamingTest {

    public static void main(String[] args) {
        // local[2]: one thread for the socket receiver, one for processing.
        SparkConf conf = new SparkConf();
        conf.setMaster("local[2]").setAppName("streaming-test");
        JavaSparkContext sc = new JavaSparkContext(conf);
        // 5-second micro-batches.
        JavaStreamingContext jsc = new JavaStreamingContext(sc, Durations.seconds(5));

        JavaReceiverInputDStream<String> lines = jsc.socketTextStream("server0", 9999);

        // Split each line into words. NOTE: qualified java.util.Arrays is used
        // deliberately — the file imports scala.actors.threadpool.Arrays, whose
        // asList returns a raw (unchecked) List.
        JavaDStream<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public Iterable<String> call(String s) throws Exception {
                return java.util.Arrays.asList(s.split(" "));
            }
        });

        // Map every word to a (word, 1) pair.
        JavaPairDStream<String, Integer> pairs = words.mapToPair(new PairFunction<String, String, Integer>() {
            @Override
            public Tuple2<String, Integer> call(String word) throws Exception {
                return new Tuple2<>(word, 1);
            }
        });

        // Per-batch word counts.
        JavaPairDStream<String, Integer> counts = pairs.reduceByKey(new Function2<Integer, Integer, Integer>() {
            @Override
            public Integer call(Integer v1, Integer v2) throws Exception {
                return v1 + v2;
            }
        });

        counts.foreachRDD(new VoidFunction<JavaPairRDD<String, Integer>>() {
            @Override
            public void call(JavaPairRDD<String, Integer> rdd) throws Exception {
                System.out.println("=======黑名单列表=====");
                // Wrap the streaming app's existing SparkContext — this does
                // not create a second context.
                JavaSparkContext javaSparkContext = new JavaSparkContext(rdd.context());
                // Blacklist file: comma-separated entries per line.
                JavaRDD<String> blacklistFile = javaSparkContext.textFile("./sparkstream/heimingdan.txt");
                JavaRDD<String> blacklistWords = blacklistFile.flatMap(new FlatMapFunction<String, String>() {
                    @Override
                    public Iterable<String> call(String s) throws Exception {
                        return java.util.Arrays.asList(s.split(","));
                    }
                });
                // Broadcast the (small) blacklist so each executor gets one copy.
                final Broadcast<List<String>> broadcast =
                        javaSparkContext.broadcast(blacklistWords.collect());
                for (String s : broadcast.value()) {
                    System.out.println("s = " + s);
                }

                // BUG FIX: the original predicate always returned true, so the
                // broadcast blacklist was never applied. Keep only words that
                // are NOT on the blacklist.
                JavaPairRDD<String, Integer> filtered = rdd.filter(new Function<Tuple2<String, Integer>, Boolean>() {
                    @Override
                    public Boolean call(Tuple2<String, Integer> tuple2) throws Exception {
                        System.out.println("访问人及次数 = " + tuple2);
                        return !broadcast.value().contains(tuple2._1);
                    }
                });
                // Materialize to force evaluation (and the side-effect prints).
                filtered.collect();
            }
        });

        jsc.start();
        // Blocks until the streaming application is stopped.
        jsc.awaitTermination();

//        sc.close();
    }
}