package com.roy.sparkDemos.streaming;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.StorageLevels;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.Time;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.Tuple2;

import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Arrays;

public class JavaNetWorkWindowWordCount {

    private static final Logger logger = LoggerFactory.getLogger(JavaNetWorkWindowWordCount.class);

    /**
     * Windowed word count over a socket text stream.
     *
     * Submit command:
     * ./spark-submit --master yarn --class "com.roy.sparkDemos.streaming.JavaNetWorkWindowWordCount" &lt;host&gt; &lt;port&gt;
     * Open a socket source on the server with: nc -lk &lt;port&gt;, then type messages to feed the stream.
     *
     * @param args args[0] = host of the socket source, args[1] = port
     * @throws InterruptedException if the streaming context is interrupted while awaiting termination
     */
    public static void main(String[] args) throws InterruptedException {
        if (null == args || args.length != 2) {
            System.out.println("Usage: JavaNetWorkWindowWordCount <host> <port>");
            System.exit(1);
        }
        logger.warn("start-batch ----------------------");
        String host = args[0];
        int port = Integer.parseInt(args[1]);

        int batch = 5000;       // one micro-batch every 5 seconds
        int windowSize = 10000; // each windowed computation sees the last 10 seconds of data
        int slideSize = 5000;   // the windowed computation fires every 5 seconds

        SparkConf sparkConf = new SparkConf();
        sparkConf.setAppName("JavaNetWordWindowWordCount");
        // Use setIfMissing so a master supplied via spark-submit (e.g. --master yarn,
        // as documented above) is not silently overridden by the hard-coded local default.
        sparkConf.setIfMissing("spark.master", "local[4]");
        JavaStreamingContext jssc = new JavaStreamingContext(sparkConf, new Duration(batch));

        JavaReceiverInputDStream<String> lines = jssc.socketTextStream(host, port, StorageLevels.MEMORY_AND_DISK);

        lines.filter(line -> null != line && line.length() > 0)
                .window(new Duration(windowSize), new Duration(slideSize))
                .foreachRDD((JavaRDD<String> rdd, Time time) -> {
                    LocalDateTime batchTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(time.milliseconds()), ZoneId.systemDefault());
                    logger.warn("Batch Start Time = {}", batchTime);
                    rdd.flatMap(words -> Arrays.asList(words.split(" ")).iterator())
                            .mapToPair(word -> new Tuple2<>(word, 1))
                            .reduceByKey(Integer::sum)
                            .foreach(res -> {  // res is Tuple2<>(word, count)
                                logger.warn("Got Word : {} appears {} times", res._1, res._2);
                            });
                });

        logger.warn("end-batch ----------------------");
        jssc.start();
        jssc.awaitTermination();
    }
}
