package cn.lsh.spark.streaming;

import org.apache.log4j.lf5.LogLevel;
import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.Optional;
import org.apache.spark.api.java.function.*;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.Time;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import scala.Tuple2;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class SparkStreamingTest {

	/**
	 * Entry point: wires a socket text stream into a word-count pipeline and
	 * keeps the streaming context alive until externally terminated.
	 */
	public static void main(String[] args) throws InterruptedException {
		// Receiver-based socket input needs at least two threads: one to
		// receive the data and one to process it — hence local[2].
		SparkConf sparkConf = new SparkConf().setMaster("local[2]").setAppName("SparkStreamingTest");
		JavaSparkContext sparkContext = new JavaSparkContext(sparkConf);
		sparkContext.setLogLevel("WARN");
		// The second argument defines the length of one micro-batch.
		JavaStreamingContext ssc = new JavaStreamingContext(sparkContext, Durations.seconds(5));

		JavaReceiverInputDStream<String> lines = ssc.socketTextStream("localhost", 9999);
		JavaDStream<String> words = lines.flatMap(line -> Arrays.asList(line.split(" ")).iterator());
		JavaPairDStream<String, Integer> wordPairs = words.mapToPair(word -> new Tuple2<>(word, 1));

		testDStreamRddMethod(wordPairs, ssc, lines);

		// totalPDStream.print();

		ssc.start();
		// Blocks the current thread, receiving data in a loop until terminated.
		ssc.awaitTermination();
		// Not reached in normal operation; close() is how the streaming
		// threads would be shut down if termination ever returned.
		ssc.close();
	}

	/**
	 * Dispatcher for the operator demos below; uncomment exactly one call to
	 * run that demo (only one output pipeline is wired per run).
	 */
	public static void testDStreamRddMethod(JavaPairDStream<String, Integer> wordJavaPairDStream, JavaStreamingContext streamingContext, JavaReceiverInputDStream<String> socketTextStream) {
		// testForeachRDD(wordJavaPairDStream);
		// testUpdateStateByKey(wordJavaPairDStream, streamingContext);
		// testReduceByKeyAndWindow(wordJavaPairDStream, streamingContext);
		testTransform(socketTextStream);
	}

	/**
	 * Demonstrates foreachRDD: an output operator that exposes the JavaRDD
	 * inside each batch of the DStream. Non-operator code inside the callback
	 * runs on the driver, which makes it a suitable place to rebuild broadcast
	 * variables dynamically (e.g. re-reading a config file from HDFS per batch).
	 * The inner JavaRDD still needs an action of its own to trigger execution.
	 */
	public static void testForeachRDD(JavaPairDStream<String, Integer> wordJavaPairDStream) {
		JavaPairDStream<String, Integer> summedDStream = wordJavaPairDStream.reduceByKey((left, right) -> {
			System.out.println("2、dStream Transformation算子代码在Executor端执行，由outputOperator算子触发");
			return left + right;
		});
		// The callback below runs once per batch on the driver; the broadcast
		// it creates is therefore refreshed with every batch.
		summedDStream.foreachRDD((batchRdd, batchTime) -> {
			System.out.println("1、dStream outputOperator算子代码在Driver端执行");
			JavaSparkContext driverContext = new JavaSparkContext(batchRdd.context());
			Map<String, Object> property = new HashMap<>();
			// A dynamic broadcast variable could be populated here, e.g. from
			// configuration stored on HDFS.
			Broadcast<Map<String, Object>> broadcast = driverContext.broadcast(property);
			JavaPairRDD<String, Integer> suffixed = batchRdd.mapToPair(pair -> {
				System.out.println("3、rdd Transformation算子代码在Executor端执行，由Action算子触发");
				return new Tuple2<>(pair._1() + "-", pair._2());
			});

			// foreach is the action that actually triggers the batch's work.
			suffixed.foreach(pair -> {
				System.out.println("4、rdd Action算子代码在Executor端执行");
				System.out.println("rdd----" + pair);
			});
		});
	}

	/**
	 * Window demo: aggregates word counts over a sliding time window.
	 * Argument 1: merges the previous window's result with batches entering the window.
	 * Argument 2: removes batches leaving the window from the previous result.
	 * Argument 3: windowDuration — the span of time one window covers; must be a
	 *             whole multiple of the batch interval (window / slide = batches per window).
	 * Argument 4: slideDuration — how often the window advances; also a whole
	 *             multiple of the batch interval.
	 * @param wordJavaPairDStream word/count pairs produced per batch
	 */
	public static void testReduceByKeyAndWindow(JavaPairDStream<String, Integer> wordJavaPairDStream, JavaStreamingContext streamingContext) {
		// The three-argument reduceByKeyAndWindow keeps no previous result:
		// every slide recomputes over all batches currently inside the window.
		// JavaPairDStream<String, Integer> dStream = wordJavaPairDStream.reduceByKeyAndWindow(Integer::sum, Durations.seconds(15), Durations.seconds(5));
		// The four-argument variant incrementally maintains the previous window's
		// result and therefore requires checkpointing: each slide takes the old
		// total, adds the batches that entered and subtracts the ones that left.
		streamingContext.checkpoint("file:/checkpoint");
		// Previous window total + batch that just entered the window.
		Function2<Integer, Integer, Integer> addEntering = (runningTotal, entering) -> runningTotal + entering;
		// Previous window total - batch that just left the window.
		Function2<Integer, Integer, Integer> subtractLeaving = (runningTotal, leaving) -> runningTotal - leaving;
		// Every 5 seconds, emit totals covering the last 15 seconds.
		JavaPairDStream<String, Integer> windowed = wordJavaPairDStream.reduceByKeyAndWindow(
				addEntering, subtractLeaving, Durations.seconds(15), Durations.seconds(5));
		windowed.print();
	}

	/**
	 * Running-total demo via updateStateByKey. This operator requires a
	 * checkpoint directory: per-batch state is persisted to disk (a copy stays
	 * in memory) and flushed at the larger of the batch interval and 10 seconds.
	 */
	public static void testUpdateStateByKey(JavaPairDStream<String, Integer> wordJavaPairDStream, JavaStreamingContext streamingContext) {
		streamingContext.checkpoint("file:/checkpoint");
		// First lambda parameter: the values grouped under the current key in
		// this batch; second: the state computed for that key by the previous batch.
		JavaPairDStream<String, Integer> totalDStream = wordJavaPairDStream.updateStateByKey(
				(List<Integer> currentValues, Optional<Integer> previousState) -> {
					int runningTotal = previousState.or(0);
					for (int value : currentValues) {
						runningTotal += value;
					}
					return Optional.of(runningTotal);
				});
		totalDStream.print();
	}

	/**
	 * Similar to foreachRDD, except transform is a Transformation operator:
	 * it exposes the RDD inside the DStream and maps it to another RDD. No
	 * action is needed inside the callback, but the resulting DStream still
	 * needs an output operator to trigger execution. Like foreachRDD, the
	 * callback runs on the driver and can pick up dynamically rebuilt
	 * broadcast variables.
	 * @param lineDStream raw lines received from the socket
	 */
	public static void testTransform(JavaReceiverInputDStream<String> lineDStream) {
		JavaSparkContext jsc = new JavaSparkContext(lineDStream.context().sparkContext());
		// Blacklist of first-column names to filter out, shipped as a broadcast.
		Broadcast<List<String>> bcBlackList = jsc.broadcast(Arrays.asList("zhangsan", "lisi"));
		JavaPairDStream<String, String> pairNameDStream = lineDStream.mapToPair(line -> {
			System.out.println("2、dStream Transformation算子代码在Executor端执行，由outputOperator算子触发");
			return new Tuple2<>(line.split(" ")[0], line);
		});
		JavaDStream<String> transform = pairNameDStream.transform(nameRdd -> {
			System.out.println("1、dStream transform算子代码在Driver端执行");
			// As with foreachRDD, a broadcast variable could be rebuilt here per batch.
			JavaPairRDD<String, String> survivors = nameRdd.filter(pair -> {
				System.out.println("3、rdd Transformation算子代码在Executor端执行，由Action算子触发");
				return !bcBlackList.value().contains(pair._1);
			});
			return survivors.map(pair -> pair._2);
		});
		transform.print();
	}


}
