package com.hw.spark.service;

import java.util.Arrays;
import java.util.Date;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.VoidFunction2;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import com.hw.spark.listener.SparkStreamListener;

import scala.Tuple2;


@Service
public class SparkStreamService {

	private static final Logger log = LoggerFactory.getLogger(SparkStreamService.class);

	/** Spark configuration: local mode with 2 threads plus a custom SQL listener. */
	private final SparkConf sc = new SparkConf()
			.setMaster("local[2]")
			.set("spark.extraListeners", "com.hw.spark.listener.SparkSQLListener")
			.setAppName("spark-stream-tcp");

	// The streaming context wraps the SparkContext; one micro-batch every 2 minutes.
	private final JavaStreamingContext jsc = new JavaStreamingContext(sc, Durations.seconds(60 * 2));

	private final String host = "127.0.0.1";

	/**
	 * Runs a TCP word-count stream: listens on {@code host}:9999, splits each
	 * incoming line into words, counts occurrences per batch, prints the counts
	 * and saves each batch RDD as text files. Blocks until the streaming
	 * context terminates.
	 *
	 * @throws InterruptedException if the thread awaiting termination is interrupted
	 */
	public void sparkStreamTcp() throws InterruptedException {

		jsc.addStreamingListener(new SparkStreamListener());

		// Create a DStream listening on the configured host/port.
		JavaReceiverInputDStream<String> lines = jsc.socketTextStream(host, 9999);
		log.info("--------------------启动监听本地端口：9999--------------------");
		// Split each line into words.
		JavaDStream<String> words = lines.flatMap(s -> Arrays.asList(s.split(" ")).iterator());
		// Pair each word with an initial count of 1.
		JavaPairDStream<String, Integer> pairs = words.mapToPair(x -> new Tuple2<>(x, 1));
		// Sum counts per word within the batch.
		JavaPairDStream<String, Integer> wordcount = pairs.reduceByKey(Integer::sum);
		wordcount.print();

		// Persist every batch. A millisecond timestamp replaces the deprecated
		// Date#getSeconds() (range 0-59), which made output directories collide
		// across batches — saveAsTextFile fails if the target path already exists.
		wordcount.foreachRDD(r -> r.saveAsTextFile("F:\\DStream" + System.currentTimeMillis()));

		// Start the computation and block until termination.
		log.info("--------------------开始计算--------------------");
		jsc.start();
		jsc.awaitTermination();
	}

	/**
	 * Example Spark function combining two pair RDDs.
	 *
	 * <p>Declared {@code static} so it does not capture the enclosing service
	 * instance: a non-static inner class holds a hidden reference to
	 * {@code SparkStreamService} (whose {@code JavaStreamingContext} is not
	 * serializable), which would cause "Task not serializable" if this function
	 * were ever shipped to executors.
	 *
	 * <p>Common Spark function interfaces: Function, Function2, VoidFunction,
	 * VoidFunction2, MapFunction, FlatMapFunction, PairFunction, etc.
	 *
	 * @author hw
	 */
	static class JavaFunction implements VoidFunction2<JavaPairRDD<String, Integer>, JavaPairRDD<String, Integer>> {

		@Override
		public void call(JavaPairRDD<String, Integer> v1, JavaPairRDD<String, Integer> v2) throws Exception {
			// NOTE(review): reassigning the parameter has no effect outside this
			// method — the union result is discarded. Kept as-is pending a real
			// aggregation target; confirm intended behavior with the author.
			v2 = v1.union(v2);
		}
	}

}
