package org.shj.spark.streaming;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.ConnectException;
import java.net.Socket;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Iterator;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.receiver.Receiver;

import com.google.common.io.Closeables;

import scala.Tuple2;

/**
 * A custom Spark Streaming {@link Receiver} that connects to a TCP server,
 * reads UTF-8 text line by line, and stores each line into Spark's memory
 * for processing. Includes a {@code main} driver that runs a word-count
 * over the received stream.
 */
public class MyReceiver extends Receiver<String>{
	private static final long serialVersionUID = 4347644859794552389L;
	private final String host;
	private final int port;

	/**
	 * @param host hostname of the TCP server to read lines from
	 * @param port TCP port of the server
	 */
	public MyReceiver(String host, int port) {
		// MEMORY_AND_DISK: spill received blocks to disk if memory is tight.
		super(StorageLevel.MEMORY_AND_DISK());
		this.host = host;
		this.port = port;
	}

	@Override
	public void onStart() {
		// Start the thread that receives data over a connection.
		// Daemon so it never keeps the JVM alive after the context stops;
		// named so it is identifiable in thread dumps.
		Thread receiverThread = new Thread("Socket Receiver") {
			@Override
			public void run() {
				receive();
			}
		};
		receiverThread.setDaemon(true);
		receiverThread.start();
	}

	@Override
	public void onStop() {
		// There is nothing much to do as the thread calling receive()
		// is designed to stop by itself once isStopped() returns true.
	}

	/**
	 * Connects to the configured host/port and stores every received line
	 * until the receiver is stopped or the connection breaks. On any failure
	 * the receiver asks Spark to restart it, which will retry the connection.
	 */
	private void receive() {
		try {
	      Socket socket = null;
	      BufferedReader reader = null;
	      String userInput = null;
	      try {
	        // Connect to the server; lines are expected to be UTF-8 encoded.
	        socket = new Socket(host, port);
	        reader = new BufferedReader(
	            new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8));
	        // Until stopped or connection broken, continue reading.
	        while (!isStopped() && (userInput = reader.readLine()) != null) {
	          System.out.println("Received data '" + userInput + "'");
	          store(userInput);
	        }
	      } finally {
	        // Close quietly: a failure while closing must not mask the real cause.
	        Closeables.close(reader, /* swallowIOException = */ true);
	        Closeables.close(socket,  /* swallowIOException = */ true);
	      }
	      // Restart in an attempt to connect again when server is active again.
	      restart("Trying to connect again");
	    } catch(ConnectException ce) {
	      // Restart if we could not connect to the server.
	      restart("Could not connect", ce);
	    } catch(Throwable t) {
	      restart("Error receiving data", t);
	    }
	}

	public static void main(String[] args) throws Exception {
		SparkConf conf = new SparkConf().setMaster("local[2]").setAppName("MyReceiver");

		// The second argument is the batch interval: data collected during each
		// interval is turned into one RDD and processed as a job.
		// NOTE: if an interval contains no data, an empty job is still scheduled,
		// wasting scheduler resources; in production, guard before submitting.
		JavaStreamingContext jsc = new JavaStreamingContext(conf, Durations.seconds(5));

		JavaReceiverInputDStream<String> lines = jsc.receiverStream(new MyReceiver("ubuntu1", 8888));

		// Split each line into words.
		JavaDStream<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Iterator<String> call(String line) throws Exception {
				return Arrays.asList(line.split(" ")).iterator();
			}
		});

		// Map each word to a (word, 1) pair.
		JavaPairDStream<String, Integer> pair = words.mapToPair(new PairFunction<String, String, Integer>() {
			private static final long serialVersionUID = 2823007325722993181L;

			@Override
			public Tuple2<String, Integer> call(String word) throws Exception {
				return new Tuple2<String, Integer>(word, 1);
			}
		});

		// Sum the counts per word within each batch.
		JavaPairDStream<String, Integer> wordCount = pair.reduceByKey(new Function2<Integer, Integer, Integer>() {
			private static final long serialVersionUID = 8560445270191804880L;

			@Override
			public Integer call(Integer v1, Integer v2) throws Exception {
				return v1 + v2;
			}
		});

		// An output operation is mandatory: without it, jsc.start() fails with
		// "No output operations registered, so nothing to execute".
		wordCount.print();

		jsc.start();
		jsc.awaitTermination();
		jsc.close();
	}

}
