/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package jstx.test.streaming;

import java.util.List;
import java.util.regex.Pattern;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.StorageLevels;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;

import scala.Tuple2;

import com.google.common.collect.Lists;

/**
 * Counts words in UTF8 encoded, '\n' delimited text received from the network
 * in each batch interval.
 *
 * Usage: JavaNetworkWordCount &lt;hostname&gt; &lt;port&gt;
 * &lt;hostname&gt; and &lt;port&gt; describe the TCP server that Spark Streaming
 * would connect to in order to receive data.
 *
 * To run this on your local machine, you need to first run a Netcat server `$
 * nc -lk 9999` and then run the example `$ bin/run-example
 * org.apache.spark.examples.streaming.JavaNetworkWordCount localhost 9999`
 */
public final class JavaNetworkWordCount {
	/** Word delimiter for incoming lines; compiled once since it is hot-path work. */
	private static final Pattern SPACE = Pattern.compile(" ");

	/** Kept for compatibility with earlier debugging code; currently unused. */
	static long counts = 0;

	/**
	 * Connects to a TCP text server and prints per-batch word counts to stdout.
	 *
	 * @param args {@code args[0]} hostname and {@code args[1]} port of the
	 *             Netcat-style server; when no arguments are supplied, the
	 *             development defaults {@code hdp5:9988} are used.
	 */
	public static void main(String[] args) {
		String name = JavaNetworkWordCount.class.getSimpleName();

		// Fall back to the development defaults only when no arguments are
		// given. Previously the hard-coded array unconditionally overwrote
		// user input and made the usage check below unreachable.
		if (args.length == 0) {
			args = new String[] { "hdp5", "9988" };
		}
		if (args.length < 2) {
			System.err.println("Usage: JavaNetworkWordCount <hostname> <port>");
			System.exit(1);
		}

		SparkConf sparkConf = new SparkConf().setAppName(name);
		// Local mode with two threads: one for the receiver, one for processing.
		sparkConf.setMaster("local[2]");

		// Create the streaming context with a 10-second batch interval.
		JavaStreamingContext ssc = new JavaStreamingContext(sparkConf,
				Durations.seconds(10));

		// Receive '\n'-delimited UTF-8 text from the given host and port,
		// spilling received blocks to disk if memory runs short.
		JavaReceiverInputDStream<String> lines = ssc.socketTextStream(args[0],
				Integer.parseInt(args[1]), StorageLevels.MEMORY_AND_DISK_SER);

		// Split each line into words on single spaces.
		JavaDStream<String> words = lines
				.flatMap(new FlatMapFunction<String, String>() {
					private static final long serialVersionUID = 1L;

					@Override
					public Iterable<String> call(String x) {
						return Lists.newArrayList(SPACE.split(x));
					}
				});

		// Map each word to (word, 1), then sum the ones per word within a batch.
		JavaPairDStream<String, Integer> wordCounts = words.mapToPair(
				new PairFunction<String, String, Integer>() {
					private static final long serialVersionUID = 1L;

					@Override
					public Tuple2<String, Integer> call(String s) {
						return new Tuple2<String, Integer>(s, 1);
					}
				}).reduceByKey(new Function2<Integer, Integer, Integer>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Integer call(Integer i1, Integer i2) {
				return i1 + i2;
			}
		});

		// Print the first elements of each batch's counts to stdout.
		wordCounts.print();

		ssc.start();
		ssc.awaitTermination();
	}
}
