/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package jstx.test.streaming;

import java.util.HashMap;
import java.util.Map;
import java.util.regex.Pattern;

import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;

import scala.Tuple2;

import com.google.common.collect.Lists;

/**
 * Test Kafka consumer: a Spark Streaming word count over messages consumed from Kafka.
 * @author zhanghongliang@hiveview.com
 * @date   2015-01-30 16:24:12
 */
public final class JavaKafkaConsumerWordCount {
	/** Delimiter used to split each incoming message into words. */
	private static final Pattern SPACE = Pattern.compile(" ");
	static Logger logger = Logger.getLogger(JavaKafkaConsumerWordCount.class);

	/** Utility/driver class — not instantiable. */
	private JavaKafkaConsumerWordCount() {
	}

	/**
	 * Entry point. Expects four arguments:
	 * args[0] consumer group id, args[1] ZooKeeper connect string,
	 * args[2] comma-separated topic list, args[3] receiver threads per topic.
	 * Falls back to built-in test defaults when no arguments are supplied.
	 */
	public static void main(String[] args) {
		// BUG FIX: the original unconditionally overwrote args with hard-coded
		// test values, making real command-line arguments unusable. Use the
		// defaults only when nothing was passed on the command line.
		if (args.length == 0) {
			args = new String[]{
					"testclient",
					"hdp1:2181,hdp2:2181,hdp3:2181",
					"topic1",
					"1"
			};
		}

		// BUG FIX: four arguments are required (args[3] is read below), so the
		// guard must be length < 4, not < 3. Exit non-zero to signal the error.
		if (args.length < 4) {
			System.out.println("需要传入4个参数");
			System.out.println("1、client id");
			System.out.println("2、zookeeper 连接");
			System.out.println("3、topics 用逗号分隔");
			System.out.println("4、numPartitions 几个分片");
			System.exit(1);
		}

		String groupId = args[0];
		String zookeepers = args[1];
		String topics = args[2];
		Integer numPartitions = Integer.parseInt(args[3]);

		// Each topic is consumed with the same number of receiver threads.
		Map<String, Integer> topicsMap = new HashMap<String, Integer>();
		for (String topic : topics.split(",")) {
			topicsMap.put(topic, numPartitions);
		}

		// Micro-batch interval: how often a word count is computed.
		Duration batchInterval = Durations.seconds(20);
		SparkConf sparkConf = new SparkConf().setAppName("JavaKafkaConsumerWordCount");

		// Fall back to a local master when none is configured via the
		// spark.master system property (useful for local testing).
		String master = System.getProperty("spark.master");
		if (master == null || "".equals(master.trim())) {
			logger.warn("no master,default master local");
			sparkConf.setMaster("local[2]");
		}

		JavaStreamingContext ssc = new JavaStreamingContext(sparkConf,
				batchInterval);

		// Receiver-based Kafka stream of (key, message) pairs.
		JavaPairReceiverInputDStream<String, String> kafkaStream = KafkaUtils
				.createStream(ssc, zookeepers, groupId, topicsMap);

		// Keep only the message payload (the tuple's second element).
		JavaDStream<String> lines = kafkaStream
				.map(new Function<Tuple2<String, String>, String>() {
					@Override
					public String call(Tuple2<String, String> arg0)
							throws Exception {
						logger.warn(Thread.currentThread().getName() +  " msg1:" + arg0._1 + "|msg2:" + arg0._2);
						return arg0._2();
					}
				});

		// Split each line into words on single spaces.
		JavaDStream<String> words = lines
				.flatMap(new FlatMapFunction<String, String>() {
					@Override
					public Iterable<String> call(String arg0) throws Exception {
						logger.warn(Thread.currentThread().getName() +  " flatmap str:" + arg0);
						return Lists.newArrayList(SPACE.split(arg0));
					}
				});

		// Map each word to (word, 1) and sum the counts per word within the batch.
		JavaPairDStream<String, Integer> wordCounts = words
				.mapToPair(new PairFunction<String, String, Integer>() {
					@Override
					public Tuple2<String, Integer> call(String s) {
						logger.warn(Thread.currentThread().getName() +  " wordcount mapToPair:" + s);
						return new Tuple2<String, Integer>(s, 1);
					}
				})
				.reduceByKey(
						new org.apache.spark.api.java.function.Function2<Integer, Integer, Integer>() {

							@Override
							public Integer call(Integer arg0, Integer arg1)
									throws Exception {
								logger.warn(Thread.currentThread().getName() + " wordcount reduceByKey:" + arg0 + "|" + arg1);
								return arg0 + arg1;
							}
						});

		// Print the first few counts of each batch to stdout.
		wordCounts.print();

		ssc.start();
		ssc.awaitTermination();
	}
}
