package com._58city.spark.examples;

import java.io.Serializable;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import kafka.serializer.StringDecoder;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;
import scala.Tuple2;

/**
 * Demo of the old receiver-based Spark Streaming Kafka consumer: reads String
 * messages from one topic, counts value occurrences per 500 ms batch, and
 * folds the counts into a driver-side map.
 */
public class JavaKafkaStream
  implements Serializable
{
  private static final long serialVersionUID = -4335551237319467936L;

  // transient: Spark serializes the enclosing instance of the anonymous
  // Functions below; the context and RNG must not be shipped to executors.
  private transient JavaStreamingContext ssc = null;
  private transient Random random = new Random();

  /** ZooKeeper connect string used by the high-level Kafka consumer. */
  private static final String ZK_CONNECT = "10.9.14.26:2181,10.9.14.27:2181,10.9.14.28:2181,10.9.14.29:2181";

  /**
   * Creates a local[4] streaming context with a 500 ms batch interval.
   */
  public void setUp()
  {
    SparkConf sparkConf = new SparkConf().setMaster("local[4]").setAppName(getClass().getSimpleName());

    this.ssc = new JavaStreamingContext(sparkConf, new Duration(500L));
  }

  /**
   * Stops the streaming context, if one was created, and clears the reference.
   */
  public void tearDown()
  {
    if (this.ssc != null)
    {
      this.ssc.stop();
      this.ssc = null;
    }
  }

  /**
   * Consumes topic "testcase1", counts message-value occurrences per batch,
   * and accumulates running totals into a synchronized map. Starts the
   * streaming context but does not block on it.
   *
   * @throws InterruptedException declared for callers that add a blocking wait
   */
  public void testKafkaStream()
    throws InterruptedException
  {
    setUp();
    String topic = "testcase1";
    HashMap<String, Integer> topics = new HashMap<String, Integer>();
    topics.put(topic, Integer.valueOf(1)); // one receiver thread for this topic

    HashMap<String, String> kafkaParams = new HashMap<String, String>();
    kafkaParams.put("zookeeper.connect", ZK_CONNECT);
    // Fresh group id per run so "smallest" re-reads the topic from the start.
    kafkaParams.put("group.id", "wally-consumer-" + this.random.nextInt(10000));
    kafkaParams.put("auto.offset.reset", "smallest");

    JavaPairDStream<String, String> stream = KafkaUtils.createStream(this.ssc, String.class, String.class, StringDecoder.class, StringDecoder.class, kafkaParams, topics, StorageLevel.MEMORY_ONLY_SER());

    // Running totals; synchronized because foreachRDD updates it from the
    // driver's job-scheduler thread while other threads may read it.
    final Map<String, Long> result = Collections.synchronizedMap(new HashMap<String, Long>());

    // Keep only the message value of each (key, value) pair.
    JavaDStream<String> words = stream.map(new Function<Tuple2<String, String>, String>()
    {
      private static final long serialVersionUID = 1L;

      public String call(Tuple2<String, String> tuple2)
        throws Exception
      {
        return tuple2._2();
      }
    });

    // Merge each batch's per-value counts into the running totals.
    words.countByValue().foreachRDD(new Function<JavaPairRDD<String, Long>, Void>()
    {
      private static final long serialVersionUID = 1964273415217318874L;

      public Void call(JavaPairRDD<String, Long> rdd)
        throws Exception
      {
        List<Tuple2<String, Long>> ret = rdd.collect();
        for (Tuple2<String, Long> r : ret) {
          Long previous = result.get(r._1());
          result.put(r._1(), previous == null ? r._2() : Long.valueOf(previous.longValue() + r._2().longValue()));
        }
        return null;
      }
    });
    words.print();

    this.ssc.start();
  }

  public static void main(String[] args)
  {
    JavaKafkaStream stream = new JavaKafkaStream();
    try
    {
      stream.testKafkaStream();
      // NOTE(review): there is no awaitTermination() here, so tearDown() runs
      // almost immediately after start() and the demo processes little or
      // nothing. Add ssc.awaitTerminationOrTimeout(...) if batches should run.
    }
    catch (InterruptedException e)
    {
      // Restore the interrupt flag instead of swallowing the interruption.
      Thread.currentThread().interrupt();
      e.printStackTrace();
    }
    stream.tearDown();
  }
}
