package tomenttosa.prunuus.example.storm;

import java.util.HashMap;
import java.util.Map;
import java.util.Random;

import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.StormSubmitter;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.BasicOutputCollector;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.TopologyBuilder;
import org.apache.storm.topology.base.BaseBasicBolt;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;
import org.apache.storm.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class WordCountTopology {
	
	public static class RandomSentenceSpout extends BaseRichSpout{

		private static final Logger log = LoggerFactory.getLogger(RandomSentenceSpout.class);

		private static final long serialVersionUID = -5740224030838640000L;

		/** Fixed pool of sample sentences; one is emitted per call to {@link #nextTuple()}. */
		private static final String[] sentences = new String[] {
			"the cow jumped over the moon",
			"an apple a day keeps the doctor away",
			"four score and seven years ago",
			"snow white and the seven dwarfs",
			"i am at two with nature"
		};

		SpoutOutputCollector _collector;
		Random _rand;

		/**
		 * Captures the collector and seeds the random source used to pick sentences.
		 */
		@Override
		public void open(@SuppressWarnings("rawtypes") Map conf, TopologyContext context, SpoutOutputCollector collector) {
			_collector = collector;
			_rand = new Random();
		}

		/**
		 * Emits one randomly chosen sentence, pausing 100 ms first to throttle output.
		 */
		@Override
		public void nextTuple() {
			Utils.sleep(100);
			final int pick = _rand.nextInt(sentences.length);
			final String sentence = sentences[pick];
			log.info("wordcount spout sentence: {}", sentence);
			_collector.emit(new Values(sentence));
		}

		/** The output stream carries a single field named "sentence". */
		@Override
		public void declareOutputFields(OutputFieldsDeclarer declarer) {
			declarer.declare(new Fields("sentence"));
		}

	}
	
	public static class SplitSentence extends BaseBasicBolt {

		private static final Logger log = LoggerFactory.getLogger(SplitSentence.class);
		private static final long serialVersionUID = -7272083946651361102L;

		/**
		 * Splits the incoming "sentence" field on single spaces and emits one
		 * tuple per resulting word.
		 */
		@Override
		public void execute(Tuple input, BasicOutputCollector collector) {
			final String sentence = input.getStringByField("sentence");
			for (final String word : sentence.split(" ")) {
				log.info("wordcount split word: {} ", word);
				collector.emit(new Values(word));
			}
		}

		/** The output stream carries a single field named "word". */
		@Override
		public void declareOutputFields(OutputFieldsDeclarer declarer) {
			declarer.declare(new Fields("word"));
		}

	}
	
	public static class WordCount extends BaseBasicBolt{
		private static final Logger log = LoggerFactory.getLogger(WordCount.class);

		private static final long serialVersionUID = 3107316291754482027L;

		// Running per-word totals, kept in memory for the lifetime of this bolt
		// instance. NOTE(review): state is lost on worker restart — acceptable
		// for a demo topology, not for production counting.
		Map<String, Integer> counts = new HashMap<String, Integer>();

		/**
		 * Increments the running count for the incoming "word" field and emits
		 * the (word, updated count) pair downstream.
		 */
		@Override
		public void execute(Tuple input, BasicOutputCollector collector) {
			final String word = input.getStringByField("word");
			final Integer previous = counts.get(word);
			final int updated = (previous == null) ? 1 : previous + 1;
			counts.put(word, updated);
			log.info("wordcount count word: {},{} ", word, updated);
			collector.emit(new Values(word, updated));
		}

		/** The output stream carries two fields: "word" and its running "count". */
		@Override
		public void declareOutputFields(OutputFieldsDeclarer declarer) {
			declarer.declare(new Fields("word", "count"));
		}
	}
	
	/**
	 * Builds the spout → split → count topology, runs it on an in-process
	 * {@link LocalCluster} for ten seconds, then shuts the cluster down.
	 *
	 * @throws Exception if topology submission fails or the sleep is interrupted
	 */
	public static void startLocalCluster() throws Exception {
		TopologyBuilder builder = new TopologyBuilder();

		builder.setSpout("spout", new RandomSentenceSpout(), 1);
		builder.setBolt("split", new SplitSentence(), 1).shuffleGrouping("spout");
		// fieldsGrouping on "word" guarantees each word always reaches the same
		// counting task, so per-word totals are consistent.
		builder.setBolt("count", new WordCount(), 1).fieldsGrouping("split", new Fields("word"));

		Config conf = new Config();
		conf.setDebug(false);
		conf.setMaxTaskParallelism(3);

		LocalCluster cluster = new LocalCluster();
		try {
			cluster.submitTopology("word-count", conf, builder.createTopology());
			// Let the topology process tuples for ten seconds before tearing down.
			Thread.sleep(10000);
		} finally {
			// Always release the in-process cluster, even if submission or the
			// sleep throws — previously a failure here leaked the cluster.
			cluster.shutdown();
		}
	}
	
	/**
	 * Builds the same spout → split → count topology and submits it to a remote
	 * Storm cluster under the name "word-count".
	 *
	 * @throws Exception if submission to the cluster fails
	 */
	public static void startRemoteCluster() throws Exception {
		TopologyBuilder topology = new TopologyBuilder();
		topology.setSpout("spout", new RandomSentenceSpout(), 1);
		topology.setBolt("split", new SplitSentence(), 1).shuffleGrouping("spout");
		topology.setBolt("count", new WordCount(), 1).fieldsGrouping("split", new Fields("word"));

		Config config = new Config();
		config.setDebug(false);
		config.setNumWorkers(20);
		// Cap un-acked tuples in flight per spout task to avoid overload.
		config.setMaxSpoutPending(5000);

		StormSubmitter.submitTopology("word-count", config, topology.createTopology());
	}
	
	
	/**
	 * Entry point. Runs the topology on an in-process local cluster by default
	 * (unchanged behavior when no arguments are given); pass "remote" as the
	 * first argument to submit to a real Storm cluster instead.
	 *
	 * @param args optional; args[0] == "remote" selects remote submission
	 * @throws Exception if topology setup or submission fails
	 */
	public static void main(String[] args) throws Exception {
		if (args.length > 0 && "remote".equalsIgnoreCase(args[0])) {
			startRemoteCluster();
		} else {
			startLocalCluster();
		}
	}
}
