package com.zhang.storm.wordcount;

import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.BasicOutputCollector;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.TopologyBuilder;
import org.apache.storm.topology.base.BaseBasicBolt;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;
import org.apache.storm.utils.Utils;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;

public class WordCount {

    static class WordSpout extends BaseRichSpout {

        SpoutOutputCollector collector;

        // Fixed pool of sample sentences used as test input.
        String[] text = {"hello husky bj",
                "hello world hi",
                "bj nihao hi"};
        Random r = new Random();

        /** Captures the collector that {@link #nextTuple()} uses to emit. */
        @Override
        public void open(Map map, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
            this.collector = spoutOutputCollector;
        }

        /**
         * Picks one sentence at random, emits it as a single-field tuple,
         * then pauses one second to throttle the stream.
         */
        @Override
        public void nextTuple() {
            String sentence = text[r.nextInt(text.length)];
            List tuple = new Values(sentence);
            collector.emit(tuple);
            System.err.println("spout emit——————————>" + tuple);
            Utils.sleep(1000);
        }

        /** Declares the single output field carrying the whole sentence. */
        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {
            declarer.declare(new Fields("words"));
        }
    }

    static class SplitBolt extends BaseRichBolt {

        OutputCollector collector;

        /** Stores the collector used to emit and ack tuples. */
        @Override
        public void prepare(Map map, TopologyContext topologyContext, OutputCollector outputCollector) {
            this.collector = outputCollector;
        }

        /**
         * Splits the incoming sentence on spaces and emits one tuple per word.
         * Each emit is anchored to the input tuple and the input is acked:
         * BaseRichBolt does NOT auto-ack (unlike BaseBasicBolt), so without
         * the explicit ack the tuple tree is never completed and Storm's
         * at-least-once reliability tracking breaks.
         */
        @Override
        public void execute(Tuple tuple) {
            String text = tuple.getString(0);
            for (String word : text.split(" ")) {
                // Anchor to the input so a downstream failure replays the sentence.
                this.collector.emit(tuple, new Values(word));
            }
            this.collector.ack(tuple);
        }

        /** Declares the single output field carrying one word. */
        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {
            declarer.declare(new Fields("word"));
        }
    }

    static class CountBolt extends BaseRichBolt {

        // Task-local running count of occurrences per word. Typed map (was a
        // raw Map) so no cast is needed on lookup.
        Map<String, Integer> map = new HashMap<String, Integer>();

        OutputCollector collector;

        /** Stores the collector so execute() can ack the input tuple. */
        @Override
        public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
            this.collector = collector;
        }

        /**
         * Increments the tally for each incoming word and prints the new count.
         * Acks the input: BaseRichBolt requires an explicit ack, which the
         * original omitted (and discarded the collector), leaving tuples
         * forever pending in Storm's reliability tracking.
         */
        @Override
        public void execute(Tuple input) {

            // Fetch the word emitted by SplitBolt.
            String word = input.getStringByField("word");

            // New word starts at 1; otherwise add 1 to the existing count.
            Integer previous = map.get(word);
            int count = (previous == null) ? 1 : previous + 1;

            map.put(word, count);

            // Print the word and its updated occurrence count.
            System.out.println(word + "出现了===============" + count);

            collector.ack(input);
        }

        /** Terminal bolt: emits nothing downstream, so no fields declared. */
        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {
            // intentionally empty — this bolt only prints counts
        }

    }

    /**
     * Wires spout -> split -> count and runs the topology on a local cluster.
     * fieldsGrouping on "word" routes every occurrence of the same word to the
     * same CountBolt task, so per-task counts are globally correct.
     */
    public static void main(String[] args) {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("spout", new WordSpout(), 3);
        builder.setBolt("split", new SplitBolt(), 2).shuffleGrouping("spout");
        builder.setBolt("count", new CountBolt()).fieldsGrouping("split", new Fields("word"));

        Config config = new Config();
        config.setDebug(true);
        config.setMaxTaskParallelism(5);

        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("word-count", config, builder.createTopology());

        // Let the topology run for a while, then shut down cleanly. The
        // original submitted and returned, leaving the local cluster's
        // non-daemon threads alive so the JVM never terminated.
        Utils.sleep(30000);
        cluster.killTopology("word-count");
        cluster.shutdown();
    }

}
