package com.test.study.ToolsUtils.strom.workdsum;

import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Tuple;

@SuppressWarnings("serial")
public class AllBolt extends BaseRichBolt {

	@SuppressWarnings("rawtypes")
	private Map stormConf;
	private TopologyContext context;
	private OutputCollector collector;

	// Running word -> latest count table. Upstream emits cumulative counts per
	// word, so put() replacing the previous value is the intended behavior.
	private final Map<String, Integer> wordCounts = new HashMap<String, Integer>();

	/**
	 * Stores the Storm-provided configuration, context and collector for use
	 * during tuple processing.
	 */
	@SuppressWarnings("rawtypes")
	@Override
	public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
		this.stormConf = stormConf;
		this.context = context;
		this.collector = collector;
	}

	/**
	 * Records the latest count for the incoming word, then prints every known
	 * word with its count, the total of all counts, and the number of distinct
	 * words. Acks the tuple so Storm does not time it out and replay it.
	 *
	 * @param input tuple carrying a "words" string field and a "count" integer field
	 */
	@Override
	public void execute(Tuple input) {
		String word = input.getStringByField("words");
		Integer count = input.getIntegerByField("count");
		// Guard against a missing/null count to avoid an unboxing NPE below.
		if (word != null && count != null) {
			wordCounts.put(word, count);
		}
		int allwordCount = 0;
		for (Entry<String, Integer> entry : wordCounts.entrySet()) {
			Integer value = entry.getValue();
			String key = entry.getKey();
			System.out.println(key + "\t" + value);
			allwordCount += value;
		}
		System.out.println("所有单词出现的总次数：" + allwordCount);
		System.out.println("所有去重后的单词的个数：" + wordCounts.size());
		// BUG FIX: the original never acked, causing tuple timeout/replay when
		// acking is enabled in the topology.
		collector.ack(input);
	}

	/**
	 * Terminal bolt: emits nothing downstream, so no output fields are declared.
	 */
	@Override
	public void declareOutputFields(OutputFieldsDeclarer declarer) {
		// intentionally empty — this bolt only prints results
	}

}
