package org.gxlu.com.storm.wc;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;




import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;import org.apache.storm.tuple.Values;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


@SuppressWarnings("serial")
public class WcSpout extends BaseRichSpout {

	/** SLF4J convention: static final logger keyed to this class. */
	private static final Logger LOG = LoggerFactory.getLogger(WcSpout.class);

	/** Collector handed to us by Storm in {@link #open}; used to emit tuples. */
	private SpoutOutputCollector collector;

	/** Fixed demo sentences; each emitted tuple carries one whole sentence. */
	private final List<String> words = new ArrayList<>();

	/**
	 * Called once per task when this spout is deployed to a worker.
	 * Captures the collector and (re)loads the static sentence list.
	 *
	 * @param conf       topology configuration (unused here)
	 * @param context    task/topology context (unused here)
	 * @param collector_ collector for emitting tuples downstream
	 */
	@Override
	public void open(@SuppressWarnings("rawtypes") Map conf, TopologyContext context,
			SpoutOutputCollector collector_) {
		collector = collector_;

		// Clear first so a repeated open() cannot duplicate the sentences.
		words.clear();
		words.add("hello world spark sparkstreaming");
		words.add("world hello sparksql");
		words.add("spark storm hadoop hive");
		words.add("storm world spark sparkstreaming");
		words.add("hive storm hadoop hive");
		words.add("hadoop world spark sparkstreaming");
		words.add("hbase hadoop hive storm");
		words.add("flume storm hadoop hive");
		words.add("flink storm sparkstreaming hello");
	}

	/**
	 * Emits every sentence as a one-field tuple on each invocation.
	 * NOTE(review): Storm calls nextTuple() in a tight loop, so this re-emits
	 * the whole list continuously with no throttling — acceptable for a demo,
	 * but a real spout would emit incrementally or back off when idle.
	 */
	@Override
	public void nextTuple() {
		words.forEach(w -> collector.emit(new Values(w)));
		// identityHashCode distinguishes list *instances* per executor task;
		// the original words.getClass().hashCode() hashed ArrayList.class,
		// which is the same constant for every task and proved nothing.
		LOG.info("WcSpout-----threadId:{} list.identityHashCode:{}",
				Thread.currentThread().getId(), System.identityHashCode(words));
	}

	/**
	 * Declares the single output field "word" consumed by downstream bolts.
	 *
	 * @param declarer used to register this spout's output schema
	 */
	@Override
	public void declareOutputFields(OutputFieldsDeclarer declarer) {
		declarer.declare(new Fields("word"));
	}

}
