package com.test.study.ToolsUtils.strom.trident;

import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.trident.TridentTopology;
import org.apache.storm.tuple.Fields;

/**
 * Word-count example built with Storm Trident, executed on a local
 * in-process cluster: lines are split into words, grouped, and the
 * aggregated counts are printed.
 * 
 * @author huyong
 *
 */
public class TridentTopologyWordCount {

	/**
	 * Builds the word-count Trident topology and runs it on an in-process
	 * {@link LocalCluster} for a bounded time window, then shuts the cluster
	 * down so the JVM can exit and resources are released.
	 *
	 * @param args unused
	 * @throws Exception if the topology submission fails or the run window
	 *                   is interrupted
	 */
	public static void main(String[] args) throws Exception {
		// Pipeline: spout emits "line" tuples -> split into "work" (word)
		// tuples -> group by word -> aggregate into a count "map" -> print.
		TridentTopology topology = new TridentTopology();
		MyFixedBatchSpout myFixedBatchSpout = new MyFixedBatchSpout();
		topology.newStream("toput01", myFixedBatchSpout)
				.each(new Fields("line"), new SplitFunction(), new Fields("work"))
				.groupBy(new Fields("work")) // partition the stream by word
				.aggregate(new Fields("work"), new MyAggregator(), new Fields("map"))
				.each(new Fields("map"), new PrintfFunction(), new Fields(""));

		Config config = new Config();
		// Back-pressure: at most 100 tuples may be pending (emitted but not
		// yet fully processed) at once; the spout blocks until downstream
		// consumers catch up. Limits unprocessed tuples held in memory.
		config.setMaxSpoutPending(100);

		LocalCluster localCluster = new LocalCluster();
		try {
			localCluster.submitTopology("TridentTopologyWordCount", config, topology.build());
			// Give the topology time to process the spout's batches; without
			// a bounded window the original code ran forever.
			Thread.sleep(10_000L);
		} finally {
			// Always tear the local cluster down — the original leaked it,
			// leaving the JVM running indefinitely.
			localCluster.shutdown();
		}
	}

}
