package trident.wordcount;

import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.trident.TridentTopology;
import org.apache.storm.trident.testing.FixedBatchSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;
import trident.partition.PrintFunction;

public class TridentWordCount {

    /** How long (ms) the local cluster is allowed to run before shutdown. */
    private static final long RUN_MILLIS = 10_000L;

    /**
     * Builds and runs a Trident word-count topology on an in-process
     * {@link LocalCluster}: splits fixed sample sentences into words,
     * counts words per partition, then prints the (word, count) pairs.
     *
     * @param args unused
     * @throws Exception if topology submission or the run wait is interrupted
     */
    public static void main(String[] args) throws Exception {
        // Emits the sample sentences in batches of at most 3 tuples;
        // setCycle(false) means the data is emitted once, not replayed forever.
        FixedBatchSpout spout = new FixedBatchSpout(
                new Fields("sentence"), 3,
                new Values("hello word"),
                new Values("hello hive"),
                new Values("Hello storm"),
                new Values("I like eating watermelon")
        );
        spout.setCycle(false);

        TridentTopology tridentTopology = new TridentTopology();
        tridentTopology.newStream("spout", spout)
                .shuffle()
                .each(new Fields("sentence"), new SplitFunction(), new Fields("word"))
                .parallelismHint(6) /* applies to the preceding shuffle stage */
                .partitionBy(new Fields("word"))
                .partitionAggregate(new Fields("word"), new WordCountAggregator(), new Fields("word", "count"))
                .parallelismHint(8) /* matches the nearest preceding repartition operation */
                .global() /* if omitted, the default distribution is shuffle (unpartitioned) */
                .each(new Fields("word", "count"), new PrintFunction(), new Fields())
                .parallelismHint(1) /* optional; the default is 1 */
        ;

        Config config = new Config();
        // Fix: the original submitted the topology and returned immediately,
        // never shutting the LocalCluster down — the topology got no time to
        // run and the cluster leaked. Wait a bounded interval, then shut down
        // in a finally block so the JVM can exit cleanly either way.
        LocalCluster localCluster = new LocalCluster();
        try {
            localCluster.submitTopology("word-count", config, tridentTopology.build());
            Thread.sleep(RUN_MILLIS);
        } finally {
            localCluster.shutdown();
        }
    }
}
