package trident.wordcount;

import org.apache.storm.trident.operation.BaseAggregator;
import org.apache.storm.trident.operation.TridentCollector;
import org.apache.storm.trident.operation.TridentOperationContext;
import org.apache.storm.trident.tuple.TridentTuple;
import org.apache.storm.tuple.Values;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Map;

/**
 * Trident aggregator that maintains a per-batch word-count map and emits a
 * {@code (word, runningCount)} pair for every input tuple.
 *
 * <p>Per-batch state lives in the map returned from {@link #init}, which
 * Trident hands back as {@code val} on each {@link #aggregate} call. Keeping
 * it there — rather than in an instance field — isolates concurrently pending
 * batches that share this aggregator instance.
 */
public class WordCountAggregator extends BaseAggregator<Object> {
    private static final Logger logger = LoggerFactory.getLogger(WordCountAggregator.class);

    // Retained only to log which partition this aggregator instance runs in.
    private TridentOperationContext context;

    @Override
    public void prepare(Map<String, Object> conf, TridentOperationContext context) {
        this.context = context;
    }

    /**
     * Creates the fresh per-batch state. Trident passes this object back as
     * {@code val} to every {@code aggregate}/{@code complete} call belonging
     * to this batch.
     */
    @Override
    public Object init(Object batchId, TridentCollector collector) {
        return new HashMap<String, Integer>();
    }

    /**
     * Increments the count for the tuple's {@code "word"} field within this
     * batch's state and emits {@code (word, newCount)}.
     */
    @Override
    public void aggregate(Object val, TridentTuple tuple, TridentCollector collector) {
        // Safe: `val` is always the HashMap returned from init() for this batch.
        @SuppressWarnings("unchecked")
        Map<String, Integer> counts = (Map<String, Integer>) val;

        String word = tuple.getStringByField("word");
        int count = counts.merge(word, 1, Integer::sum);

        logger.info("partition by : {}, word : {}, count : {}",
                context.getPartitionIndex(), word, count);
        collector.emit(new Values(word, count));
    }

    @Override
    public void complete(Object val, TridentCollector collector) {
        // Counts are emitted incrementally in aggregate(); nothing to flush here.
    }
}
