package wordcount;

import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.UUID;

/**
 * Spout that repeatedly emits one of a fixed set of sentences, each tagged with a
 * globally unique message id so Storm's reliability API (ack/fail) can track it.
 *
 * <p>Delivery semantics, for reference — how many times a tuple is processed:
 * 1. at most once
 * 2. at least once (what this spout implements: failed tuples are replayed)
 * 3. exactly once
 */
public class SentenceSpout extends BaseRichSpout {

    private static final Logger LOGGER = LoggerFactory.getLogger(SentenceSpout.class);

    private final String[] sentences = new String[]{"as as dji fjk", "as fh sf", "fsjhi hk osf"
    };
    private final Random random = new Random();

    private SpoutOutputCollector collector;
    // In-flight tuples keyed by message id: removed on ack, replayed on fail.
    // Plain HashMap is fine — Storm invokes nextTuple/ack/fail on a single thread.
    private Map<UUID, Values> pending;

    @Override
    public void open(Map<String, Object> map, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
        collector = spoutOutputCollector;
        pending = new HashMap<>();
    }

    @Override
    public void nextTuple() {
        String sentence = sentences[random.nextInt(sentences.length)];

        // Create a globally unique message id so this tuple can be acked/failed.
        UUID msgId = UUID.randomUUID();
        // Cache the exact emitted tuple (NOT the whole sentences array — that was a
        // bug: fail() would have replayed a tuple carrying the full array) so a
        // replay is byte-identical to the original emission.
        Values tuple = new Values(sentence);
        pending.put(msgId, tuple);
        collector.emit(tuple, msgId);
        LOGGER.info("Emitting tuple : {}", sentence);
//        Utils.sleep(1000);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        outputFieldsDeclarer.declare(new Fields("sentence"));
    }

    /**
     * Called when a tuple was fully processed downstream; drop it from the
     * pending map so it will never be replayed.
     */
    @Override
    public void ack(Object msgId) {
        pending.remove(msgId);
    }

    /**
     * Called when a tuple failed downstream; re-emit the cached original tuple
     * under the same message id (at-least-once semantics).
     */
    @Override
    public void fail(Object msgId) {
        Values failed = pending.get(msgId);
        // Guard against ids we no longer track (e.g. already acked) to avoid
        // emitting a null tuple.
        if (failed != null) {
            collector.emit(failed, msgId);
        } else {
            LOGGER.warn("Received fail for unknown msgId: {}", msgId);
        }
    }

}
