/**
 * Copyright (C), 2015-2018, XXX有限公司
 * FileName: ConsumerSpout
 * Author:   An-Il
 * Date:     2018/11/18 16:45
 * Description: 生产消息spout
 * History:
 * <author>          <time>          <version>          <desc>
 * 作者姓名           修改时间           版本号              描述
 */
package com.blog.storm.example.integration.kafka.producer;

import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;
import org.apache.storm.utils.Utils;

import java.util.Map;
import java.util.Random;
import java.util.UUID;

/**
 * 〈一句话功能简述〉<br>
 * 〈生产消息spout〉
 *
 * @author An-Il
 * @create 2018/11/18
 * @since 1.0.0
 */
public class ConsumerSpout extends BaseRichSpout {

    /**
     * Candidate payload words. Hoisted to a constant so the array is not
     * rebuilt on every {@link #nextTuple()} invocation (called ~10x/sec here).
     */
    private static final String[] WORDS =
            {"hadoop", "spark", "kafka", "flume", "storm", "hbase", "hive", "zookeeper"};

    /** Collector supplied by Storm in {@link #open}; used to emit tuples downstream. */
    private SpoutOutputCollector collector;

    /** Pseudo-random source for word selection; initialized in {@link #open}. */
    private Random random;

    /**
     * Called once when this spout is initialized on a worker.
     *
     * @param conf      topology configuration (raw {@code Map} per the Storm API)
     * @param context   topology context for this spout instance
     * @param collector collector used to emit tuples from {@link #nextTuple()}
     */
    @Override
    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
        this.collector = collector;
        this.random = new Random();
    }

    /**
     * Emits one randomly chosen (key, message) pair roughly every 100 ms.
     * A random UUID is attached as the tuple's message ID, enabling Storm's
     * ack/fail reliability tracking for each emitted tuple.
     */
    @Override
    public void nextTuple() {
        // Throttle the emission rate; nextTuple must return quickly, so a short
        // sleep is the conventional way to avoid a busy loop when idle.
        Utils.sleep(100);
        String message = WORDS[random.nextInt(WORDS.length)];
        String key = "key-" + message;

        // Send the tuple downstream with a unique message ID.
        this.collector.emit(new Values(key, message), UUID.randomUUID());
        System.out.println("send key:" + key + " message:" + message);
    }

    /**
     * Declares the output schema of this spout: a two-field tuple
     * ("key", "message") consumed by downstream bolts.
     *
     * @param declarer Storm's output-fields declarer
     */
    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("key", "message"));
    }
}