package com.zengjianjun.storm.demo.wordcount;

import cn.hutool.json.JSONUtil;
import com.google.common.collect.Lists;
import com.zengjianjun.storm.demo.StormDemoApplication;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;
import org.apache.storm.utils.Utils;
import org.springframework.beans.factory.annotation.Value;

import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;

/**
 * Data-source spout: fabricates mock input lines for the word-count topology.
 * Each call to {@link #nextTuple()} emits one tab-separated line built from a
 * random, non-empty prefix of a fixed word list.
 *
 * @author zengjianjun
 */
@Slf4j
public class DataSourceSpout extends AbstractBaseRichSpout {

    // Candidate words used to fabricate mock input lines; shuffled in place on
    // each emission, so the field stays mutable (not List.of).
    private final List<String> list = Lists.newArrayList("Spark", "Hadoop", "HBase", "Storm", "Flink", "Hive");

    // Collector handed over by Storm in open(); used by nextTuple() to emit.
    private SpoutOutputCollector spoutOutputCollector;

    @Override
    public void open(Map<String, Object> conf, TopologyContext context, SpoutOutputCollector collector) {
        log.info("conf: {}", JSONUtil.toJsonStr(conf));
        log.info("context: {}", JSONUtil.toJsonStr(context));
        // Bootstrap hook declared on AbstractBaseRichSpout (defined elsewhere
        // in this project) — presumably shared initialization; runs before we
        // keep the collector reference.
        super.run();
        this.spoutOutputCollector = collector;
    }

    /**
     * Called repeatedly by Storm; produces one mock line (throttled to one
     * every 5 seconds inside productData()) and emits it unanchored.
     */
    @Override
    public void nextTuple() {
        String lineData = this.productData();
        log.info("接收数据：{}", lineData);
        spoutOutputCollector.emit(new Values(lineData));
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        // Single output field; the name must match what downstream bolts
        // subscribe to.
        outputFieldsDeclarer.declare(new Fields("lineData"));
    }

    /**
     * Builds one line of mock data: shuffles the word list, then joins a
     * random non-empty prefix (1..size words) with tabs. Sleeps 5 seconds per
     * call to throttle the emission rate.
     *
     * @return tab-joined prefix of the shuffled word list, never empty
     */
    private String productData() {
        Collections.shuffle(list);
        // nextInt(bound) already yields [0, bound) — the previous "% size" was
        // a no-op. ThreadLocalRandom avoids allocating a new Random per call.
        int endIndex = ThreadLocalRandom.current().nextInt(list.size()) + 1;
        Utils.sleep(5000);
        return StringUtils.join(list.toArray(), "\t", 0, endIndex);
    }
}
