import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
// Kafka high-level (legacy, ZooKeeper-based) consumer API
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;

public class spout2 extends BaseRichSpout {
    private int begin_2=0;
    private static final long serialVersionUID = -6574998944310422274L;
    private SpoutOutputCollector collector;
    private ConsumerConnector consumer;
    private String topic;

    public spout2() {}

    public spout2(String topic) {
        this.topic = topic;
    }

    public void nextTuple() {    }

    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
        this.collector = collector;
    }

    public void ack(Object msgId ) {    }

    public void activate() {
        consumer =kafka.consumer.Consumer.createJavaConsumerConnector(createConsumerConfig());
        Map<String,Integer> topickMap = new HashMap<String, Integer>();
        topickMap.put(topic, 1);

        System.out.println("*********Results********topic:"+topic);
        try{
        Map<String, List<KafkaStream<byte[],byte[]>>>  streamMap=consumer.createMessageStreams(topickMap);
        KafkaStream<byte[],byte[]>stream = streamMap.get(topic).get(0);
        ConsumerIterator<byte[],byte[]> it =stream.iterator();
        while(it.hasNext()) {
            String value = new String(it.next().message());
            //System.out.println("storm接收到来自kafka的消息------->" + value);
            //对kafka传来的消息进行切割划分
            String regEx = "[\n`~!@#$%^&*()+=|{}':;,'\\[\\]<>/?~！@#￥%……&*()+|{}【】‘；：”“’。， 、？]";
            value = value.replaceAll(regEx, "");
            //System.out.println(value);
            String[] words = value.split("\"\"");
            //获取数据
            String receiveplace = words[12];
            String guest_name = words[10];
            String total = words[20];
            String address = words[12];
            String pay=words[14];
            begin_2++;
            //将消息提交给下一个bolt
            collector.emit(new Values(receiveplace, guest_name, total, address, pay,begin_2));
            }
        }catch (Exception e){}
    }

    private static ConsumerConfig createConsumerConfig() {
        Properties props = new Properties();
        // 设置zookeeper的链接地址
        props.put("zookeeper.connect","192.168.128.140:2181,192.168.128.141:2181,192.168.128.142:2181,192.168.128.143:2181");
        // 设置group id
        props.put("group.id", "1");
        // kafka的group 消费记录是保存在zookeeper上的, 但这个信息在zookeeper上不是实时更新的, 需要有个间隔时间更新
        props.put("auto.commit.interval.ms", "1000");
        props.put("zookeeper.session.timeout.ms","10000");
        return new ConsumerConfig(props);
    }

    public void close() {    }

    public void deactivate() {    }

    public void fail(Object msgId) {    }

    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        //申明emit变量
        declarer.declare(new Fields("receiveplace","guest_name","total","address","pay","begin_2"));
    }

    public Map<String, Object> getComponentConfiguration() {
        System.out.println("getComponentConfiguration被调用");
        //设置接入的kafka topic
        topic="order_detail_end";
        return null;
    }
}