package com.doit.day02;

import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public class KafkaConsumerUtils {

    /**
     * Creates a {@link KafkaConsumer} with the default configuration
     * (brokers {@code linux01:9092}, group {@code g01}, string key/value
     * deserializers, auto-commit enabled) and subscribes it to the given topics.
     *
     * @param topic one or more topic names to subscribe to
     * @return a subscribed consumer; the caller is responsible for closing it
     */
    public static KafkaConsumer<String, String> getConsumer(String... topic) {
        return getConsumer(new HashMap<>(), topic);
    }

    /**
     * Creates a {@link KafkaConsumer} with the default configuration plus
     * caller-supplied overrides, and subscribes it to the given topics.
     *
     * @param overrides consumer properties applied on top of the defaults
     *                  (e.g. a different {@code bootstrap.servers} or
     *                  {@code group.id}); may be empty, must not be null
     * @param topics    one or more topic names to subscribe to
     * @return a subscribed consumer; the caller is responsible for closing it
     */
    public static KafkaConsumer<String, String> getConsumer(Map<String, Object> overrides,
                                                            String... topics) {
        Map<String, Object> config = new HashMap<>();
        // Required settings
        config.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        config.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        config.put("bootstrap.servers", "linux01:9092");
        config.put("group.id", "g01");
        // Optional settings
        config.put("allow.auto.create.topics", "true");
        // Where to start when the group has no committed offset:
        // "earliest" = oldest available record ("latest" would be the newest).
        config.put("auto.offset.reset", "earliest");
        // Periodically commit offsets to the internal __consumer_offsets topic
        config.put("enable.auto.commit", "true");
        // Auto-commit interval in milliseconds
        config.put("auto.commit.interval.ms", "10000");
        // Caller-supplied settings win over the defaults above
        config.putAll(overrides);

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(config);
        // Subscribe the consumer to the requested topics
        consumer.subscribe(Arrays.asList(topics));
        return consumer;
    }
}
