package com.doit.doit47;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.util.Properties;

public class MyKafkaUtils {

    /**
     * Builds a {@link KafkaConsumer} reading String keys/values from the brokers
     * configured in {@code MyKafkaConfig.CONSUMER_HOST_NAME}.
     *
     * <p>Offset resolution order (Kafka semantics):
     * <ol>
     *   <li>an explicit seek in application code, if any;</li>
     *   <li>the committed offset stored in the {@code __consumer_offsets} topic;</li>
     *   <li>otherwise the {@code auto.offset.reset} policy ("earliest" or "latest").</li>
     * </ol>
     *
     * @param groupId          consumer group id ({@code group.id})
     * @param enableAutoCommit whether offsets are committed automatically
     *                         ({@code enable.auto.commit})
     * @param autoOffsetReset  policy when no committed offset exists:
     *                         {@code "earliest"}, {@code "latest"} or {@code "none"}
     * @return a ready-to-use consumer; the caller is responsible for subscribing
     *         and for closing it when done
     */
    public static KafkaConsumer<String, String> getConsumer(String groupId,
                                                            boolean enableAutoCommit,
                                                            String autoOffsetReset) {
        Properties props = new Properties();
        props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, MyKafkaConfig.CONSUMER_HOST_NAME);
        props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);

        // FIX: previously hard-coded to "false"/"earliest"; now honors the
        // caller-supplied arguments so the method's parameters actually take effect.
        props.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, String.valueOf(enableAutoCommit));
        props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoOffsetReset);
        // NOTE(review): auto.commit.interval.ms left at the Kafka default; the old
        // commented-out value was 10000 ms — restore explicitly if that was intended.
//        props.setProperty(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG,"10000");

        return new KafkaConsumer<>(props);
    }
}
