package big.kafka.consumer;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Properties;

public class CustomConsumer {

    /**
     * Demo consumer: subscribes to topic "first", prints each record's
     * offset/partition/value, and commits offsets asynchronously after
     * every poll (auto-commit is disabled).
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        Properties po = new Properties();
        po.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "hadoop129:9092");
        po.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        po.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        // Consumers sharing this group id belong to the same consumer group.
        po.put(ConsumerConfig.GROUP_ID_CONFIG, "test");
        // Disable auto-commit (default is true); offsets are committed manually below.
        po.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");

        // try-with-resources guarantees consumer.close() runs (leaving the group
        // cleanly) if the poll loop ever exits, e.g. via an exception.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(po)) {
            consumer.subscribe(Arrays.asList("first")); // topic(s) to consume

            while (true) {
                // Block for at most 100 ms waiting for records. Note: the original
                // comment said "100 seconds", but poll's timeout is milliseconds.
                // poll(Duration) replaces the deprecated poll(long) overload.
                ConsumerRecords<String, String> consumerRecords = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
                    // %d for the int partition; fixed "partion" typo in the output.
                    System.out.printf("offset = %d, partition = %d, value = %s%n",
                            consumerRecord.offset(), consumerRecord.partition(), consumerRecord.value());
                }
                // Manual asynchronous commit of the offsets just processed.
                consumer.commitAsync();
            }
        }
    }

}
