package com.yuan.kafkastudy.consumer;

import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.util.JsonFormat;
import com.yuan.kafkastudy.proto.DemoProto;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/**
 * Demo consumer: subscribes to the {@code test} topic, deserializes each
 * record value as a {@link DemoProto.Demo} protobuf message, and prints it
 * as JSON to stdout. Runs until the process is killed.
 *
 * <p>NOTE: this class shadows {@code org.apache.kafka.clients.consumer.KafkaConsumer},
 * which is why the Kafka client type is referenced by its fully-qualified name below.
 */
public class KafkaConsumer {
    public static void main(String[] args) {
        // Consumer configuration.
        Map<String, Object> consumerConfig = new HashMap<>();
        consumerConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        consumerConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        consumerConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
        // Consumer group id — typed constant instead of the raw "group.id" string,
        // consistent with the other config keys above.
        consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, "atguigu");
        // try-with-resources guarantees the consumer is closed (committing offsets
        // and leaving the group cleanly) if poll() throws or the loop is interrupted;
        // the old commented-out close() after an infinite loop was unreachable.
        try (org.apache.kafka.clients.consumer.KafkaConsumer<String, byte[]> kafkaConsumer =
                 new org.apache.kafka.clients.consumer.KafkaConsumer<>(consumerConfig)) {
            // Subscribe to the topic.
            kafkaConsumer.subscribe(Collections.singleton("test"));
            // Poll loop.
            while (true) {
                // poll(Duration) replaces the deprecated poll(long) overload.
                ConsumerRecords<String, byte[]> records = kafkaConsumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, byte[]> record : records) {
                    System.out.println("-----key-----:" + record.key());
                    System.out.println("-----偏移量offset-----:" + record.offset());
                    try {
                        // Decode the protobuf payload and render it as JSON.
                        DemoProto.Demo demo = DemoProto.Demo.parseFrom(record.value());
                        String demoPrint = JsonFormat.printer().print(demo);
                        System.out.println("-----value-----:" + demoPrint);
                    } catch (InvalidProtocolBufferException e) {
                        // One malformed record should not kill the whole consumer:
                        // log it and keep consuming (previously this exception
                        // propagated out of main and stopped the process).
                        System.err.println("Skipping unparseable record at offset "
                                + record.offset() + ": " + e);
                    }
                }
            }
        }
    }
}
