package com.yuan.kafkastudy.consumer;

import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.util.JsonFormat;
import com.yuan.kafkastudy.proto.DemoProto;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

/**
 * Demo consumer that seeks every assigned partition of topic "test" to offset 3,
 * then prints each record's key, offset, and protobuf payload (as JSON) forever.
 * Offsets are committed manually once per non-empty batch.
 */
public class KafkaOffsetConsumer {

    public static void main(String[] args) throws InvalidProtocolBufferException {
        // Consumer configuration.
        Map<String, Object> consumerConfig = new HashMap<>();
        consumerConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        consumerConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        consumerConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
        // Offset reset policy (disabled):
        //consumerConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); // start from 0
        // Offsets are committed manually below, so disable auto-commit.
        consumerConfig.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        // Consumer group id — use the typed constant instead of the raw "group.id" string.
        consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, "atguigu");

        // Create the consumer.
        KafkaConsumer<String, byte[]> kafkaConsumer = new KafkaConsumer<>(consumerConfig);

        // BUG FIX: subscribe() must happen BEFORE the first poll(). The original
        // code polled an unsubscribed consumer, which throws IllegalStateException
        // (and assignment() would otherwise stay empty, spinning the seek loop forever).
        kafkaConsumer.subscribe(Collections.singleton("test"));

        // Poll until partitions are assigned, then seek each assigned partition
        // of the topic to offset 3 so consumption starts from that position.
        boolean seeking = true;
        while (seeking) {
            kafkaConsumer.poll(Duration.ofMillis(100));
            Set<TopicPartition> assignment = kafkaConsumer.assignment();
            for (TopicPartition topicPartition : assignment) {
                if (topicPartition.topic().equals("test")) {
                    kafkaConsumer.seek(topicPartition, 3);
                    seeking = false;
                }
            }
        }

        // Main poll loop: decode each protobuf value and print it as JSON.
        while (true) {
            // Use the Duration overload — poll(long) is deprecated and the
            // file already uses Duration.ofMillis above.
            ConsumerRecords<String, byte[]> records = kafkaConsumer.poll(Duration.ofMillis(100));
            for (ConsumerRecord<String, byte[]> record : records) {
                System.out.println("-----key-----:" + record.key());
                System.out.println("-----偏移量offset-----:" + record.offset());
                byte[] value = record.value();
                DemoProto.Demo demo = DemoProto.Demo.parseFrom(value);
                String demoPrint = JsonFormat.printer().print(demo);
                System.out.println("-----value-----:" + demoPrint);
            }
            // Commit once per non-empty batch. The original issued BOTH an
            // async and a sync commit for every single record — redundant and
            // needlessly chatty; a single synchronous commit is sufficient.
            if (!records.isEmpty()) {
                kafkaConsumer.commitSync();
            }
        }
        // Unreachable while the loop above runs forever; if a shutdown
        // condition were added, close the consumer here.
        //kafkaConsumer.close();
    }
}
