package plus.chendd.kafka.demo.consumer;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

/**
 * A consumer that joins the {@code ConsumerOffsets} consumer group, subscribes to
 * the {@code order01} topic, and prints each received record's partition, key and
 * value in an endless poll loop.
 */
public class GroupConsumer001 {
  public static void main(String[] args) {
    Properties properties = new Properties();
    // Kafka broker connection address
    properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.56.12:9092");
    // BUG FIX: a KafkaConsumer requires key/value DEserializers. The original code
    // set producer serializer keys (key.serializer / value.serializer), which a
    // consumer ignores — construction then fails because key.deserializer and
    // value.deserializer are missing.
    properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    // Consumer group id — partitions of the topic are balanced across members of this group
    properties.put(ConsumerConfig.GROUP_ID_CONFIG, "ConsumerOffsets");

    // try-with-resources guarantees the consumer (and its group membership) is
    // released on every exit path; the original closed it only inside the catch
    // block and silently swallowed the exception.
    try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties)) {
      consumer.subscribe(Collections.singletonList("order01"));
      while (true) {
        // Block up to 1 second waiting for new records
        ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
        for (ConsumerRecord<String, String> record : records) {
          String key = record.key();
          String value = record.value();
          int partition = record.partition();

          System.out.println("接收到消息:" + ",partition =" + partition + ", key = " + key + ", value = " + value);
        }
      }
    }
  }
}
