package com.example.springboottest.example.kafka.simple;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Arrays;
import java.util.Collections;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Demonstrates a minimal Kafka consumer loop.
 *
 * <p>Note: {@code KafkaProducer} is thread-safe, but {@code KafkaConsumer} is NOT.
 * {@code KafkaConsumer} uses an internal {@code acquire()} check to detect concurrent
 * access and throws {@code ConcurrentModificationException} if more than one thread
 * operates on the same instance.
 */
public class KafkaConsumerAnalysis {
    public static final String brokerList = "localhost:9092";
    public static final String topic = "top-create";
    public static final String groupId = "group.demo5";
    // Flag used to stop the poll loop cooperatively from another thread.
    public static final AtomicBoolean isRunning = new AtomicBoolean(true);

    /**
     * Builds the consumer configuration.
     *
     * @return a {@link Properties} instance with deserializers, bootstrap servers,
     *         group id and client id set
     */
    public static Properties initConfig() {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        /*
         * bootstrap.servers: the list of broker addresses the client uses to bootstrap
         * its connection to the Kafka cluster, formatted as host1:port1,host2:port2.
         * Multiple addresses may be given, separated by commas.
         * (Fixed: this is consumer config — use ConsumerConfig, not ProducerConfig;
         * the key string happened to be identical, which masked the mistake.)
         */
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList);
        /*
         * group.id: the consumer group this consumer belongs to. The default is "";
         * an empty value causes:
         *   Exception in thread "main" org.apache.kafka.common.errors.InvalidGroupIdException:
         *   The configured groupId is invalid
         * In general this should be set to a meaningful, business-relevant name.
         */
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        /*
         * Custom consumer interceptor (disabled).
         */
//        properties.put(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG, ConsumerInterceptorTTL.class.getName());
        properties.put(ConsumerConfig.CLIENT_ID_CONFIG, "consumer.client.id.demo");
        return properties;
    }

    public static void main(String[] args) {
        Properties props = initConfig();

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        /*
         * subscribe(Collection) subscribes to topics by name. Alternatives:
         * 1. Regex subscription:
         *    consumer.subscribe(Pattern.compile("topic.*"));
         * 2. Manual assignment of partition 0 of topic-demo only:
         *    consumer.assign(Arrays.asList(new TopicPartition("topic-demo", 0)));
         */
        consumer.subscribe(Collections.singletonList(topic));
        try {
            while (isRunning.get()) {
                // poll() never returns null; an empty ConsumerRecords is returned
                // when no data arrives within the timeout, so no null check is needed.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println("topic = " + record.topic() + ", partition = "
                            + record.partition() + ", offset = " + record.offset());
                    System.out.println("key = " + record.key() + ", value = " + record.value());
                    // do something to process record.
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Always close the consumer to release sockets and trigger group rebalance.
            consumer.close();
        }
    }
}
