package com.yang.consumer.kafka.manual;

import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Pattern;

/**
 * Demo of the different ways a {@link KafkaConsumer} can subscribe to data:
 * plain topic subscription, regex subscription, subscription with a rebalance
 * listener, and explicit partition assignment — plus a simple poll loop.
 *
 * <p>Note: auto-commit is left at its default (enabled); see {@link #initConfig()}
 * for how to switch to manual offset commits.
 */
public class KafkaConsumerDemo {
    public static final String bootstrapServers = "192.168.137.31:9092,192.168.137.32:9092,192.168.137.33:9092";
    public static final String topic = "topic-demo";
    public static final String groupId = "consumer-demo";
    public static final String clientId = "consumer-demo-1";
    // Poll-loop flag; set to false (e.g. from a shutdown hook) to stop consuming.
    public static final AtomicBoolean isRunning = new AtomicBoolean(true);

    /**
     * Builds the consumer configuration.
     *
     * @return properties suitable for constructing a {@link KafkaConsumer}
     */
    public static Properties initConfig() {
        Properties properties = new Properties();
        // Kafka cluster broker addresses
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        // key deserializer
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        // value deserializer (NOT serializer — this is the consumer side)
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        // consumer group id (group.id)
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        // client id; when unset, defaults to consumer-1, consumer-2, ...
        properties.put(ConsumerConfig.CLIENT_ID_CONFIG, clientId);
        // Uncomment to use manual offset commits; auto-commit is enabled by default.
        // properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        // Offset reset policy when there is no committed offset or the offset is
        // out of range. Default: latest. Options: earliest, latest, none (throws).
        properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
        return properties;
    }

    /**
     * Entry point: subscribes to the demo topic and polls until
     * {@link #isRunning} becomes false.
     *
     * <p>Related consumer controls:
     * pause consumption: {@code consumer.pause(...)};
     * resume consumption: {@code consumer.resume(...)};
     * abort a blocked poll: {@code consumer.wakeup()} (throws WakeupException
     * inside poll — currently caught by the generic Exception handler below).
     *
     * @param args unused
     */
    public static void main(String[] args) {
        Properties properties = initConfig();
        KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(properties);
        // Subscribe. Repeated subscribe calls are not cumulative — the last one
        // wins. subscribe() participates in automatic group rebalancing.
        subscribe(consumer);
        // Regex subscription
        // subscribe2(consumer);
        // Subscription with a rebalance listener
        // subscribe3(consumer);
        // Explicit topic-partition assignment (no rebalancing)
        // assign(consumer);

        // Unsubscribe
        // unSubscribe(consumer);

        try {
            while (isRunning.get()) {
                ConsumerRecords<String, String> consumerRecords = consumer.poll(Duration.ofMillis(1000));
                for (ConsumerRecord<String, String> record : consumerRecords) {
                    System.out.println(record.toString());
                }
                // Consume per partition:
//                for (TopicPartition partition : consumerRecords.partitions()) {
//                    for (ConsumerRecord<String, String> record : consumerRecords.records(partition)) {
//                        System.out.println(record.toString());
//                    }
//                }
                // Consume per topic: consumerRecords.records(topic)
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Always close to release sockets and trigger a clean group leave.
            consumer.close();
        }
    }

    /** Subscribes to the single demo topic. */
    private static void subscribe(KafkaConsumer<String, String> consumer) {
        consumer.subscribe(Arrays.asList(topic));
    }

    /** Cancels the subscription. Three equivalent options — use any one. */
    private static void unSubscribe(KafkaConsumer<String, String> consumer) {
        consumer.unsubscribe();
        // consumer.subscribe(Arrays.asList());
        // consumer.assign(new ArrayList<TopicPartition>());
    }

    /** Subscribes by regular expression, matching all "topic-..." topics. */
    private static void subscribe2(KafkaConsumer<String, String> consumer) {
        // Fixed typo: was "topci-.*", which could never match "topic-demo".
        consumer.subscribe(Pattern.compile("topic-.*"));
    }

    /** Subscribes with a (currently no-op) rebalance listener. */
    private static void subscribe3(KafkaConsumer<String, String> consumer) {
        consumer.subscribe(Arrays.asList(topic), new ConsumerRebalanceListener() {
            @Override
            public void onPartitionsRevoked(Collection<TopicPartition> collection) {
                // Called before partitions are taken away; commit offsets here
                // when using manual commit to avoid duplicate consumption.
            }

            @Override
            public void onPartitionsAssigned(Collection<TopicPartition> collection) {
                // Called after new partitions are assigned; seek here if needed.
            }
        });
    }

    /**
     * Assigns all partitions of the demo topic explicitly.
     * Unlike subscribe(), assign() bypasses group rebalancing.
     */
    private static void assign(KafkaConsumer<String, String> consumer) {
        // Look up the topic's partition metadata (may be null if unknown).
        List<PartitionInfo> partitionInfos = consumer.partitionsFor(topic);
        List<TopicPartition> topicPartitionList = new ArrayList<>();
        if (partitionInfos != null) {
            for (PartitionInfo info : partitionInfos) {
                topicPartitionList.add(new TopicPartition(info.topic(), info.partition()));
            }
        }
        consumer.assign(topicPartitionList);

        // Or assign specific partitions by hand:
        // consumer.assign(Arrays.asList(new TopicPartition(topic, 0)));
    }


}
