package com.example.java.kafka.kafkaclients;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * Skeleton for a Kafka string-message consumer: subclasses supply the per-message
 * business logic by implementing {@link #consumeMsg(String, String)}, while this
 * class owns the background poll loop.
 *
 * <p>{@link #main(String[])} wires up a demo consumer against a hard-coded broker
 * and topic, then starts listening on a shared single-thread executor.
 */
public abstract class MessageConsumer {
    // Single-thread pool: exactly one background poll loop per JVM. Never shut down
    // here because the listener is intended to run for the life of the process.
    private static final ExecutorService threadPool = Executors.newFixedThreadPool(1);

    public static void main(String[] args) {
        Properties props = new Properties();
        // FIX: was ProducerConfig.BOOTSTRAP_SERVERS_CONFIG — same "bootstrap.servers"
        // key string, but consumer setup should reference ConsumerConfig.
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "43.139.50.31:9092"); // bootstrap.servers
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "consumer-api-group"); // group.id
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); // enable.auto.commit
        props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000"); // auto.commit.interval.ms
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); // auto.offset.reset
        // FIX: comments previously said "serializer"; these are the deserializer keys.
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); // key.deserializer
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); // value.deserializer
        // Create the consumer and subscribe to the demo topic.
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList("producer-api-topic"));
        // Create the consuming business logic and start listening.
        MessageConsumer messageConsumer = new MessageConsumer() {
            @Override
            public void consumeMsg(String consumerMessage, String consumerName) {
                // System.currentTimeMillis() is equivalent to new Date().getTime()
                // without allocating a legacy java.util.Date.
                System.out.println("Consumer name: " + consumerName + ", Consumer time: " + System.currentTimeMillis() + ", Consume message: " + consumerMessage);
            }
        };
        messageConsumer.startListen(consumer, "consumer-api");
    }

    /**
     * Starts an endless poll loop on the shared executor, delegating each record's
     * value to {@link #consumeMsg(String, String)}.
     *
     * @param kafkaConsumer an already-subscribed consumer (polled only from the
     *                      executor thread — KafkaConsumer is not thread-safe)
     * @param consumerName  label passed through to {@code consumeMsg} for logging
     */
    private void startListen(KafkaConsumer<String, String> kafkaConsumer, String consumerName) {
        threadPool.submit(() -> {
            while (true) {
                try {
                    // poll() never returns null; an empty batch means nothing arrived
                    // within the 3-second timeout.
                    ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofSeconds(3));
                    if (records.isEmpty()) {
                        // FIX: this was previously printed after every poll, even when
                        // records had just been processed.
                        System.out.println("No records found...");
                        continue;
                    }
                    for (ConsumerRecord<String, String> record : records) {
                        String value = record.value();
                        // Tombstone records carry a null value; skip them.
                        if (value != null) {
                            consumeMsg(value, consumerName);
                        }
                    }
                } catch (Exception e) {
                    // FIX: e.getMessage() may be null and hides the exception type;
                    // print the full exception so failures stay diagnosable. A real
                    // deployment should use a logger instead.
                    System.out.println("Error while consuming: " + e);
                    e.printStackTrace();
                }
            }
        });
    }

    /**
     * Per-message business logic, implemented by subclasses.
     *
     * @param consumerMessage the (non-null) record value to process
     * @param consumerName    label identifying this consumer instance
     * @throws Exception if processing fails; the poll loop logs and continues
     */
    public abstract void consumeMsg(String consumerMessage, String consumerName) throws Exception;
}