package com.example.demo.kafka;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.common.errors.WakeupException;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.time.Duration;
import java.util.*;
import java.util.concurrent.*;

@Component
@Slf4j
public class KafkaManager {
    @Value("${spring.kafka.bootstrap-servers}")
    private String bootstrapServers;
    /**
     * 监听中的消费者
     */
    private static final Map<String, Consumer<String, byte[]>> listeningConsumers = new ConcurrentHashMap<>();

    /**
     * 已经注册的消费者
     */
    private List<String> consumerList = new ArrayList<>();

    @Autowired
    private CdcMessageConsumer cdcMessageConsumer;

    private final ExecutorService consumerExecutor = Executors.newCachedThreadPool();
    private final Map<String, Future<?>> consumerFutures = new ConcurrentHashMap<>();


    public void removeConsumer(String bootstrapServers, String topic) {
        String key = bootstrapServers + "-" + topic;
        if (!consumerList.contains(key)) {
            throw new RuntimeException("没有该消费者!");
        }
        if (listeningConsumers.containsKey(key)) {
            throw new RuntimeException("该消费者正在运行中,请先停止!");
        }
        consumerList.add(key);
    }


    public void addTopic(String topic) {
        if (consumerList.contains(topic)) {
            return;
        }

        Properties props = new Properties();
        log.info(bootstrapServers);
        props.put("bootstrap.servers", bootstrapServers);
        props.put("group.id", "dynamic-subscription-group");
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "1000");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
        props.put("auto.offset.reset", "earliest");

        Consumer<String, byte[]> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList(topic));
        listeningConsumers.put(topic, (Consumer<String , byte[]>) consumer);
        consumerList.add(topic);

        Future<?> future = consumerExecutor.submit(() -> {
            try {
                while (!Thread.currentThread().isInterrupted()) {
                    ConsumerRecords<String, byte[]> records = consumer.poll(Duration.ofMillis(100));
                    for (ConsumerRecord<String, byte[]> record : records) {
                        cdcMessageConsumer.doConsumer(record);
                    }
                }
            } catch (WakeupException ignored) {
            } finally {
                consumer.close();
                log.info("Consumer for topic {} closed.", topic);
            }
        });

        consumerFutures.put(topic, future);
    }

    public void removeTopic(String topic) {
        Consumer<String, byte[]> consumer = listeningConsumers.remove(topic);
        Future<?> future = consumerFutures.remove(topic);

        if (consumer == null || future == null) {
            consumerList.remove(topic);
            return;
        }

        consumer.wakeup(); // safe way to break poll()
        future.cancel(true); // interrupt the thread
        log.info("Removed topic {}", topic);
        consumerList.remove(topic);
    }

}
