package cn.chenchendemospringboot.stock_demo.utils;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.CommitFailedException;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.springframework.stereotype.Component;

import java.time.Duration;
import java.util.Arrays;
import java.util.Collections;
import java.util.Properties;
import java.util.function.Consumer;

@Component
@Slf4j
public class KafkaUtils {

    /**
     * Shared Kafka consumer used by both {@link #consume(Consumer)} and
     * {@link #consume2(Consumer)}.
     *
     * <p>NOTE: {@code KafkaConsumer} is NOT thread-safe, and the two consume
     * methods re-subscribe/re-assign this single instance. They must never be
     * invoked concurrently, and in practice only one of them should be used
     * per application run.
     */
    private static final KafkaConsumer<String, String> consumer;

    static {
        // Consumer configuration. TODO(review): the broker address, group id and
        // topic names are hard-coded — consider externalizing them to
        // application properties.
        Properties consumerProps = new Properties();
        consumerProps.put("bootstrap.servers", "localhost:9092");
        consumerProps.put("key.deserializer",
                "org.apache.kafka.common.serialization.StringDeserializer");
        consumerProps.put("value.deserializer",
                "org.apache.kafka.common.serialization.StringDeserializer");
        consumerProps.put("group.id", "VoucherGroup");
        // Disable auto-commit: offsets are committed manually via commitSync()
        // only after the callback has processed the whole batch (at-least-once).
        consumerProps.put("enable.auto.commit", "false");
        consumerProps.put("auto.offset.reset", "earliest");
        consumer = new KafkaConsumer<>(consumerProps);
    }

    /**
     * Consumes messages from the {@code EVENTPUBLISH_CREATE} topic forever,
     * invoking the callback once per record and committing offsets
     * synchronously after each successfully processed batch.
     *
     * <p>This method never returns; it blocks the calling thread in a poll loop.
     *
     * @param c callback invoked for every received record
     */
    public static void consume(Consumer<ConsumerRecord<String, String>> c) {
        consumer.subscribe(Collections.singletonList("EVENTPUBLISH_CREATE"));
        while (true) {
            // Duration-based poll: the poll(long) overload used previously is
            // deprecated since Kafka 2.0 (and consume2 already used Duration).
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
            if (!records.isEmpty()) {
                for (ConsumerRecord<String, String> record : records) {
                    log.info("接收到消息，ConsumerRecord={}", record);
                    c.accept(record);
                }
                try {
                    // Synchronous manual offset commit; log success only AFTER
                    // the commit actually succeeded (previously the success
                    // message was logged before commitSync(), so it appeared
                    // even when the commit failed).
                    consumer.commitSync();
                    log.info("消息消费成功！");
                } catch (CommitFailedException e) {
                    // Commit failed (e.g. rebalance in progress): the batch will
                    // be redelivered — at-least-once semantics.
                    log.error("Kafka消费者提交offset失败", e);
                }
            } else {
                log.debug("没有接收到任何消息");
            }
        }
    }

    /**
     * Consumes messages from partition 0 of the {@code test10} topic forever,
     * starting from the beginning of the partition, invoking the callback once
     * per record and committing offsets synchronously after each batch.
     *
     * <p>This method never returns; it blocks the calling thread in a poll loop.
     *
     * @param c callback invoked for every received record
     */
    public static void consume2(Consumer<ConsumerRecord<String, String>> c) {
        // BUG FIX: the original called subscribe() and then seek() immediately.
        // subscribe() is lazy — no partitions are assigned until the first
        // poll() — so seek() threw IllegalStateException ("No current
        // assignment for partition test10-0"). Manual assign() takes effect
        // immediately, making the subsequent seek() legal. (The original only
        // ever rewound partition 0, so assigning partition 0 preserves intent.)
        TopicPartition tp = new TopicPartition("test10", 0);
        consumer.assign(Collections.singletonList(tp));
        consumer.seek(tp, 0L); // rewind to the start of partition 0

        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));

            if (!records.isEmpty()) {
                for (ConsumerRecord<String, String> record : records) {
                    log.debug("接收到消息，ConsumerRecord={}", record);
                    c.accept(record);
                }

                try {
                    // Commit first, then report success (see consume()).
                    consumer.commitSync();
                    log.info("消息消费成功！");
                } catch (CommitFailedException e) {
                    log.error("Kafka消费者提交offset失败", e);
                }
            } else {
                log.debug("没有接收到任何消息");
            }
        }
    }

}
