package com.gjy.kafka.resolve.consumer;

import com.gjy.kafka.resolve.config.KafkaConsumerConfig;
import com.gjy.kafka.resolve.producer.TransactionalProducer;
import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.AuthorizationException;
import org.apache.kafka.common.errors.OutOfOrderSequenceException;
import org.apache.kafka.common.errors.ProducerFencedException;
import org.apache.kafka.common.errors.WakeupException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.Duration;
import java.util.*;

/**
 * @author gjy
 * @version 1.0
 * @since 2025-10-13 15:19:56
 */
public class TransactionalConsumer implements AutoCloseable {
    private static final Logger logger = LoggerFactory.getLogger(TransactionalConsumer.class);

    private final Consumer<String, String> consumer;
    private final TransactionalProducer txnProducer; // used to write to the downstream topic inside the same transaction

    /**
     * Shutdown flag toggled by {@link #close()}. Marked volatile because close()
     * is expected to be called from a different thread than the poll loop.
     */
    private volatile boolean running = true;

    /**
     * Creates a consumer bound to the given group and a transactional producer
     * with the given transactional id.
     *
     * <p>NOTE(review): exactly-once semantics additionally require the consumer
     * config to set {@code enable.auto.commit=false} and
     * {@code isolation.level=read_committed} — confirm in
     * {@link KafkaConsumerConfig#buildConfig(String)}.
     *
     * @param groupId       consumer group id
     * @param txnProducerId transactional id for the downstream producer
     */
    public TransactionalConsumer(String groupId, String txnProducerId) {
        Properties consumerProps = KafkaConsumerConfig.buildConfig(groupId);
        this.consumer = new KafkaConsumer<>(consumerProps);
        this.txnProducer = new TransactionalProducer(txnProducerId);
    }

    /**
     * 消费 ATopic → 处理 → 写入 BTopic（Exactly‑Once）
     *
     * <p>Consumes from {@code sourceTopic}, forwards each record to
     * {@code destTopic}, and commits the consumed offsets as part of the same
     * producer transaction, so the forward and the offset commit succeed or
     * fail atomically. Loops until {@link #close()} is called.
     *
     * @param sourceTopic topic to consume from
     * @param destTopic   topic to forward records to
     */
    public void consumeAndForward(String sourceTopic, String destTopic) {
        consumer.subscribe(Collections.singletonList(sourceTopic));
        try {
            while (running) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
                if (records.isEmpty()) {
                    continue;
                }
                processBatch(records, sourceTopic, destTopic);
            }
        } catch (WakeupException e) {
            // wakeup() is only expected as part of close(); anything else is a real error
            if (running) {
                throw e;
            }
            logger.info("消费循环收到关闭信号，正常退出");
        }
    }

    /**
     * Forwards one polled batch to {@code destTopic} and commits its offsets in
     * a single transaction.
     *
     * <p>Error handling follows the KafkaProducer transactional contract:
     * fatal errors ({@link ProducerFencedException},
     * {@link OutOfOrderSequenceException}, {@link AuthorizationException})
     * cannot be aborted — the producer must be closed and the error
     * propagated. All other failures abort the transaction; since the offsets
     * were never committed, the next poll re-delivers the same records and the
     * batch is retried, preserving exactly-once end-to-end.
     */
    private void processBatch(ConsumerRecords<String, String> records,
                              String sourceTopic, String destTopic) {
        // Track whether beginTransaction() succeeded: calling abortTransaction()
        // without an active transaction throws IllegalStateException and would
        // mask the original failure.
        boolean txnStarted = false;
        try {
            txnProducer.getProducer().beginTransaction();
            txnStarted = true;

            // 发送到 BTopic（仍在同一事务中）；示例为直接转发，实际业务可在这里加工
            for (ConsumerRecord<String, String> rec : records) {
                txnProducer.getProducer().send(
                        new ProducerRecord<>(destTopic, rec.key(), rec.value()));
            }

            // 将当前消费的 offset 作为事务的一部分提交（写入 __consumer_offsets）
            // OffsetAndMetadata takes the NEXT offset to consume, hence lastOffset + 1.
            Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
            for (TopicPartition tp : records.partitions()) {
                List<ConsumerRecord<String, String>> partitionRecords = records.records(tp);
                long lastOffset = partitionRecords.get(partitionRecords.size() - 1).offset();
                offsets.put(tp, new OffsetAndMetadata(lastOffset + 1));
            }
            txnProducer.getProducer().sendOffsetsToTransaction(offsets, consumer.groupMetadata());

            // 提交事务 → 消费进度 & 生产写入一起成功
            txnProducer.getProducer().commitTransaction();
            logger.info("事务提交成功，sourceTopic={}, destTopic={}, recordCount={}",
                    sourceTopic, destTopic, records.count());
        } catch (ProducerFencedException | OutOfOrderSequenceException | AuthorizationException fatal) {
            // Fatal: the transactional producer is unusable; abort is illegal here.
            logger.error("事务生产者发生不可恢复异常，关闭生产者", fatal);
            txnProducer.close();
            throw fatal;
        } catch (Exception ex) {
            logger.error("事务处理异常，回滚事务", ex);
            if (txnStarted) {
                txnProducer.getProducer().abortTransaction();
            }
            // 此时 offset 未提交，下次 poll 将重新消费相同记录 → 实现 Exactly‑Once
        }
    }

    /**
     * Stops the poll loop and releases the consumer and producer.
     * Safe to call from another thread: wakeup() interrupts a blocking poll().
     */
    @Override
    public void close() {
        running = false;
        consumer.wakeup();
        consumer.close();
        txnProducer.close();
    }
}