package com.example.provider.kafka;

import com.example.api.common.trace.TraceIdUtil;
import com.example.api.common.message.KafkaMessage;
import org.apache.kafka.clients.consumer.ConsumerInterceptor;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.Map;

public class KafkaConsumerTraceIdInterceptor implements ConsumerInterceptor<String, String> {
    private static final Logger logger = LoggerFactory.getLogger(KafkaConsumerTraceIdInterceptor.class);
    // ObjectMapper is thread-safe and costly to construct; share one instance.
    private static final ObjectMapper objectMapper = new ObjectMapper();

    /**
     * Extracts the traceId from each consumed record's JSON payload and publishes it
     * via {@link TraceIdUtil} so downstream processing can correlate logs.
     *
     * <p>NOTE(review): {@code onConsume} is invoked once per poll, on the whole batch,
     * before the application processes any record. When a batch contains multiple
     * records, each {@code TraceIdUtil.setTraceId} call overwrites the previous one,
     * so only the last record's traceId is visible during processing. Confirm whether
     * per-record propagation is expected; if so, the traceId should be set in the
     * record-processing path (or carried in record headers) instead.
     *
     * @param records the batch returned by the consumer poll; returned unchanged
     * @return the same {@code records} instance (interceptors must not lose records)
     */
    @Override
    public ConsumerRecords<String, String> onConsume(ConsumerRecords<String, String> records) {
        records.forEach(record -> {
            String value = record.value();
            // Tombstones / empty payloads carry no trace context; skip them quietly
            // instead of logging a deserialization error for a normal condition.
            if (value == null || value.isEmpty()) {
                return;
            }
            try {
                KafkaMessage<?> message = objectMapper.readValue(value, KafkaMessage.class);
                String traceId = message.getTraceId();
                if (traceId != null) {
                    TraceIdUtil.setTraceId(traceId);
                    // debug, not info: this fires for every record on the consume hot path
                    logger.debug("Kafka consumer traceId: {}", traceId);
                }
            } catch (Exception e) {
                // Best-effort: a malformed payload must never break message consumption.
                logger.error("Failed to extract traceId from Kafka message", e);
            }
        });
        return records;
    }

    /** No-op: this interceptor takes no action on offset commits. */
    @Override
    public void onCommit(Map<TopicPartition, OffsetAndMetadata> offsets) {
    }

    /** No-op: no resources to release. */
    @Override
    public void close() {
    }

    /** No-op: no configuration keys are consumed. */
    @Override
    public void configure(Map<String, ?> configs) {
    }
}