package org.example.kafkastudy.config;

import com.alibaba.fastjson.JSON;
import org.apache.kafka.clients.consumer.ConsumerInterceptor;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.header.Header;

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * Consumer-side interceptor that filters incoming records for the order service.
 *
 * <p>{@link #onConsume(ConsumerRecords)} keeps only records carrying a {@code "token"}
 * header whose value equals {@code "order-token"}; all other records are dropped before
 * they reach the application. {@link #onCommit(Map)} merely logs the offsets being
 * committed. Register via the {@code interceptor.classes} consumer config.
 *
 * @param <K> record key type
 * @param <V> record value type
 */
public class OrderConsumeInterceptor<K, V> implements ConsumerInterceptor<K, V> {

    /** Name of the record header that carries the auth token. */
    private static final String TOKEN_HEADER_KEY = "token";

    /** Token value a record must carry to be accepted by the order service. */
    private static final String EXPECTED_TOKEN = "order-token";

    /**
     * Runs before records are returned to the consumer's {@code poll()} caller.
     * Validates each record's token header and drops records that fail validation.
     *
     * @param records the batch fetched from the broker
     * @return a new batch containing only the records whose token matched, regrouped
     *         by their original {@link TopicPartition}
     */
    @Override
    public ConsumerRecords<K, V> onConsume(ConsumerRecords<K, V> records) {
        System.out.println("订单服务消息，onConsume，开始校验 token，records：" + records);

        List<ConsumerRecord<K, V>> accepted = new ArrayList<>();
        for (ConsumerRecord<K, V> record : records) {
            if (EXPECTED_TOKEN.equals(extractToken(record))) {
                accepted.add(record);
                System.out.println("订单服务消息，onConsume，token 校验通过，record：" + record);
            } else {
                System.out.println("订单服务消息，onConsume，token 校验不通过，丢弃，record：" + record);
            }
        }

        // ConsumerRecords requires a partition -> records map, so regroup the survivors.
        return new ConsumerRecords<>(accepted.stream()
                .collect(Collectors.groupingBy(r -> new TopicPartition(r.topic(), r.partition()))));
    }

    /**
     * Extracts the token header value from a record.
     *
     * <p>If the header appears multiple times, the last occurrence wins (matching the
     * original scan order).
     *
     * @param record the record to inspect
     * @return the decoded token, or {@code null} if the header is absent or has no value
     */
    private String extractToken(ConsumerRecord<K, V> record) {
        String token = null;
        for (Header header : record.headers()) {
            // Header.value() may legally be null; guard to avoid an NPE.
            if (TOKEN_HEADER_KEY.equals(header.key()) && header.value() != null) {
                // Decode with an explicit charset: new String(byte[]) uses the
                // platform default charset on JDK < 18, which is non-portable.
                token = new String(header.value(), StandardCharsets.UTF_8);
            }
        }
        return token;
    }

    /**
     * Runs after records were consumed successfully, just before offsets are committed.
     * Logging only; does not alter the commit.
     *
     * @param offsets the offsets about to be committed, per partition
     */
    @Override
    public void onCommit(Map<TopicPartition, OffsetAndMetadata> offsets) {
        System.out.println("订单服务消息，onCommit，offsets：" + offsets);
    }

    @Override
    public void close() {
        // No resources held; nothing to release.
    }

    @Override
    public void configure(Map<String, ?> configs) {
        // No configuration required by this interceptor.
    }
}
