package yuan.consumer.interceptor;

import org.apache.kafka.clients.consumer.ConsumerInterceptor;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * Consumer interceptor implementing a simple message "TTL": before poll()
 * returns, records whose timestamp is more than {@code EXPLAIN_INTERVAL}
 * milliseconds in the past are silently dropped.
 *
 * @version: 1.0
 * @author: zZmH
 * @date: 2020/09/09 13:00
 */


public class ConsumerInterceptorTTL implements ConsumerInterceptor<String, String> {

    /** Maximum record age in milliseconds; anything older is filtered out. */
    public static final long EXPLAIN_INTERVAL = 10 * 1000;

    /**
     * Called just before poll() returns to the application. Rebuilds the
     * batch keeping, per partition, only the records whose timestamp lies
     * within {@link #EXPLAIN_INTERVAL} ms of the current wall-clock time.
     * Partitions left with no surviving records are omitted entirely.
     *
     * NOTE(review): filtering here means offsets of dropped records are not
     * re-delivered but are only committed once newer records arrive — the
     * usual trade-off of this TTL-interceptor pattern.
     *
     * @param consumerRecords the batch fetched by the consumer
     * @return a new batch containing only records considered "fresh"
     */
    @Override
    public ConsumerRecords<String, String> onConsume(ConsumerRecords<String, String> consumerRecords) {
        final long now = System.currentTimeMillis();
        final Map<TopicPartition, List<ConsumerRecord<String, String>>> kept = new HashMap<>();

        for (TopicPartition partition : consumerRecords.partitions()) {
            final List<ConsumerRecord<String, String>> fresh =
                    consumerRecords.records(partition).stream()
                            .filter(record -> now - record.timestamp() <= EXPLAIN_INTERVAL)
                            .collect(Collectors.toList());
            if (!fresh.isEmpty()) {
                kept.put(partition, fresh);
            }
        }

        return new ConsumerRecords<>(kept);
    }

    /**
     * Called after offsets have been committed; prints each partition's
     * committed offset to stdout.
     *
     * @param map committed offsets keyed by topic-partition
     */
    @Override
    public void onCommit(Map<TopicPartition, OffsetAndMetadata> map) {
        for (Map.Entry<TopicPartition, OffsetAndMetadata> entry : map.entrySet()) {
            System.out.println("tp:" + entry.getKey() + " offset:" + entry.getValue().offset());
        }
    }

    /** No resources held; nothing to release. */
    @Override
    public void close() {

    }

    /** This interceptor takes no configuration. */
    @Override
    public void configure(Map<String, ?> map) {

    }
}
