
package com.windy.kafka.trace.producer;

import com.windy.common.transmit.kafka.KafkaProducerTransmitHeader;
import org.apache.kafka.clients.consumer.ConsumerGroupMetadata;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.ProducerFencedException;

import java.time.Duration;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Future;

/**
 * A {@link Producer} decorator that invokes a chain of {@link MessageSendProcessor}
 * tracing hooks around every send and injects transmit headers into outgoing records
 * via {@link KafkaProducerTransmitHeader}. All other operations are delegated
 * unchanged to the wrapped producer.
 *
 * @param <K> record key type
 * @param <V> record value type
 */
public class TracingKafkaProducer<K, V> implements Producer<K, V> {

    /** Underlying producer that performs the actual Kafka operations. */
    private final Producer<K, V> producer;
    /** Hooks invoked before and after each record is handed to the producer. */
    private final List<MessageSendProcessor> tracers;
    /** Injects cross-service transmit headers into each outgoing record. */
    private final KafkaProducerTransmitHeader kafkaProducerTransmitHeader;

    /**
     * Creates a tracing wrapper around an existing producer.
     *
     * @param producer                    delegate producer; must not be null
     * @param tracers                     send processors to run around each send; must not be null
     * @param kafkaProducerTransmitHeader header injector for outgoing records; must not be null
     * @throws NullPointerException if any argument is null
     */
    public TracingKafkaProducer(Producer<K, V> producer, List<MessageSendProcessor> tracers, KafkaProducerTransmitHeader kafkaProducerTransmitHeader) {
        this.producer = Objects.requireNonNull(producer, "producer");
        this.tracers = Objects.requireNonNull(tracers, "tracers");
        this.kafkaProducerTransmitHeader = Objects.requireNonNull(kafkaProducerTransmitHeader, "kafkaProducerTransmitHeader");
    }

    @Override
    public void initTransactions() {
        producer.initTransactions();
    }

    @Override
    public void beginTransaction() throws ProducerFencedException {
        producer.beginTransaction();
    }

    @Override
    public void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> map, String s)
            throws ProducerFencedException {
        producer.sendOffsetsToTransaction(map, s);
    }

    @Override
    public void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets, ConsumerGroupMetadata groupMetadata) throws ProducerFencedException {
        // BUGFIX: previously an empty body — offsets passed through the
        // ConsumerGroupMetadata overload were silently dropped, breaking
        // exactly-once consume-transform-produce flows.
        producer.sendOffsetsToTransaction(offsets, groupMetadata);
    }

    @Override
    public void commitTransaction() throws ProducerFencedException {
        producer.commitTransaction();
    }

    @Override
    public void abortTransaction() throws ProducerFencedException {
        producer.abortTransaction();
    }

    @Override
    public Future<RecordMetadata> send(ProducerRecord<K, V> record) {
        return send(record, null);
    }

    /**
     * Sends a record, running every tracer's before-hook (and header injection)
     * first and every after-hook afterwards. After-hooks run in a {@code finally}
     * block so they execute even when the before-hooks or the delegate throw.
     *
     * @param record   record to send
     * @param callback completion callback, forwarded to the delegate; may be null
     * @return the delegate's future for the send result
     */
    @Override
    public Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback) {
        try {
            // Run pre-send hooks before the record is handed to Kafka.
            tracers.forEach(messageSendProcessor -> messageSendProcessor.beforeProcessMessage(record));
            // Inject transmit headers into the outgoing record's headers.
            kafkaProducerTransmitHeader.transmitHeader(record::headers);
            // BUGFIX: previously called producer.send(record), dropping the
            // caller-supplied callback so completion callbacks never fired.
            return producer.send(record, callback);
        } finally {
            // Post-send hooks always run, even on failure above.
            tracers.forEach(messageSendProcessor -> messageSendProcessor.afterProcessorMessage(record));
        }
    }

    @Override
    public void flush() {
        producer.flush();
    }

    @Override
    public List<PartitionInfo> partitionsFor(String topic) {
        return producer.partitionsFor(topic);
    }

    @Override
    public Map<MetricName, ? extends Metric> metrics() {
        return producer.metrics();
    }

    @Override
    public void close() {
        producer.close();
    }

    @Override
    public void close(Duration duration) {
        producer.close(duration);
    }


}
