package org.sean.framework.kafka.logging.delivery;


import org.apache.kafka.clients.producer.ProducerRecord;
import org.sean.framework.logging.Logger;
import org.sean.framework.util.DigestUtil;
import org.springframework.kafka.core.KafkaTemplate;

import java.nio.charset.StandardCharsets;
import java.util.UUID;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

/**
 * Delivery strategy that publishes log records to Kafka asynchronously.
 *
 * <p>Sends are handed off to a small private thread pool so a slow or
 * unavailable Kafka broker never blocks the caller's worker thread. When the
 * hand-off queue is full, new messages are silently discarded — delivery is
 * strictly best-effort.
 *
 * @author xielei
 * @since 0.0.1
 */
public class KafkaDeliveryStrategy implements DeliveryStrategy {

    /**
     * Dedicated pool for Kafka sends: when the Kafka server is unavailable,
     * {@code producer.send} can block, so it must not run on the caller's
     * thread. The bounded queue (2000) combined with
     * {@link ThreadPoolExecutor.DiscardPolicy} drops messages under
     * back-pressure instead of blocking or throwing.
     */
    private final ThreadPoolExecutor executor = new ThreadPoolExecutor(2, 2, 1,
            TimeUnit.MINUTES, new ArrayBlockingQueue<>(2000), new ThreadPoolExecutor.DiscardPolicy());

    /**
     * Queues a record for asynchronous, best-effort delivery to the given topic.
     *
     * @param producer Kafka template used to publish; if {@code null}, nothing is sent
     * @param topic    destination topic
     * @param key      record key
     * @param value    record payload
     * @return {@code true} if the record was accepted for asynchronous delivery
     *         (it may still be discarded under back-pressure or on send failure),
     *         {@code false} if {@code producer} is {@code null}
     */
    @Override
    public boolean send(KafkaTemplate<Object, Object> producer, String topic, String key, String value) {
        if (producer == null) {
            return false;
        }
        // Generate our own trace/span ids: letting brave instrument these sends
        // causes an infinite logging loop when the kafka client log level is DEBUG.
        String traceId = DigestUtil.md5Hex16(UUID.randomUUID().toString());
        ProducerRecord<Object, Object> producerRecord = new ProducerRecord<>(topic, key, value);
        // Explicit charset: the no-arg getBytes() depends on the platform default,
        // which would make header bytes vary across JVM configurations.
        byte[] traceBytes = traceId.getBytes(StandardCharsets.UTF_8);
        producerRecord.headers().add(Logger.KEY_TRACE, traceBytes);
        producerRecord.headers().add(Logger.KEY_SPAN, traceBytes);
        // Hand off to the pool so a blocked broker cannot stall the caller.
        executor.execute(() -> {
            try {
                producer.send(producerRecord);
            } catch (Exception ignored) {
                // Best-effort delivery: log shipping must never break the application.
            }
        });
        return true;
    }

}
