package com.cmnit.gatherdata.utils;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

import java.io.Serializable;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Future;

/**
 * kafka生产者工具类
 * 1.创建生产者
 * 2.发送消息
 *
 * @author chan
 * @version 1.0
 * @since 2022.3.3
 */
/**
 * Kafka producer utility.
 * <p>
 * Lazily creates one {@link KafkaProducer} per (brokers, security) combination,
 * caches it for reuse across calls, and closes all cached producers in a JVM
 * shutdown hook so buffered records are flushed on exit.
 * <p>
 * NOTE(review): {@code Serializable} is kept — presumably this class is referenced
 * from serializable closures of a distributed framework; all state is static, so
 * serialization carries no data. Confirm before removing.
 *
 * @author chan
 * @version 1.0
 * @since 2022.3.3
 */
public class KafkaProducerUtils implements Serializable {
    /**
     * Producer cache. Keyed by brokers plus the security flag so the same broker
     * list with different Kerberos settings gets distinct producers.
     */
    private static final Map<String, KafkaProducer<String, String>> producerCache = new ConcurrentHashMap<>();

    static {
        // Best-effort: close every cached producer when the JVM exits so that
        // buffered records are flushed and network resources are released.
        Runtime.getRuntime().addShutdownHook(new Thread(() -> producerCache.forEach(KafkaProducerUtils::accept)));
    }

    /**
     * Creates a new KafkaProducer with String key/value serializers.
     *
     * @param brokers  comma-separated bootstrap server list
     * @param security whether to enable Kerberos (SASL/GSSAPI); {@code null} is treated as {@code false}
     * @return a newly created producer
     */
    private static KafkaProducer<String, String> createProducer(String brokers, Boolean security) {
        Properties prop = new Properties();
        prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers);
        // "all" is the documented alias of "-1": wait for the full ISR to acknowledge.
        prop.put(ProducerConfig.ACKS_CONFIG, "all");
        prop.put(ProducerConfig.BATCH_SIZE_CONFIG, 131072);
        prop.put(ProducerConfig.LINGER_MS_CONFIG, 250);
        prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        // Null-safe unboxing: a boxed Boolean parameter may legally be null.
        if (Boolean.TRUE.equals(security)) {
            // Kerberos configuration
            prop.put("security.protocol", "SASL_PLAINTEXT");
            prop.put("sasl.mechanism", "GSSAPI");
            prop.put("sasl.kerberos.service.name", "kafka");
            prop.put("sasl.jaas.config", ConfigurationManager.getProperty("sasl.jaas.config"));
        }
        return new KafkaProducer<>(prop);
    }

    /**
     * Returns the cached producer for this broker/security combination, creating
     * it on first use. Avoids creating a new KafkaProducer per message.
     *
     * @param brokers  comma-separated bootstrap server list
     * @param security whether to enable Kerberos; {@code null} is treated as {@code false}
     * @return a shared producer instance
     */
    private static KafkaProducer<String, String> getProducer(String brokers, Boolean security) {
        // The security flag is part of the key: the same brokers with different
        // Kerberos settings must not share a producer (keying by brokers alone
        // would silently return a producer built with the wrong security config).
        String cacheKey = brokers + "|" + Boolean.TRUE.equals(security);
        return producerCache.computeIfAbsent(cacheKey, k -> createProducer(brokers, security));
    }


    /**
     * Sends a keyed message.
     *
     * @param security whether to enable Kerberos
     * @param brokers  bootstrap server list
     * @param topic    target topic
     * @param key      record key (determines partition)
     * @param message  record value
     * @return future completed with the record metadata once acknowledged
     */
    public static Future<RecordMetadata> send(Boolean security, String brokers, String topic, String key, String message) {
        KafkaProducer<String, String> producer = getProducer(brokers, security);
        ProducerRecord<String, String> producerRecord = new ProducerRecord<>(topic, key, message);
        return producer.send(producerRecord);
    }


    /**
     * Sends a message without a key (partition chosen by the producer).
     *
     * @param security whether to enable Kerberos
     * @param brokers  bootstrap server list
     * @param topic    target topic
     * @param message  record value
     * @return future completed with the record metadata once acknowledged
     */
    public static Future<RecordMetadata> send(Boolean security, String brokers, String topic, String message) {
        KafkaProducer<String, String> producer = getProducer(brokers, security);
        ProducerRecord<String, String> producerRecord = new ProducerRecord<>(topic, message);
        return producer.send(producerRecord);
    }


    /**
     * Closes one cached producer; used as the shutdown-hook callback.
     *
     * @param key cache key (unused, required by {@code Map.forEach})
     * @param v   the producer to close
     */
    private static void accept(String key, KafkaProducer<String, String> v) {
        try {
            v.close();
        } catch (Exception e) {
            // Shutdown-hook context: logging frameworks may already be stopped,
            // so stderr is the only reliable sink; never abort the hook.
            e.printStackTrace();
        }
    }
}
