package cn.jly.flink.utils;

import com.alibaba.fastjson.JSON;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

/**
 * @PackageName cn.jly.flink.utils
 * @ClassName KafkaUtils
 * @Description kafka工具类
 * @Author 姬岚洋
 * @Date 2021/1/13 上午 11:19
 */
public class KafkaUtils {
    /**
     * logger
     */
    private static final Logger LOGGER = LoggerFactory.getLogger(KafkaUtils.class);

    /**
     * Shared producer/consumer configuration, loaded once from {@code kafka.properties}
     * on the classpath and augmented with String (de)serializers.
     */
    private static final Properties PROPERTIES = new Properties();

    static {
        // try-with-resources so the config stream is always closed (the original leaked it)
        try (InputStream configStream = KafkaUtils.class.getClassLoader().getResourceAsStream("kafka.properties")) {
            if (configStream == null) {
                throw new KafkaUtilsException("类路径下kafka配置文件不存在");
            }
            PROPERTIES.load(configStream);
            // Fall back to a local broker when no bootstrap servers are configured.
            String bootStrapServers = PROPERTIES.getProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG);
            if (StringUtils.isEmpty(bootStrapServers)) {
                PROPERTIES.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
            }
            PROPERTIES.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            PROPERTIES.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            PROPERTIES.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
            PROPERTIES.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
            LOGGER.info("kafka配置文件 -> {}", PROPERTIES);
        } catch (IOException e) {
            // Chain the cause so the original stack trace is not lost.
            throw new KafkaUtilsException(e.getMessage(), e);
        }
    }

    /**
     * Sends a value to Kafka asynchronously (fire-and-forget, no callback).
     *
     * <p>NOTE: despite the naming, this overload family is the <em>asynchronous</em>
     * path; the {@code asyncWriteToKafka} family below blocks on the ack. The names
     * are kept for backward compatibility with existing callers.
     *
     * @param topicName topic name
     * @param value     value to send (serialized to JSON)
     * @param <V>       value type
     */
    public static <V> void writeToKafka(String topicName, V value) {
        writeToKafkaWithCallback(topicName, value, null);
    }

    /**
     * Sends a value to Kafka asynchronously, invoking {@code callback} on completion.
     *
     * @param topicName topic name
     * @param value     value to send (serialized to JSON)
     * @param callback  completion callback, may be {@code null}
     * @param <V>       value type
     */
    public static <V> void writeToKafkaWithCallback(String topicName, V value, Callback callback) {
        writeToKafkaWithCallback(topicName, null, null, value, callback);
    }

    /**
     * Sends a record to Kafka asynchronously with optional partition, key and callback.
     *
     * @param topicName topic name
     * @param partition target partition, or {@code null} to let the partitioner decide
     * @param key       record key (serialized to JSON), or {@code null} for a keyless record
     * @param value     value to send (serialized to JSON)
     * @param callback  completion callback, may be {@code null}
     * @param <K>       key type
     * @param <V>       value type
     */
    public static <K, V> void writeToKafkaWithCallback(String topicName, Integer partition, K key, V value, Callback callback) {
        try (final KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(PROPERTIES)) {
            kafkaProducer.send(buildRecord(topicName, partition, key, value), callback);
            // flush before close so the send is actually on the wire; the producer is
            // per-call here, so buffered records would otherwise be at the mercy of close()
            kafkaProducer.flush();

            LOGGER.info("成功发送数据给kafka -> {}", JSON.toJSONString(value));
        }
    }

    /**
     * Sends a value to Kafka <em>synchronously</em>, blocking until the broker acks.
     *
     * <p>NOTE: the name is historically inverted — this is the blocking path (it calls
     * {@code Future.get()}); kept for backward compatibility with existing callers.
     *
     * @param topicName topic name
     * @param value     value to send (serialized to JSON)
     * @param <V>       value type
     */
    public static <V> void asyncWriteToKafka(String topicName, V value) {
        asyncWriteToKafkaWithCallback(topicName, null, null, value, null);
    }

    /**
     * Sends a record to Kafka synchronously — blocks the calling thread until the
     * broker acks — with optional partition, key and callback. Failures are logged
     * (with the full stack trace) rather than rethrown, matching the original
     * best-effort contract.
     *
     * @param topicName topic name
     * @param partition target partition, or {@code null} to let the partitioner decide
     * @param key       record key (serialized to JSON), or {@code null} for a keyless record
     * @param value     value to send (serialized to JSON)
     * @param callback  completion callback, may be {@code null}
     * @param <K>       key type
     * @param <V>       value type
     */
    public static <K, V> void asyncWriteToKafkaWithCallback(String topicName, Integer partition, K key, V value, Callback callback) {
        try (final KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(PROPERTIES)) {
            kafkaProducer.send(buildRecord(topicName, partition, key, value), callback).get();
        } catch (InterruptedException e) {
            // Restore the interrupt status so callers can observe the interruption.
            Thread.currentThread().interrupt();
            LOGGER.error("发送给kafka数据失败 -> {}", e.getMessage(), e);
        } catch (Exception e) {
            // Pass the exception as the last argument so SLF4J logs the stack trace.
            LOGGER.error("发送给kafka数据失败 -> {}", e.getMessage(), e);
        }
    }

    /**
     * Builds a {@link ProducerRecord}, JSON-serializing key and value.
     *
     * <p>A {@code null} key is passed through as a real {@code null}: fastjson would
     * otherwise serialize it to the literal string {@code "null"}, which makes every
     * keyless record hash to the same partition instead of being distributed.
     */
    private static <K, V> ProducerRecord<String, String> buildRecord(String topicName, Integer partition, K key, V value) {
        String serializedKey = (key == null) ? null : JSON.toJSONString(key);
        return new ProducerRecord<>(topicName, partition, serializedKey, JSON.toJSONString(value));
    }

    /**
     * Unchecked exception for configuration/IO failures inside this utility.
     */
    public static class KafkaUtilsException extends RuntimeException {
        public KafkaUtilsException(String message) {
            super(message);
        }

        /**
         * @param message detail message
         * @param cause   underlying cause, preserved so the stack trace is not lost
         */
        public KafkaUtilsException(String message, Throwable cause) {
            super(message, cause);
        }
    }
}
