package com.xl.bigdata.consumer;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

/**
 * @ClassName ConsumerAPI
 * @Description Lazily-initialized singleton wrapper around a {@code KafkaConsumer},
 *              built with double-checked locking and auto-commit enabled.
 * @Author xl.gao
 * @Date 2022/3/23 10:48
 * @Version
 */
public class ConsumerAPI {

    // Shared singleton consumer. volatile is required for the double-checked
    // locking in getKafkaConsumer() to publish the instance safely.
    // NOTE(review): KafkaConsumer itself is not thread-safe; callers must not
    // poll this shared instance from multiple threads concurrently — confirm usage.
    public static volatile KafkaConsumer<String, String> kafkaConsumer;

    /**
     * Returns the shared {@code KafkaConsumer}, creating and subscribing it on first call
     * (double-checked locking). Subsequent calls return the cached instance; their
     * {@code bootstrapServers}/{@code topic}/{@code groupId} arguments are ignored.
     *
     * @param bootstrapServers Kafka broker list for {@code bootstrap.servers}
     * @param topic            topic to subscribe to on first initialization
     * @param groupId          consumer group id for {@code group.id}
     * @return the shared consumer, or {@code null} if initialization failed
     */
    public static KafkaConsumer<String, String> getKafkaConsumer(String bootstrapServers, String topic, String groupId) {

        try {
            if (kafkaConsumer == null) {
                synchronized (ConsumerAPI.class) {
                    if (kafkaConsumer == null) {
                        Properties properties = initProperties(bootstrapServers, groupId);
                        // 1. Create the consumer.
                        kafkaConsumer = new KafkaConsumer<>(properties);

                        // 2. Subscribe to the requested topic.
                        List<String> topics = new ArrayList<>();
                        topics.add(topic);
                        kafkaConsumer.subscribe(topics);
                    } else {
                        System.out.println("ConsumerAPI - 重复使用kafkaConsumer连接...双重检查最内层...");
                    }
                }
            } else {
                System.out.println("ConsumerAPI - 重复使用kafkaConsumer连接...双重检查最外层...");
            }
        } catch (Exception e) {
            // Initialization failed: close any partially-created consumer and clear
            // the cache. Without the reset, the field kept pointing at a CLOSED
            // consumer, which this method then returned to every future caller.
            if (kafkaConsumer != null) {
                try {
                    kafkaConsumer.close();
                } catch (Exception ignored) {
                    // best-effort cleanup; the original failure below is what matters
                }
                kafkaConsumer = null;
            }
            System.out.println("ConsumerAPI - 初期化kafkaConsumer连接失败...");
            e.printStackTrace();
        }

        return kafkaConsumer;

    }

    /**
     * Builds the consumer configuration: String key/value deserializers,
     * auto-commit every 1s, 30s session timeout, offset reset to {@code latest}.
     *
     * @param bootstrapServers Kafka broker list
     * @param groupId          consumer group id
     * @return the assembled {@link Properties}
     */
    public static Properties initProperties(String bootstrapServers, String groupId){
        // 0. Configuration container.
        Properties properties = new Properties();
        // Broker connection: bootstrap.servers
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);

        // Key/value deserializers
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

        // Consumer group id
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);

        // Enable offset auto-commit
        properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);

        // Auto-commit interval (ms)
        properties.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 1000);

        // Session timeout (ms)
        properties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "30000");

        // Where to start when no committed offset exists (was mislabelled
        // "commit interval" in the original comment).
        properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");

        return properties;
    }

    /**
     * Closes the shared consumer if one exists and clears the cached reference
     * so a later {@link #getKafkaConsumer} can build a fresh one.
     * Safe to call before initialization (previously threw NullPointerException).
     */
    public static void closeKafkaConsumer(){
        synchronized (ConsumerAPI.class) {
            if (kafkaConsumer != null) {
                kafkaConsumer.close();
                kafkaConsumer = null;
            }
        }
    }


}
