package com.hskn.hss.utils;

import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.util.Properties;

@Component
public class KafkaUtils implements InitializingBean {

    // Configuration is injected through instance setters into static fields so
    // that the static getConsumer(...) factory can read it. This relies on
    // Spring creating the single KafkaUtils bean (and running
    // afterPropertiesSet) before getConsumer is first called.
    private static String bootstrapServers;
    private static String groupId;
    private static String keyDeserializer;
    private static String valueDeserializer;
    private static String autoOffsetReset;

    // Template consumer configuration, assembled once in afterPropertiesSet().
    // Treated as read-only afterwards: getConsumer copies it instead of
    // mutating it, so concurrent callers cannot interfere with each other.
    private static Properties baseProps;

    @Value("${kafka.bootstrapServers}")
    public void setBootstrapServers(String param) {
        bootstrapServers = param;
    }

    @Value("${kafka.groupId}")
    public void setGroupId(String param) {
        groupId = param;
    }

    @Value("${kafka.keyDeserializer}")
    public void setkeyDeserializer(String param) {
        keyDeserializer = param;
    }

    // NOTE: the property key "kafka.valueDdeserializer" contains a typo, but it
    // is kept as-is for compatibility with existing configuration files.
    @Value("${kafka.valueDdeserializer}")
    public void setValueDdeserializer(String param) {
        valueDeserializer = param;
    }

    @Value("${kafka.autoOffsetReset}")
    public void setAutoOffsetReset(String param) {
        autoOffsetReset = param;
    }

    /**
     * Builds the shared consumer configuration template from the injected
     * values. Invoked by Spring once all {@code @Value} setters have run.
     */
    @Override
    public void afterPropertiesSet() {
        Properties props = new Properties();
        // Mandatory consumer settings.
        props.put("bootstrap.servers", bootstrapServers);
        props.put("key.deserializer", keyDeserializer);
        props.put("value.deserializer", valueDeserializer);
        props.put("group.id", groupId);
        // Optional settings: auto-commit is disabled, so callers are expected
        // to commit offsets themselves.
        props.put("enable.auto.commit", "false");
        props.put("max.poll.records", 50);
        props.put("auto.commit.interval.ms", "1000");
        // Where to start when no committed offset exists; configurable,
        // typically "earliest" or "latest".
        props.put("auto.offset.reset", autoOffsetReset);
        baseProps = props;
    }

    /**
     * Creates a new {@link KafkaConsumer} for the given consumer group, based
     * on the configuration injected from application properties. The returned
     * consumer is not subscribed to any topic; the caller owns it and is
     * responsible for closing it.
     *
     * @param <K>     key type produced by the configured key deserializer
     * @param <V>     value type produced by the configured value deserializer
     * @param groupId consumer group id for this consumer (overrides the
     *                configured default)
     * @return a new KafkaConsumer instance
     * @throws IllegalStateException if called before Spring has initialized
     *                               this bean
     */
    public static synchronized <K, V> KafkaConsumer<K, V> getConsumer(String groupId) {
        if (baseProps == null) {
            throw new IllegalStateException(
                    "KafkaUtils is not initialized; afterPropertiesSet() has not run yet");
        }
        // Copy the template so the shared Properties object is never mutated.
        // The previous implementation wrote each caller's group.id directly
        // into the shared template, leaking state between calls.
        Properties props = new Properties();
        props.putAll(baseProps);
        props.put("group.id", groupId);
        return new KafkaConsumer<>(props);
    }
}