package com.gitee.zhangchenyan.takin.event.consumer;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.util.StringUtils;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;

/**
 * @Description Base class for event-bus subscribers backed by a Kafka consumer
 * @Author zl
 * @Date 2022/9/16 22:55
 **/
@Slf4j
public abstract class EventBusSubscriber implements IEventBusSubscriber, InitializingBean {

    /**
     * 卡夫卡引导服务器
     */
    @Value("${takin.kafka.bootstrap-servers:#{null}}")
    private String bootstrapServers;

    @Override
    public void afterPropertiesSet() {
        if (StringUtils.hasLength(bootstrapServers)) {
            initKafkaConsumerAsync();
        } else {
            log.error("Kafka Not Connect ,Checking KafkaConsumer Configuration Parameters");
        }
    }

    /**
     * init卡夫卡消费者异步
     */
    private void initKafkaConsumerAsync() {
        CompletableFuture.runAsync(() -> {
            try {
                String topicName = getTopicName();
                String groupId = getGroupId();
                int maxPollRecords = getMaxPollRecords();
                //创建消费者配置信息
                Properties properties = new Properties();
                //链接的集群
                properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
                //开启自动提交
                properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
                //自动提交的延迟
                properties.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
                //
                properties.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, maxPollRecords);
                //key,value的反序列化
                properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
                properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
                //消费者组
                properties.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
                KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties);
                kafkaConsumer.subscribe(Collections.singletonList(topicName));
                while (true) {
                    ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofSeconds(5));
                    for (ConsumerRecord<String, String> record : records.records(topicName)) {
                        log.info("subscriber = {},topic = {} ,partition = {},groupId = {}, data = {}", this.getClass().getSimpleName(), topicName, record.partition(), groupId, record.value());
                        execute(record.value());
                    }
                }
            } catch (Exception ex) {
                log.error("Kafka failed to connect : " + ex);
            }
        });
    }
}
