package com.kafka.config;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.HashMap;
import java.util.Map;

@Slf4j
@Configuration
public class ConsumerConfig {

    // Auto-configured base consumer factory; used here only as the source of the
    // externally configured group id.
    @Resource
    private ConsumerFactory<String, String> consumerFactory;

    /**
     * Builds the {@link KafkaListenerContainerFactory} backing {@code @KafkaListener}
     * methods: String key/value deserialization, manual offset acknowledgment, and
     * batch delivery of records.
     *
     * @return a concurrent listener container factory with manual-ack batch listening
     */
    @Bean
    public KafkaListenerContainerFactory<?> containerFactory() {
        // Properties of the auto-configured factory (source of the configured group id).
        Map<String, Object> baseProperties = consumerFactory.getConfigurationProperties();

        Map<String, Object> props = new HashMap<>();
        // Reuse the group id from the injected factory so it stays in sync with application config.
        props.put(org.apache.kafka.clients.consumer.ConsumerConfig.GROUP_ID_CONFIG, baseProperties.get("group.id"));
        props.put(org.apache.kafka.clients.consumer.ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        // NOTE(review): bootstrap servers are hard-coded, overriding any externally
        // configured value — consider reading them from baseProperties instead. TODO confirm.
        props.put(org.apache.kafka.clients.consumer.ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
        // Offsets are committed manually (AckMode.MANUAL below), so auto-commit must stay off.
        props.put(org.apache.kafka.clients.consumer.ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        props.put(org.apache.kafka.clients.consumer.ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(org.apache.kafka.clients.consumer.ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);

        // Key type is String to match StringDeserializer (was incorrectly <Integer, String>).
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(new DefaultKafkaConsumerFactory<>(props));
        // Manual acks: listener methods receive an Acknowledgment and commit explicitly.
        factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL);
        // Deliver records in batches rather than one at a time.
        factory.setBatchListener(true);
        return factory;
    }
}
