package com.pt25.base.dep.kafka;

import com.pt25.base.constants.BaseConstants;
import com.pt25.base.util.CollectionUtil;
import com.pt25.base.util.SpringUtil;
import com.pt25.base.util.StrUtil;
import jakarta.annotation.Resource;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.*;
import org.springframework.stereotype.Component;

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;


@EnableKafka
@Configuration
class KafkaConfig {

    @Resource
    private SpringUtil springUtil;

    @Resource
    private KafkaProperties kafkaProperties;

    /**
     * Builds one {@link ConcurrentKafkaListenerContainerFactory} per configured
     * consumer and registers each factory as a Spring bean under the consumer's
     * name (via {@code springUtil.registerBean}), so listeners can reference it
     * by name — presumably through {@code @KafkaListener(containerFactory = "...")};
     * confirm against the listener declarations.
     *
     * @return map of consumer name to its listener container factory; empty when
     *         no consumers are configured or their configs fail validation
     */
    @Bean
    Map<String, ConcurrentKafkaListenerContainerFactory<String, String>> kafkaListenerContainerFactories() {
        Map<String, ConcurrentKafkaListenerContainerFactory<String, String>> factories = new HashMap<>();
        if (CollectionUtil.isNotEmpty(kafkaProperties.getConsumers()) && kafkaProperties.checkConsumerConfigs()) {
            kafkaProperties.getConsumers().forEach(consumer -> {
                Map<String, Object> consumerProps = new HashMap<>();
                consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, consumer.getBootstrapServers());
                consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
                consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
                consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, consumer.getAutoOffsetReset());
                consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, consumer.getEnableAutoCommit());
                consumerProps.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, consumer.getAutoCommitInterval());
                consumerProps.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, consumer.getBatchSize());

                // Add SASL credentials when username/password are configured (no-op otherwise)
                addSaslAuthConfig(consumerProps, consumer.getUsername(), consumer.getPassword());

                ConsumerFactory<String, String> consumerFactory = new DefaultKafkaConsumerFactory<>(consumerProps);
                ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
                factory.setConsumerFactory(consumerFactory);
                factory.setBatchListener(consumer.getBatchListener());
                factory.setConcurrency(consumer.getConcurrency());
                String factoryName = consumer.getName();
                factories.put(factoryName, factory);
                // Register programmatically so the factory is resolvable by name at runtime
                springUtil.registerBean(factoryName, factory);
            });
        }
        return factories;
    }

    /**
     * Builds one {@link KafkaTemplate} per configured producer, keyed by the
     * producer's name. If a producer matching {@code kafkaProperties.getPrimary()}
     * exists, it is additionally exposed under {@link BaseConstants#DEFAULT} as
     * the default template.
     *
     * @return map of producer name to its KafkaTemplate; empty when no producers
     *         are configured or their configs fail validation
     */
    @Bean("kafkaTemplates")
    Map<String, KafkaTemplate<String, String>> kafkaTemplates() {
        // LinkedHashMap keeps templates in configuration order
        Map<String, KafkaTemplate<String, String>> templates = new LinkedHashMap<>();
        if (CollectionUtil.isNotEmpty(kafkaProperties.getProducers()) && kafkaProperties.checkProducerConfigs()) {
            kafkaProperties.getProducers().forEach(producer -> {
                Map<String, Object> producerProps = new HashMap<>();
                producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, producer.getBootstrapServers());
                producerProps.put(ProducerConfig.ACKS_CONFIG, producer.getAcksConfig());
                producerProps.put(ProducerConfig.RETRIES_CONFIG, producer.getRetries());
                producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
                producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

                // Add SASL credentials when username/password are configured (no-op otherwise)
                addSaslAuthConfig(producerProps, producer.getUsername(), producer.getPassword());

                ProducerFactory<String, String> producerFactory = new DefaultKafkaProducerFactory<>(producerProps);
                templates.put(producer.getName(), new KafkaTemplate<>(producerFactory));
            });
            // Alias the primary producer's template as the default; single lookup
            // instead of the original double get(). Silently skipped if the primary
            // name does not match any configured producer (original behavior).
            KafkaTemplate<String, String> primary = templates.get(kafkaProperties.getPrimary());
            if (primary != null) {
                templates.put(BaseConstants.DEFAULT, primary);
            }
        }
        return templates;
    }

    /**
     * Adds SASL/PLAIN authentication properties to a Kafka client config map.
     * Does nothing when either credential is blank, leaving the client
     * unauthenticated (plaintext).
     *
     * @param props    mutable client configuration to augment
     * @param username SASL username; blank disables authentication
     * @param password SASL password; blank disables authentication
     */
    private void addSaslAuthConfig(Map<String, Object> props, String username, String password) {
        if (StrUtil.isNotBlank(username) && StrUtil.isNotBlank(password)) {
            props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
            // Different brokers use different mechanisms; for SCRAM use instead:
            //   props.put(SaslConfigs.SASL_MECHANISM, "SCRAM-SHA-256");
            //   "org.apache.kafka.common.security.scram.ScramLoginModule required username=\"%s\" password=\"%s\";"
            props.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
            String jaasTemplate = "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"%s\" password=\"%s\";";
            String jaasConfig = String.format(jaasTemplate, username, password);
            props.put(SaslConfigs.SASL_JAAS_CONFIG, jaasConfig);
        }
    }
}