package com.kafka.demo.config;

import com.kafka.demo.config.yml.KafkaProducerProperties;
import com.kafka.demo.config.yml.MultiKafkaProducerProperties;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

import javax.annotation.PostConstruct;
import java.time.Duration;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Dynamically registers one {@link ProducerFactory} + {@link KafkaTemplate} pair per
 * service entry in {@link MultiKafkaProducerProperties}, layering per-service overrides
 * on top of the Spring Boot default {@code spring.kafka.producer.*} settings.
 *
 * <p>Bean names follow the pattern {@code <name>ProducerFactory} / {@code <name>KafkaTemplate}.
 */
@Configuration
@EnableKafka
@RequiredArgsConstructor
@Slf4j
public class KafkaProducerConfig {

    /** Matches "&lt;digits&gt;[unit]" such as "16384", "16KB", "32 MB". Compiled once, not per call. */
    private static final Pattern SIZE_PATTERN = Pattern.compile("^(\\d+)\\s*([A-Z]+)?$");

    /** Fallback byte size (32 MiB) used when a size property is absent entirely. */
    private static final int DEFAULT_SIZE_BYTES = 32 * 1024 * 1024;

    private final ConfigurableListableBeanFactory beanFactory;
    private final MultiKafkaProducerProperties multiKafkaProperties;
    private final KafkaProperties springKafkaProperties; // Spring Boot default Kafka config

    @PostConstruct
    public void init() {
        // One factory/template pair per configured service.
        multiKafkaProperties.getService().forEach(this::registerKafkaTemplate);
    }

    /**
     * Builds the merged producer config (custom value wins over the Spring Boot default)
     * and registers both the factory and the template as singleton beans.
     *
     * @param name   logical service name, used as the bean-name prefix
     * @param custom per-service overrides; individual fields may be null
     */
    private void registerKafkaTemplate(String name, KafkaProducerProperties custom) {
        String factoryBeanName = name + "ProducerFactory";
        String templateBeanName = name + "KafkaTemplate";

        // 1. Spring Boot defaults (spring.kafka.producer.*).
        KafkaProperties.Producer defaultProducer = springKafkaProperties.getProducer();

        // 2. Merge: custom overrides default. Entries that resolve to null are skipped
        //    entirely so the Kafka client falls back to its own built-in defaults
        //    instead of failing validation on an explicit null value.
        Map<String, Object> config = new HashMap<>();

        putIfNonNull(config, ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
                firstNonNull(custom.getBootstrapServers(), springKafkaProperties.getBootstrapServers()));
        putIfNonNull(config, ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                firstNonNull(custom.getKeySerializer(), defaultProducer.getKeySerializer()));
        putIfNonNull(config, ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                firstNonNull(custom.getValueSerializer(), defaultProducer.getValueSerializer()));
        putIfNonNull(config, ProducerConfig.ACKS_CONFIG,
                firstNonNull(custom.getAcks(), defaultProducer.getAcks()));
        putIfNonNull(config, ProducerConfig.RETRIES_CONFIG,
                firstNonNull(custom.getRetries(), defaultProducer.getRetries()));
        config.put(ProducerConfig.BATCH_SIZE_CONFIG,
                parseSize(firstNonNull(custom.getBatchSize(), defaultProducer.getBatchSize())));
        config.put(ProducerConfig.BUFFER_MEMORY_CONFIG,
                parseSize(firstNonNull(custom.getBufferMemory(), defaultProducer.getBufferMemory())));

        // TODO(review): linger.ms / request.timeout.ms / retry.backoff.ms are not wired
        // through yet; add them here once the custom property types are settled.

        // 3. Create the ProducerFactory.
        DefaultKafkaProducerFactory<String, String> pf =
                new DefaultKafkaProducerFactory<>(config);

        // 4. Register into the Spring context. NOTE(review): registerSingleton bypasses
        //    bean post-processing and destroy callbacks — acceptable here, but verify
        //    producers are closed on shutdown if that matters for this deployment.
        beanFactory.registerSingleton(factoryBeanName, pf);
        beanFactory.registerSingleton(templateBeanName, new KafkaTemplate<>(pf));

        log.info("注册 KafkaTemplate: {}", templateBeanName);
    }

    /** Puts {@code value} under {@code key} only when it is non-null. */
    private static void putIfNonNull(Map<String, Object> config, String key, Object value) {
        if (value != null) {
            config.put(key, value);
        }
    }

    /** Returns {@code custom} when non-null, otherwise {@code defaultValue} (which may itself be null). */
    private <T> T firstNonNull(T custom, T defaultValue) {
        return custom != null ? custom : defaultValue;
    }

    /**
     * Normalizes a size property to bytes. Accepts plain numbers, human-readable strings
     * ("16KB", "32MB"), and any other object whose {@code toString()} matches the same
     * format — e.g. Spring's {@code DataSize} renders as "16384B", which previously fell
     * into the silent 32 MiB fallback.
     *
     * @param size the raw property value; null yields {@link #DEFAULT_SIZE_BYTES}
     * @return size in bytes
     * @throws ArithmeticException      if the size exceeds {@code Integer.MAX_VALUE}
     *                                  (was a silent int truncation before)
     * @throws IllegalArgumentException if the textual form cannot be parsed
     */
    private int parseSize(Object size) {
        if (size == null) {
            return DEFAULT_SIZE_BYTES; // neither custom nor default configured
        }
        if (size instanceof Number) {
            // Covers the original Integer/Long cases in one branch.
            return ((Number) size).intValue();
        }
        // Strings, DataSize, etc. — fail loudly on overflow instead of wrapping negative.
        return Math.toIntExact(parseDataSize(size.toString()));
    }

    /**
     * Parses a human-readable data size into bytes using binary units
     * (KB/KiB = 1024, MB/MiB = 1024², GB/GiB = 1024³); a bare number means bytes.
     *
     * @param size e.g. "16384", "16KB", "32 MiB" (case-insensitive)
     * @return size in bytes
     * @throws IllegalArgumentException on blank input, bad format, or unknown unit
     */
    private long parseDataSize(String size) {
        if (size == null || size.trim().isEmpty()) {
            throw new IllegalArgumentException("Size must not be null or empty");
        }

        // Locale.ROOT: locale-independent upper-casing — the default locale would
        // mangle "KiB" -> "KİB" under Turkish-style casing rules.
        String normalized = size.trim().toUpperCase(Locale.ROOT);

        Matcher matcher = SIZE_PATTERN.matcher(normalized);
        if (!matcher.matches()) {
            throw new IllegalArgumentException("Invalid size format: " + size);
        }

        long value = Long.parseLong(matcher.group(1));
        String unit = matcher.group(2); // null when no unit was given

        // if-else chain kept for JDK 8 compatibility (no switch expressions).
        if (unit == null || unit.isEmpty() || "B".equals(unit)) {
            return value;
        } else if ("KB".equals(unit) || "KIB".equals(unit)) {
            return value * 1024;
        } else if ("MB".equals(unit) || "MIB".equals(unit)) {
            return value * 1024 * 1024;
        } else if ("GB".equals(unit) || "GIB".equals(unit)) {
            return value * 1024 * 1024 * 1024; // long arithmetic throughout, no overflow here
        } else {
            throw new IllegalArgumentException("Unknown unit: " + unit);
        }
    }
}