package com.wyx.springbootspringkafkademo.config;

import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.databind.JsonSerializer;
import com.wyx.springbootspringkafkademo.entity.User;
import com.wyx.springbootspringkafkademo.producer.KafkaProducerService;
import com.wyx.springbootspringkafkademo.producer.UserProducer;
import io.confluent.kafka.serializers.KafkaAvroDeserializer;
import io.confluent.kafka.serializers.KafkaAvroDeserializerConfig;
import io.confluent.kafka.serializers.KafkaAvroSerializer;
import org.apache.avro.generic.GenericRecord;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.TopicBuilder;
import org.springframework.kafka.core.*;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.support.serializer.ErrorHandlingDeserializer;
import org.springframework.kafka.support.serializer.JsonDeserializer;

/**
 * Kafka configuration: producer/consumer factories, templates, auto-created topics,
 * and listener container factories for String, JSON, and Avro payloads.
 *
 * @author wyx
 * @since 2025-03-27
 */
@Configuration
public class KafkaConfig {

    /** Kafka bootstrap servers, injected from {@code spring.kafka.bootstrap-servers}. */
    @Value("${spring.kafka.bootstrap-servers}") private String bootstrapServers;

    /**
     * Producer factory for String-serialized messages, configured for reliable delivery:
     * acks=all, idempotence, and bounded retries.
     */
    @Bean
    public ProducerFactory<String, Object> producerFactory() {
        Map<String, Object> configProps = new HashMap<>();
        configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

        // Recommended settings for production use.
        configProps.put(ProducerConfig.ACKS_CONFIG, "all"); // wait for all in-sync replicas to ack
        // Idempotence assigns each record a sequence number so broker-side retries
        // never produce duplicates, even across producer restarts or network retries.
        configProps.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true);
        configProps.put(ProducerConfig.RETRIES_CONFIG, 3); // retry transient send failures
        // Producer interceptor chain (comma-separated fully-qualified class names).
        configProps.put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG,
            "com.wyx.springbootspringkafkademo.interceptor.MyInterceptor");

        /*
         * MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION limits unacknowledged requests per
         * connection (default 5). With idempotence enabled, any value <= 5 still
         * preserves per-partition ordering; setting 1 fully serializes requests
         * but does not affect partitioning.
         */
        configProps.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, 5);

        return new DefaultKafkaProducerFactory<>(configProps);
    }

    /** Default String/Object template backed by {@link #producerFactory()}. */
    @Bean
    public KafkaTemplate<String, Object> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }

    /**
     * Consumer factory for String key/value records (group "my-group").
     * Auto-commit is disabled; offsets are committed manually via the listener container.
     */
    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "my-group");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        //    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class);

        // Recommended settings for production use.
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest"); // or "earliest"
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false); // manual offset commits
        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 500); // max records per poll()
        props.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, 300000); // max gap between polls (5 min)

        return new DefaultKafkaConsumerFactory<>(props);
    }

    /** Default listener container factory; listeners must acknowledge() to commit offsets. */
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
            new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        // MANUAL ack mode: offsets are committed only when the listener calls acknowledge().
        factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL);
        return factory;
    }

    /** Admin client used by Spring to create the {@link NewTopic} beans declared below. */
    @Bean
    public KafkaAdmin kafkaAdmin() {
        Map<String, Object> configs = new HashMap<>();
        configs.put("bootstrap.servers", bootstrapServers); // Kafka broker address
        return new KafkaAdmin(configs);
    }

    /** Topic for {@link KafkaProducerService}: 3 partitions, replication factor 2. */
    @Bean
    public NewTopic myTopic() {
        return TopicBuilder.name(KafkaProducerService.TOPIC)
            .partitions(3)
            .replicas(2)
            .build();
    }

    /** Topic for {@link UserProducer}: 3 partitions, replication factor 2. */
    @Bean
    public NewTopic myTopic2() {
        return TopicBuilder.name(UserProducer.TOPIC)
            .partitions(3)
            .replicas(2)
            .build();
    }

    /**
     * 1. KafkaTemplate serializing {@link User} values as JSON.
     *
     * <p>FIX: the value serializer must be Spring Kafka's
     * {@code org.springframework.kafka.support.serializer.JsonSerializer}, referenced
     * fully qualified here because the file imports Jackson's
     * {@code com.fasterxml.jackson.databind.JsonSerializer}, which does NOT implement
     * Kafka's {@code Serializer} interface — using it would fail when the producer
     * is constructed.
     */
    @Bean
    public KafkaTemplate<String, User> jsonKafkaTemplate(KafkaProperties properties) {
        Map<String, Object> producerProps = new HashMap<>(properties.buildProducerProperties(null));
        producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
            org.springframework.kafka.support.serializer.JsonSerializer.class);

        ProducerFactory<String, User> factory = new DefaultKafkaProducerFactory<>(producerProps);
        return new KafkaTemplate<>(factory);
    }

    /**
     * 2. KafkaTemplate serializing values as Avro via Confluent's serializer.
     * Requires a running Schema Registry.
     */
    @Bean
    public KafkaTemplate<String, GenericRecord> avroKafkaTemplate(KafkaProperties properties) {
        Map<String, Object> producerProps = new HashMap<>(properties.buildProducerProperties(null));
        producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class); // needs Schema Registry
        producerProps.put("schema.registry.url", "http://localhost:8081"); // TODO externalize to properties
        ProducerFactory<String, GenericRecord> factory = new DefaultKafkaProducerFactory<>(producerProps);
        return new KafkaTemplate<>(factory);
    }


    /** Consumer factory deserializing both key and value as Strings (group "string-group"). */
    @Bean
    public ConsumerFactory<String, String> stringConsumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "string-group");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return new DefaultKafkaConsumerFactory<>(props);
    }

    /**
     * Consumer factory deserializing values as JSON (group "json-group").
     *
     * <p>FIX: the value deserializer instance is now explicitly wrapped in an
     * {@link ErrorHandlingDeserializer}. The previous code only declared the wrapping
     * via properties, but deserializer instances passed to the factory constructor take
     * precedence over class-based props — so the error handling never applied and a
     * malformed ("poison pill") record could have killed the listener container.
     */
    @Bean
    public ConsumerFactory<String, Object> jsonConsumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "json-group");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        // Kept for documentation/fallback; the instance-based wiring below is authoritative.
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ErrorHandlingDeserializer.class.getName());
        props.put(ErrorHandlingDeserializer.VALUE_DESERIALIZER_CLASS, JsonDeserializer.class.getName());
        return new DefaultKafkaConsumerFactory<>(props, new StringDeserializer(),
            new ErrorHandlingDeserializer<>(new JsonDeserializer<>(Object.class)));
    }

    /**
     * Consumer factory deserializing values as Avro via Confluent's deserializer
     * (group "avro-group"). Requires a running Schema Registry.
     *
     * <p>FIX: the factory is now built from properties only. The previous code passed a
     * bare {@code new KafkaAvroDeserializer()} instance which — depending on the Spring
     * Kafka / kafka-clients version — may never have {@code configure()} called and so
     * would never receive the schema-registry URL, failing on the first poll. Class-based
     * configuration is always instantiated and configured with these props. Also repaired
     * a corrupted line where a stray {@code props.put} statement had been fused into a
     * trailing comment.
     */
    @Bean
    public ConsumerFactory<String, Object> avroConsumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "avro-group");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class);
        props.put(KafkaAvroDeserializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081");
        // Deserialize Avro records into generated SpecificRecord classes (e.g. User)
        // instead of GenericRecord.
        props.put(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, true);

        return new DefaultKafkaConsumerFactory<>(props);
    }

    /** Listener container factory for {@link #stringConsumerFactory()}. */
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> stringKafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(stringConsumerFactory());
        return factory;
    }

    /** Listener container factory for {@link #jsonConsumerFactory()}. */
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, Object> jsonKafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, Object> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(jsonConsumerFactory());
        return factory;
    }

    /** Listener container factory for {@link #avroConsumerFactory()} with manual acks. */
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, Object> avroKafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, Object> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(avroConsumerFactory());
        // MANUAL ack mode: listeners must call Acknowledgment.acknowledge() to commit.
        factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL);
        factory.getContainerProperties().setPollTimeout(3000); // poll timeout in ms
        return factory;
    }
}
