package com.sunday.common.mq.kafka.study.spring.e17_Default_Error_Handler;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.kafka.DefaultKafkaConsumerFactoryCustomizer;
import org.springframework.boot.autoconfigure.kafka.DefaultKafkaProducerFactoryCustomizer;
import org.springframework.boot.autoconfigure.kafka.KafkaConnectionDetails;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.config.TopicBuilder;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.support.serializer.JsonDeserializer;
import org.springframework.kafka.support.serializer.JsonSerializer;

import java.util.Map;

/**
 * Demo application for Spring Kafka's {@code DefaultErrorHandler}.
 *
 * <p>Replaces Boot's auto-configured producer/consumer factories with String-serializing
 * ones, and configures an aggressively small producer request timeout so that send
 * failures can be observed and handled.
 *
 * @see <a href="https://docs.spring.io/spring-kafka/docs/current/reference/html/#annotation-error-handling">Spring Kafka error handling</a>
 */
@Slf4j
@SpringBootApplication
public class Application {

    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }

    /**
     * Consumer factory with plain String key/value deserialization.
     *
     * @param connectionDetails resolved Kafka connection details (bootstrap servers)
     * @param customizers       {@link DefaultKafkaConsumerFactoryCustomizer} beans, applied in order
     * @param prop              Spring Boot Kafka properties ({@code spring.kafka.*})
     * @return the customized consumer factory
     */
    @Bean
    public DefaultKafkaConsumerFactory<?, ?> kafkaConsumerFactory(KafkaConnectionDetails connectionDetails,
                                                                  ObjectProvider<DefaultKafkaConsumerFactoryCustomizer> customizers,
                                                                  KafkaProperties prop
    ) {
        Map<String, Object> properties = prop.buildConsumerProperties();
        // Bug fix: this is the CONSUMER factory, so use the consumer-side bootstrap servers.
        // The original used ProducerConfig.BOOTSTRAP_SERVERS_CONFIG with
        // getProducerBootstrapServers(), which would point the consumer at the producer
        // cluster when the two differ.
        properties.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, connectionDetails.getConsumerBootstrapServers());

        DefaultKafkaConsumerFactory<?, ?> factory = new DefaultKafkaConsumerFactory<>(
                properties,
                new StringDeserializer(),
                new StringDeserializer()
        );

        customizers.orderedStream().forEach((customizer) -> customizer.customize(factory));
        return factory;
    }

    /**
     * Producer factory with plain String key/value serialization and a deliberately
     * tiny request timeout.
     *
     * @param connectionDetails resolved Kafka connection details (bootstrap servers)
     * @param customizers       {@link DefaultKafkaProducerFactoryCustomizer} beans, applied in order
     * @param prop              Spring Boot Kafka properties ({@code spring.kafka.*})
     * @return the customized producer factory
     */
    @Bean
    public DefaultKafkaProducerFactory<?, ?> kafkaProducerFactory(KafkaConnectionDetails connectionDetails,
                                                                  ObjectProvider<DefaultKafkaProducerFactoryCustomizer> customizers,
                                                                  KafkaProperties prop
    ) {
        Map<String, Object> properties = prop.buildProducerProperties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, connectionDetails.getProducerBootstrapServers());

        /*
         * Timeouts that govern producer-side TimeoutExceptions:
         *   - metadata.fetch.timeout.ms: max time to wait for cluster metadata (default 60s)
         *   - request.timeout.ms:        max time to wait for a send response before the
         *                                request times out (default 60s, usually 30s in
         *                                recent clients)
         *
         * NOTE(review): 1 ms is far below any realistic broker round-trip, so virtually
         * every send will fail with a TimeoutException — presumably intentional here to
         * exercise the DefaultErrorHandler; confirm before reusing this config elsewhere.
         */
        properties.put(CommonClientConfigs.REQUEST_TIMEOUT_MS_CONFIG, "1");

        DefaultKafkaProducerFactory<?, ?> factory = new DefaultKafkaProducerFactory<>(
                properties,
                new StringSerializer(),
                new StringSerializer()
        );

        // Mirror Boot's auto-configuration: propagate the transaction id prefix if set.
        String transactionIdPrefix = prop.getProducer().getTransactionIdPrefix();
        if (transactionIdPrefix != null) {
            factory.setTransactionIdPrefix(transactionIdPrefix);
        }
        customizers.orderedStream().forEach((customizer) -> customizer.customize(factory));
        return factory;
    }

    /**
     * Declares the demo topic (3 partitions, single replica — suitable for a
     * single-broker dev cluster only).
     *
     * @return the {@code topic11} topic definition
     */
    @Bean
    public NewTopic topic11() {
        return TopicBuilder.name("topic11")
                .partitions(3)
                .replicas(1)
                .build();
    }

}
