package com.sunday.common.mq.kafka.study.spring.e12_Serialization_Deserialization_Message_Conversion_Type;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.*;
import org.apache.kafka.common.utils.Bytes;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.kafka.DefaultKafkaConsumerFactoryCustomizer;
import org.springframework.boot.autoconfigure.kafka.DefaultKafkaProducerFactoryCustomizer;
import org.springframework.boot.autoconfigure.kafka.KafkaConnectionDetails;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.PropertyMapper;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.config.TopicBuilder;
import org.springframework.kafka.core.*;
import org.springframework.kafka.listener.KafkaListenerErrorHandler;
import org.springframework.kafka.support.ProducerListener;
import org.springframework.kafka.support.converter.RecordMessageConverter;
import org.springframework.kafka.support.serializer.DelegatingByTypeSerializer;
import org.springframework.kafka.support.serializer.DelegatingDeserializer;

import java.util.Map;

/**
 * https://docs.spring.io/spring-kafka/docs/current/reference/html/#by-topic
 */
@Slf4j
@SpringBootApplication
public class TypeApplication {

    public static void main(String[] args) {
        SpringApplication.run(TypeApplication.class, args);
    }

    /**
     * Replaces Boot's auto-configured consumer factory with one that uses a
     * {@link DelegatingDeserializer}, which picks the concrete deserializer per
     * record based on the {@code spring.deserializer.key/value.delegate.class}
     * record header written by the matching delegating serializer.
     *
     * @param connectionDetails cluster connection info resolved by Spring Boot
     * @param customizers       user-supplied factory customizers, applied in order
     * @param prop              {@code spring.kafka.*} configuration properties
     * @return the consumer factory backing all listener containers
     */
    @Bean
    public DefaultKafkaConsumerFactory<?, ?> kafkaConsumerFactory(KafkaConnectionDetails connectionDetails,
                                                                  ObjectProvider<DefaultKafkaConsumerFactoryCustomizer> customizers,
                                                                  KafkaProperties prop
    ) {
        Map<String, Object> properties = prop.buildConsumerProperties();
        // BUG FIX: this is the CONSUMER factory, so it must point at the consumer
        // bootstrap servers. The original used ProducerConfig +
        // getProducerBootstrapServers(), which silently connects consumers to the
        // producer cluster when the two differ in KafkaConnectionDetails.
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, connectionDetails.getConsumerBootstrapServers());

        // Key deserializer stays null -> taken from the properties map; only the
        // value deserializer delegates by type name.
        DefaultKafkaConsumerFactory<Object, Object> factory = new DefaultKafkaConsumerFactory<>(
                properties,
                null,
                new DelegatingDeserializer(
                        Map.of(
                                byte[].class.getName(), new ByteArrayDeserializer(),
                                Bytes.class.getName(), new BytesDeserializer(),
                                String.class.getName(), new StringDeserializer()
                        ))
        );

        customizers.orderedStream().forEach((customizer) -> customizer.customize(factory));
        return factory;
    }

    /**
     * Replaces Boot's auto-configured producer factory with one that uses a
     * {@link DelegatingByTypeSerializer}: the value serializer is chosen from the
     * runtime type of the payload (byte[], Bytes or String).
     *
     * @param connectionDetails cluster connection info resolved by Spring Boot
     * @param customizers       user-supplied factory customizers, applied in order
     * @param prop              {@code spring.kafka.*} configuration properties
     * @return the producer factory backing {@link KafkaTemplate}
     */
    @Bean
    public DefaultKafkaProducerFactory<?, ?> kafkaProducerFactory(KafkaConnectionDetails connectionDetails,
                                                                  ObjectProvider<DefaultKafkaProducerFactoryCustomizer> customizers,
                                                                  KafkaProperties prop
    ) {
        Map<String, Object> properties = prop.buildProducerProperties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, connectionDetails.getProducerBootstrapServers());

        // Key serializer stays null -> taken from the properties map; only the
        // value serializer delegates by payload type.
        DefaultKafkaProducerFactory<?, ?> factory = new DefaultKafkaProducerFactory<>(
                properties,
                null,
                new DelegatingByTypeSerializer(
                        Map.of(
                                byte[].class, new ByteArraySerializer(),
                                Bytes.class, new BytesSerializer(),
                                String.class, new StringSerializer())));

        // Mirrors Boot's auto-configuration: enable transactions only when a
        // transaction-id prefix is configured.
        String transactionIdPrefix = prop.getProducer().getTransactionIdPrefix();
        if (transactionIdPrefix != null) {
            factory.setTransactionIdPrefix(transactionIdPrefix);
        }
        customizers.orderedStream().forEach((customizer) -> customizer.customize(factory));
        return factory;
    }

    /** Interceptor applied to every record sent through the template below. */
    @Bean
    public MyProducerInterceptor myProducerInterceptor() {
        return new MyProducerInterceptor();
    }

    /**
     * Re-declares Boot's {@link KafkaTemplate} bean so the custom producer
     * interceptor can be attached; otherwise mirrors the auto-configuration
     * (message converter, producer listener, default topic, tx-id prefix).
     *
     * @param kafkaProducerFactory  the factory defined above
     * @param kafkaProducerListener callback for send success/failure
     * @param messageConverter      applied only if exactly one candidate exists
     * @param prop                  {@code spring.kafka.template.*} properties
     * @param myProducerInterceptor interceptor invoked before each send
     * @return the template used by producers in this demo
     */
    @Bean
    public KafkaTemplate<?, ?> kafkaTemplate(ProducerFactory<Object, Object> kafkaProducerFactory,
                                             ProducerListener<Object, Object> kafkaProducerListener,
                                             ObjectProvider<RecordMessageConverter> messageConverter,
                                             KafkaProperties prop,
                                             MyProducerInterceptor myProducerInterceptor
    ) {
        PropertyMapper map = PropertyMapper.get().alwaysApplyingWhenNonNull();
        KafkaTemplate<Object, Object> kafkaTemplate = new KafkaTemplate<>(kafkaProducerFactory);
        messageConverter.ifUnique(kafkaTemplate::setMessageConverter);
        map.from(kafkaProducerListener).to(kafkaTemplate::setProducerListener);
        map.from(prop.getTemplate().getDefaultTopic()).to(kafkaTemplate::setDefaultTopic);
        map.from(prop.getTemplate().getTransactionIdPrefix()).to(kafkaTemplate::setTransactionIdPrefix);

        kafkaTemplate.setProducerInterceptor(myProducerInterceptor);

        return kafkaTemplate;
    }

    /**
     * Listener-level error handler for payload validation failures: logs the
     * offending message and swallows the error (returns {@code null}, so no
     * reply is produced and the record is considered handled).
     *
     * https://docs.spring.io/spring-kafka/docs/current/reference/html/#kafka-validation
     */
    @Bean
    public KafkaListenerErrorHandler validationErrorHandler() {
        return (m, e) -> {
            log.error("{} - {}", m, e.toString());
            return null;
        };
    }

    /** Declarative topic creation: "topic7" with 3 partitions, replication factor 1. */
    @Bean
    public NewTopic topic7() {
        return TopicBuilder.name("topic7")
                .partitions(3)
                .replicas(1)
                .build();
    }

}
