package org.lql.chaos.kafka.example.config;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.lql.chaos.kafka.example.partition.MasterPartitioner;
import org.lql.chaos.kafka.example.serializer.KryoDeserializer;
import org.lql.chaos.kafka.example.serializer.KryoSerializer;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.support.serializer.ErrorHandlingDeserializer;
import org.springframework.kafka.support.serializer.JsonDeserializer;
import org.springframework.kafka.support.serializer.JsonSerializer;

import java.util.HashMap;
import java.util.Map;

/**
 * Kafka messaging configuration: declares producer factories / {@code KafkaTemplate}s and
 * consumer factories / listener-container factories for four value codecs
 * (String, byte[], JSON, Kryo), all keyed by String.
 *
 * @author liangqinglong
 * @since 2025-08-10
 **/
@Slf4j
@Configuration
public class KafkaTemplateConfig {

	private final KafkaProperties kafkaProperties;

	public KafkaTemplateConfig(KafkaProperties kafkaProperties) {
		this.kafkaProperties = kafkaProperties;
	}

	/**
	 * Builds the common producer configuration with a pluggable value serializer.
	 *
	 * @param valueSerializer serializer class used for record values
	 * @return a mutable property map ready to back a {@link DefaultKafkaProducerFactory}
	 */
	private Map<String, Object> buildProducerProps(Class<?> valueSerializer) {
		// buildProducerProperties() merges the common spring.kafka.* settings (ssl, client-id,
		// extra properties) with the producer-specific ones. The previous
		// getProducer().buildProperties() variant silently dropped the common block, which the
		// consumer side (buildConsumerProps) already honors via buildConsumerProperties().
		Map<String, Object> props = new HashMap<>(kafkaProperties.buildProducerProperties());
		props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, String.join(",", kafkaProperties.getBootstrapServers()));
		props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); // keys are always Strings
		props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, valueSerializer); // per-codec value serializer
		props.put(ProducerConfig.PARTITIONER_CLASS_CONFIG, MasterPartitioner.class); // custom partitioning strategy
		// acks: 0 = fire-and-forget, 1 = leader ack, -1/all = full ISR ack.
		// NOTE: since Kafka 3.0 the client default is "all"; "1" here deliberately trades
		// durability for latency.
		props.put(ProducerConfig.ACKS_CONFIG, "1");
		return props;
	}

	/**
	 * Builds the common consumer configuration with a pluggable value deserializer.
	 *
	 * @param valueDeserializer deserializer class used for record values
	 * @param useErrorHandling  when {@code true}, key and value deserializers are wrapped in
	 *                          {@link ErrorHandlingDeserializer} so a corrupt ("poison pill")
	 *                          record surfaces as a handled error instead of making the
	 *                          listener container retry the same offset forever
	 * @return a mutable property map ready to back a {@link DefaultKafkaConsumerFactory}
	 */
	private Map<String, Object> buildConsumerProps(Class<?> valueDeserializer, boolean useErrorHandling) {
		Map<String, Object> props = new HashMap<>(kafkaProperties.buildConsumerProperties());
		props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, String.join(",", kafkaProperties.getBootstrapServers()));
		if (useErrorHandling) {
			// ErrorHandlingDeserializer delegates to the real deserializers configured below.
			props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ErrorHandlingDeserializer.class);
			props.put(ErrorHandlingDeserializer.KEY_DESERIALIZER_CLASS, StringDeserializer.class);
			props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ErrorHandlingDeserializer.class);
			props.put(ErrorHandlingDeserializer.VALUE_DESERIALIZER_CLASS, valueDeserializer);
		} else {
			props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
			props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueDeserializer);
		}

		// JSON-specific: trust all packages for type-header resolution.
		// NOTE(review): "*" disables the deserialization allow-list — acceptable only if every
		// producer on these topics is trusted; otherwise narrow to the payload packages.
		if (valueDeserializer.equals(JsonDeserializer.class)) {
			props.put(JsonDeserializer.TRUSTED_PACKAGES, "*");
		}

		return props;
	}

	// ---------- String producer ----------

	@Bean
	public ProducerFactory<String, String> producerFactoryString() {
		return new DefaultKafkaProducerFactory<>(buildProducerProps(StringSerializer.class));
	}

	@Bean(name = "kafkaTemplateString")
	public KafkaTemplate<String, String> kafkaTemplateString() {
		return new KafkaTemplate<>(producerFactoryString());
	}

	// ---------- byte[] producer ----------

	@Bean
	public ProducerFactory<String, byte[]> producerFactoryBytes() {
		return new DefaultKafkaProducerFactory<>(buildProducerProps(ByteArraySerializer.class));
	}

	@Bean(name = "kafkaTemplateBytes")
	public KafkaTemplate<String, byte[]> kafkaTemplateBytes() {
		return new KafkaTemplate<>(producerFactoryBytes());
	}

	// ---------- JSON producer ----------

	@Bean
	public ProducerFactory<String, Object> producerFactoryJson() {
		Map<String, Object> props = buildProducerProps(JsonSerializer.class);
		// Embed __TypeId__ headers so the JsonDeserializer can reconstruct the payload type.
		props.put(JsonSerializer.ADD_TYPE_INFO_HEADERS, true);
		return new DefaultKafkaProducerFactory<>(props);
	}

	@Bean(name = "kafkaTemplateJson")
	public KafkaTemplate<String, Object> kafkaTemplateJson() {
		return new KafkaTemplate<>(producerFactoryJson());
	}

	// ---------- Kryo producer ----------

	@Bean
	public ProducerFactory<String, Object> producerFactoryKryo() {
		return new DefaultKafkaProducerFactory<>(buildProducerProps(KryoSerializer.class));
	}

	@Bean(name = "kafkaTemplateKryo")
	public KafkaTemplate<String, Object> kafkaTemplateKryo() {
		return new KafkaTemplate<>(producerFactoryKryo());
	}

	// ---------- String consumer ----------

	@Bean
	public ConsumerFactory<String, Object> consumerFactoryString() {
		return new DefaultKafkaConsumerFactory<>(buildConsumerProps(StringDeserializer.class, true));
	}

	@Bean(name = "kafkaListenerContainerFactoryString")
	public ConcurrentKafkaListenerContainerFactory<String, Object> kafkaListenerContainerFactoryString() {
		ConcurrentKafkaListenerContainerFactory<String, Object> factory = new ConcurrentKafkaListenerContainerFactory<>();
		factory.setConsumerFactory(consumerFactoryString());
		return factory;
	}

	// ---------- byte[] consumer ----------

	@Bean
	public ConsumerFactory<String, byte[]> consumerFactoryByte() {
		return new DefaultKafkaConsumerFactory<>(buildConsumerProps(ByteArrayDeserializer.class, true));
	}

	@Bean(name = "kafkaListenerContainerFactoryByte")
	public ConcurrentKafkaListenerContainerFactory<String, byte[]> kafkaListenerContainerFactoryByte() {
		ConcurrentKafkaListenerContainerFactory<String, byte[]> factory = new ConcurrentKafkaListenerContainerFactory<>();
		factory.setConsumerFactory(consumerFactoryByte());
		return factory;
	}

	// ---------- JSON consumer ----------

	@Bean
	public ConsumerFactory<String, Object> consumerFactoryJson() {
		// Wrapped in ErrorHandlingDeserializer like the other codecs: JSON is the codec most
		// likely to receive malformed payloads, and without the wrapper a single bad record
		// blocks the partition (the container re-polls the same offset indefinitely). The
		// JsonDeserializer.TRUSTED_PACKAGES setting is still applied to the delegate.
		return new DefaultKafkaConsumerFactory<>(buildConsumerProps(JsonDeserializer.class, true));
	}

	@Bean(name = "kafkaListenerContainerFactoryJson")
	public ConcurrentKafkaListenerContainerFactory<String, Object> kafkaListenerContainerFactoryJson() {
		ConcurrentKafkaListenerContainerFactory<String, Object> factory = new ConcurrentKafkaListenerContainerFactory<>();
		factory.setConsumerFactory(consumerFactoryJson());
		return factory;
	}

	// ---------- Kryo consumer ----------

	@Bean
	public ConsumerFactory<String, Object> consumerFactoryKryo() {
		return new DefaultKafkaConsumerFactory<>(buildConsumerProps(KryoDeserializer.class, true));
	}

	@Bean(name = "kafkaListenerContainerFactoryKryo")
	public ConcurrentKafkaListenerContainerFactory<String, Object> kafkaListenerContainerFactoryKryo() {
		ConcurrentKafkaListenerContainerFactory<String, Object> factory = new ConcurrentKafkaListenerContainerFactory<>();
		factory.setConsumerFactory(consumerFactoryKryo());
		return factory;
	}
}
