package com.laolei.core.kafka;

import java.io.IOException;
import java.lang.reflect.Type;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cglib.beans.BeanMap;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.support.KafkaNull;
import org.springframework.kafka.support.converter.BatchMessagingMessageConverter;
import org.springframework.kafka.support.converter.ConversionException;
import org.springframework.kafka.support.converter.MessageConverter;
import org.springframework.kafka.support.converter.MessagingMessageConverter;
import org.springframework.kafka.support.serializer.JsonSerializer;
import org.springframework.retry.RetryPolicy;
import org.springframework.retry.backoff.NoBackOffPolicy;
import org.springframework.retry.policy.SimpleRetryPolicy;
import org.springframework.retry.support.RetryTemplate;
import org.springframework.util.StringUtils;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.MapperFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.type.TypeFactory;

@EnableKafka
@Configuration
@EnableConfigurationProperties({ CoreConfig.class, ProducerConfig.class, ConsumerConfig.class })
public class KafkaConfig {

	/**
	 * Fallback {@link ObjectMapper} used for JSON (de)serialization when the
	 * application has not declared one of its own.
	 */
	@Bean
	@ConditionalOnMissingBean(ObjectMapper.class)
	public ObjectMapper objectMapper() {
		ObjectMapper objectMapper = new ObjectMapper();
		objectMapper.configure(MapperFeature.DEFAULT_VIEW_INCLUSION, false);
		// Tolerate unknown JSON fields so producer and consumer payload classes
		// can evolve independently without breaking deserialization.
		objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
		return objectMapper;
	}

	/**
	 * Translates a camelCase bean property name into the dotted lower-case form
	 * used by Kafka client configuration, e.g. {@code bootstrapServers} ->
	 * {@code bootstrap.servers}.
	 *
	 * @param key camelCase property name, may be {@code null}
	 * @return dotted key, or {@code null} when {@code key} is {@code null}
	 */
	private String getKey(String key) {
		if (key == null)
			return null;
		// StringBuilder instead of String += in a loop (avoids O(n^2) copying).
		StringBuilder dotted = new StringBuilder(key.length() + 8);
		for (char c : key.toCharArray()) {
			if (c >= 'A' && c <= 'Z') {
				// 'A' + 32 == 'a': lower-case the letter and prefix a dot.
				dotted.append('.').append((char) (c + 32));
			} else {
				dotted.append(c);
			}
		}
		return dotted.toString();
	}

	/**
	 * Flattens the non-null properties of the given configuration beans into a
	 * single Kafka config map, converting camelCase property names to dotted
	 * keys via {@link #getKey(String)}. Later beans overwrite earlier ones on
	 * key collision.
	 */
	@SuppressWarnings({ "unchecked" })
	private Map<String, Object> toMap(Object... beans) {
		Map<String, Object> map = new HashMap<>();
		for (Object bean : beans) {
			BeanMap.create(bean).forEach((Object key, Object value) -> {
				if (value != null)
					map.put(getKey(String.valueOf(key)), value);
			});
		}
		return map;
	}

	/**
	 * Puts a default {@code value} under {@code name} unless the map already
	 * holds a non-empty entry for it.
	 */
	private void def(Map<String, Object> configs, String name, Object value) {
		Object current = configs.get(name);
		// null or empty-string means "not configured" (replaces the deprecated
		// StringUtils.isEmpty(Object) with the equivalent explicit check).
		if (current == null || "".equals(current))
			configs.put(name, value);
	}

	/**
	 * Producer factory built from the externalized core/producer properties,
	 * with sensible defaults: String keys and JSON-serialized values.
	 */
	@Bean
	public ProducerFactory<String, Object> producerFactory(CoreConfig kafkaCore, ProducerConfig kafkaProducer) {
		Map<String, Object> configs = this.toMap(kafkaCore, kafkaProducer);
		this.def(configs, "bootstrap.servers", "localhost:9092");
		this.def(configs, "key.serializer", StringSerializer.class);
		this.def(configs, "value.serializer", JsonSerializer.class);
		return new DefaultKafkaProducerFactory<>(configs);
	}

	/**
	 * Template for sending messages. {@code autoFlush=true}: every send is
	 * flushed immediately (lower throughput, stronger delivery feedback).
	 */
	@Bean
	public KafkaTemplate<String, Object> kafkaTemplate(ProducerFactory<String, Object> producerFactory) {
		return new KafkaTemplate<>(producerFactory, true);
	}

	/**
	 * Consumer factory built from the externalized core/consumer properties.
	 * Values are consumed as raw {@code byte[]} and converted to listener
	 * argument types by the {@link MessageConverter} beans below.
	 * (The declared {@code JsonProcessingException} is never actually thrown;
	 * it is kept for signature compatibility.)
	 */
	@Bean
	public ConsumerFactory<String, byte[]> consumerFactory(CoreConfig kafkaCore, ConsumerConfig kafkaConsumer) throws JsonProcessingException {
		Map<String, Object> configs = this.toMap(kafkaCore, kafkaConsumer);
		this.def(configs, "bootstrap.servers", "localhost:9092");
		this.def(configs, "key.deserializer", StringDeserializer.class);
		this.def(configs, "value.deserializer", ByteArrayDeserializer.class);
		this.def(configs, "group.id", "lei.group");
		return new DefaultKafkaConsumerFactory<>(configs);
	}

	/** Default policy: retry any Throwable up to 8 attempts. Overridable by user bean. */
	@Bean
	@ConditionalOnMissingBean
	public RetryPolicy retryPolicy() {
		return new SimpleRetryPolicy(8,
				Collections.<Class<? extends Throwable>, Boolean>singletonMap(Throwable.class, true));
	}

	/**
	 * Default retry template: no back-off between attempts, and the last
	 * exception is rethrown once retries are exhausted. The direct
	 * {@code retryPolicy()} call is routed through the CGLIB-enhanced
	 * {@code @Configuration} class and returns the singleton bean.
	 */
	@Bean
	@ConditionalOnMissingBean
	public RetryTemplate retryTemplate() {
		RetryTemplate retryTemplate = new RetryTemplate();
		retryTemplate.setThrowLastExceptionOnExhausted(true);
		retryTemplate.setRetryPolicy(retryPolicy());
		retryTemplate.setBackOffPolicy(new NoBackOffPolicy());
		return retryTemplate;
	}

	/** Primary (single-record) listener container factory with retry support. */
	@Bean
	@Primary
	public KafkaListenerContainerFactory<?> kafkaListenerContainerFactory(MessageConverter messageConverter,
			ConsumerFactory<String, byte[]> consumerFactory) {
		ConcurrentKafkaListenerContainerFactory<String, byte[]> containerFactory = new ConcurrentKafkaListenerContainerFactory<>();
		containerFactory.setConsumerFactory(consumerFactory);
		containerFactory.setMessageConverter(messageConverter);
		containerFactory.setAutoStartup(true);
		containerFactory.setPhase(0);
		containerFactory.setConcurrency(1); // number of concurrent consumer threads
		containerFactory.setAckDiscarded(true);
		containerFactory.setRetryTemplate(retryTemplate());
		return containerFactory;
	}

	/** Batch listener container factory; listeners receive whole record batches. */
	@Bean
	public KafkaListenerContainerFactory<?> kafkaBatchListenerContainerFactory(MessageConverter batchMessageConverter,
			ConsumerFactory<String, byte[]> consumerFactory) {
		ConcurrentKafkaListenerContainerFactory<String, byte[]> batchContainerFactory = new ConcurrentKafkaListenerContainerFactory<>();
		batchContainerFactory.setConsumerFactory(consumerFactory);
		batchContainerFactory.setMessageConverter(batchMessageConverter);
		batchContainerFactory.setAutoStartup(true);
		batchContainerFactory.setPhase(0);
		batchContainerFactory.setConcurrency(1); // number of concurrent consumer threads
		batchContainerFactory.setAckDiscarded(true);
		batchContainerFactory.setBatchListener(true);
		return batchContainerFactory;
	}

	/**
	 * Converts a raw record value ({@code byte[]} or {@code String}) to the
	 * listener's declared target type, passing it through untouched when it
	 * already matches. JSON payloads are treated as UTF-8, matching Kafka's
	 * {@link JsonSerializer} which always writes UTF-8 bytes.
	 *
	 * @throws ConversionException   if JSON parsing fails
	 * @throws IllegalStateException if the value is neither String nor byte[]
	 */
	private Object convertValue(ObjectMapper objectMapper, Object value, JavaType javaType) {
		if (javaType.hasRawClass(value.getClass()))
			return value;
		try {
			if (value instanceof byte[]) {
				if (javaType.hasRawClass(String.class))
					// Explicit UTF-8: the platform default charset is not guaranteed
					// to match the UTF-8 bytes produced by the JSON serializer.
					return new String((byte[]) value, StandardCharsets.UTF_8);
				return objectMapper.readValue((byte[]) value, javaType);
			}
			if (value instanceof String) {
				if (javaType.hasRawClass(byte[].class))
					return ((String) value).getBytes(StandardCharsets.UTF_8);
				return objectMapper.readValue((String) value, javaType);
			}
		} catch (IOException e) {
			throw new ConversionException("Failed to convert from JSON", e);
		}
		throw new IllegalStateException("Only String or byte[] supported");
	}

	/**
	 * Single-record converter: resolves the listener's declared {@link Type} to
	 * a Jackson {@link JavaType} (cached) and converts the raw value with
	 * {@link #convertValue}.
	 */
	@Bean
	public MessageConverter messageConverter(ObjectMapper objectMapper) {
		return new MessagingMessageConverter() {

			// Listener containers may run with concurrency > 1, so the cache must be
			// thread-safe (a plain LinkedHashMap can be corrupted by concurrent puts).
			private final Map<Type, JavaType> cacheJavaType = new ConcurrentHashMap<>();

			private JavaType findJavaType(Type type) {
				return cacheJavaType.computeIfAbsent(type, t -> TypeFactory.defaultInstance().constructType(t));
			}

			@Override
			protected Object extractAndConvertValue(ConsumerRecord<?, ?> record, Type type) {
				Object value = record.value();
				if (value == null) {
					return KafkaNull.INSTANCE;
				}
				return convertValue(objectMapper, value, findJavaType(type));
			}
		};
	}

	/**
	 * Batch converter: the listener must declare a collection or array type;
	 * each record is converted to the element (content) type, which is cached
	 * per declared type.
	 */
	@Bean
	public MessageConverter batchMessageConverter(ObjectMapper objectMapper) {
		return new BatchMessagingMessageConverter() {

			// Thread-safe cache, keyed by the declared listener type, holding the
			// element type actually used for per-record conversion.
			private final Map<Type, JavaType> cacheJavaType = new ConcurrentHashMap<>();

			private JavaType findJavaType(Type type) {
				return cacheJavaType.computeIfAbsent(type, t -> {
					JavaType declared = TypeFactory.defaultInstance().constructType(t);
					if (!declared.isCollectionLikeType() && !declared.isArrayType())
						throw new IllegalStateException("批量消费，接收数据的对象必须是数组或者是集合");
					// Records are converted one by one, so cache the element type.
					return declared.getContentType();
				});
			}

			@Override
			protected Object extractAndConvertValue(ConsumerRecord<?, ?> record, Type type) {
				Object value = record.value();
				if (value == null) {
					return KafkaNull.INSTANCE;
				}
				return convertValue(objectMapper, value, findJavaType(type));
			}
		};
	}
}
