package com.flowcloud.kafka.config;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.listener.adapter.RecordFilterStrategy;
import org.springframework.kafka.support.serializer.JsonDeserializer;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Function;

/**
 * Fluent builder for a Spring Kafka {@link ConcurrentKafkaListenerContainerFactory}.
 *
 * <p>Typical usage:
 * {@code ListenerContainerFactoryBuilder.of(props).withConsumerFactoryCreator(...).init()
 *     .withConsumerGroupId("g").build()}.
 *
 * <p>Consumer-property and listener-container customizations are recorded as deferred
 * configurers and applied (then cleared) when {@link #build()} or
 * {@link #buildConsumerFactory()} is invoked. Not thread-safe.
 *
 * @param <K> record key type
 * @param <V> record value type
 * @author wuwenbin
 * @since 2020-12-18
 */
public class ListenerContainerFactoryBuilder<K, V> {

	// Base consumer properties; populated by init() from the Spring Boot KafkaProperties.
	private Map<String, Object> consumerProperties;
	// Deferred mutations of the consumer property map, applied at build time in insertion order.
	private final List<Consumer<Map<String, Object>>> consumerPropertiesConfigurer = new ArrayList<>();
	// Deferred customizations of the listener container factory, applied at build time.
	private final List<Consumer<ConcurrentKafkaListenerContainerFactory<K, V>>> listenerPropertiesConfigurer = new ArrayList<>();

	private final KafkaProperties kafkaProperties;
	private ConcurrentKafkaListenerContainerFactory<K, V> listenerContainerFactory;
	// Strategy that turns the final property map into a ConsumerFactory; must be supplied
	// via withConsumerFactoryCreator(...) before building.
	private Function<Map<String, Object>, ConsumerFactory<K, V>> consumerFactoryCreator;

	private ListenerContainerFactoryBuilder(KafkaProperties kafkaProperties) {
		this.kafkaProperties = kafkaProperties;
	}

	/**
	 * Creates a new builder backed by the given Spring Boot Kafka properties.
	 *
	 * @param kafkaProperties source of the base consumer configuration
	 * @param <K>             record key type
	 * @param <V>             record value type
	 * @return a new builder instance
	 */
	public static <K, V> ListenerContainerFactoryBuilder<K, V> of(KafkaProperties kafkaProperties) {
		return new ListenerContainerFactoryBuilder<>(kafkaProperties);
	}

	/**
	 * Sets the function used to turn the assembled property map into a {@link ConsumerFactory}.
	 * Must be called before {@link #build()} or {@link #buildConsumerFactory()}.
	 *
	 * @param consumerFactoryCreator factory function, e.g. {@code DefaultKafkaConsumerFactory::new}
	 * @return this builder
	 */
	public ListenerContainerFactoryBuilder<K, V> withConsumerFactoryCreator(
			Function<Map<String, Object>, ConsumerFactory<K, V>> consumerFactoryCreator) {
		this.consumerFactoryCreator = consumerFactoryCreator;
		return this;
	}

	/**
	 * Initializes the base consumer property map from {@link KafkaProperties} and applies
	 * opinionated defaults (String key / JSON value deserializers, timeouts). Must be called
	 * before any build method; later {@code withConsumer*} configurers can override these.
	 *
	 * @return this builder
	 */
	public ListenerContainerFactoryBuilder<K, V> init() {
		consumerProperties = kafkaProperties.buildConsumerProperties();
		consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
		consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);

		// max.poll.interval.ms = 600000 (10 min); the Kafka default is 5 minutes.
		consumerProperties.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, 600000);
		consumerProperties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 15000);
		consumerProperties.put(ConsumerConfig.REQUEST_TIMEOUT_MS_CONFIG, 15000);
		return this;
	}

	/**
	 * Registers an arbitrary consumer property to be set at build time.
	 *
	 * @param key   a {@link ConsumerConfig} property key
	 * @param value the property value
	 * @return this builder
	 */
	public ListenerContainerFactoryBuilder<K, V> withConsumerProperty(String key, Object value) {
		consumerPropertiesConfigurer.add(map -> map.put(key, value));
		return this;
	}

	/**
	 * Sets the listener container concurrency (number of consumer threads).
	 *
	 * @param concurrency desired concurrency
	 * @return this builder
	 */
	public ListenerContainerFactoryBuilder<K, V> withListenerConcurrency(Integer concurrency) {
		listenerPropertiesConfigurer.add(listener -> listener.setConcurrency(concurrency));
		return this;
	}

	/**
	 * Enables or disables batch listening (the listener receives a list of records per poll).
	 *
	 * @param batchListener {@code true} to enable batch mode
	 * @return this builder
	 */
	public ListenerContainerFactoryBuilder<K, V> withListenerBatchListener(Boolean batchListener) {
		listenerPropertiesConfigurer.add(listener -> listener.setBatchListener(batchListener));
		return this;
	}

	/**
	 * Controls whether records discarded by the {@link RecordFilterStrategy} are acknowledged.
	 *
	 * @param discarded {@code true} to ack filtered-out records
	 * @return this builder
	 */
	public ListenerContainerFactoryBuilder<K, V> withListenerAckDiscarded(Boolean discarded) {
		listenerPropertiesConfigurer.add(listener -> listener.setAckDiscarded(discarded));
		return this;
	}

	/**
	 * Sets the record filter strategy used to drop unwanted records before the listener.
	 *
	 * @param strategy the filter strategy
	 * @return this builder
	 */
	public ListenerContainerFactoryBuilder<K, V> withListenerRecordFilterStrategy(RecordFilterStrategy<K, V> strategy) {
		listenerPropertiesConfigurer.add(listener -> listener.setRecordFilterStrategy(strategy));
		return this;
	}

	/**
	 * Sets the ack time in milliseconds; effective when the ack mode is {@code TIME}.
	 *
	 * @param time ack interval in milliseconds
	 * @return this builder
	 */
	public ListenerContainerFactoryBuilder<K, V> withListenerAckTime(long time) {
		listenerPropertiesConfigurer.add(listener -> listener.getContainerProperties().setAckTime(time));
		return this;
	}

	/**
	 * Sets the ack count; effective when the ack mode is {@code COUNT}.
	 *
	 * @param count number of records between acks
	 * @return this builder
	 */
	public ListenerContainerFactoryBuilder<K, V> withListenerAckCount(int count) {
		listenerPropertiesConfigurer.add(listener -> listener.getContainerProperties().setAckCount(count));
		return this;
	}

	/**
	 * Sets the container ack mode. For {@code MANUAL} or {@code MANUAL_IMMEDIATE},
	 * {@code enable.auto.commit} is automatically forced to {@code false}, since manual
	 * acknowledgment and Kafka auto-commit are mutually exclusive.
	 *
	 * @param mode the ack mode
	 * @return this builder
	 */
	public ListenerContainerFactoryBuilder<K, V> withListenerAckMode(ContainerProperties.AckMode mode) {
		listenerPropertiesConfigurer.add(listener -> listener.getContainerProperties().setAckMode(mode));
		// Enum identity comparison is safe and null-tolerant here.
		if (mode == ContainerProperties.AckMode.MANUAL ||
				mode == ContainerProperties.AckMode.MANUAL_IMMEDIATE) {
			consumerPropertiesConfigurer.add(map -> map.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false));
		}
		return this;
	}

	/**
	 * Sets the poll timeout of the listener container.
	 *
	 * @param timeout poll timeout in milliseconds
	 * @return this builder
	 */
	public ListenerContainerFactoryBuilder<K, V> withListenerPollTimeout(long timeout) {
		listenerPropertiesConfigurer.add(listener -> listener.getContainerProperties().setPollTimeout(timeout));
		return this;
	}

	/**
	 * Sets the consumer group id ({@code group.id}).
	 *
	 * @param groupId the consumer group id
	 * @return this builder
	 */
	public ListenerContainerFactoryBuilder<K, V> withConsumerGroupId(String groupId) {
		consumerPropertiesConfigurer.add(map -> map.put(ConsumerConfig.GROUP_ID_CONFIG, groupId));
		return this;
	}

	/**
	 * Sets the offset reset policy ({@code auto.offset.reset}), e.g. "earliest" or "latest".
	 *
	 * @param reset the reset policy
	 * @return this builder
	 */
	public ListenerContainerFactoryBuilder<K, V> withConsumerOffsetReset(String reset) {
		consumerPropertiesConfigurer.add(map -> map.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, reset));
		return this;
	}

	/**
	 * Sets the maximum number of records returned per poll ({@code max.poll.records}).
	 *
	 * @param records max records per poll
	 * @return this builder
	 */
	public ListenerContainerFactoryBuilder<K, V> withConsumerMaxPollRecords(Integer records) {
		consumerPropertiesConfigurer.add(map -> map.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, records));
		return this;
	}

	/**
	 * Sets {@code enable.auto.commit}. Note: disabling auto-commit does not by itself require
	 * manual acking — unless the ack mode is {@code MANUAL}, the container still commits
	 * offsets (in {@code BATCH} mode by default).
	 *
	 * @param autoCommit whether the consumer auto-commits offsets
	 * @return this builder
	 */
	public ListenerContainerFactoryBuilder<K, V> withConsumerAutoCommit(Boolean autoCommit) {
		consumerPropertiesConfigurer.add(map -> map.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, autoCommit));
		return this;
	}

	/**
	 * Builds just the {@link ConsumerFactory} from the assembled properties.
	 * Pending consumer-property configurers are applied and then cleared.
	 *
	 * @return the consumer factory
	 * @throws IllegalStateException if {@link #init()} was not called or no factory creator was set
	 */
	public ConsumerFactory<K, V> buildConsumerFactory() {
		return createConsumerFactory();
	}

	/**
	 * Builds the listener container factory: applies pending consumer-property configurers,
	 * creates the {@link ConsumerFactory}, then applies pending listener configurers.
	 * All configurer lists are cleared afterwards.
	 *
	 * @return the configured listener container factory
	 * @throws IllegalStateException if {@link #init()} was not called or no factory creator was set
	 */
	public ConcurrentKafkaListenerContainerFactory<K, V> build() {
		ConsumerFactory<K, V> consumerFactory = createConsumerFactory();
		listenerContainerFactory = new ConcurrentKafkaListenerContainerFactory<>();
		listenerContainerFactory.setConsumerFactory(consumerFactory);
		listenerPropertiesConfigurer.forEach(e -> e.accept(listenerContainerFactory));
		listenerPropertiesConfigurer.clear();
		return listenerContainerFactory;
	}

	/**
	 * Validates builder state, applies (then clears) pending consumer-property configurers,
	 * and creates the {@link ConsumerFactory}. Fails fast with a descriptive exception
	 * instead of an opaque NPE on misuse.
	 */
	private ConsumerFactory<K, V> createConsumerFactory() {
		if (consumerProperties == null) {
			throw new IllegalStateException("init() must be called before building");
		}
		if (consumerFactoryCreator == null) {
			throw new IllegalStateException("withConsumerFactoryCreator(...) must be called before building");
		}
		consumerPropertiesConfigurer.forEach(e -> e.accept(consumerProperties));
		consumerPropertiesConfigurer.clear();
		return consumerFactoryCreator.apply(consumerProperties);
	}
}
