package com.open.source.kafka;

import com.open.source.MqSwitchControlConditionInitializer;
import java.util.Map;
import java.util.Properties;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.BeansException;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.listener.MessageListener;
import org.springframework.util.StringUtils;

/**
 * Base class for manually-wired Kafka consumer listeners (manual container lifecycle:
 * the container is built and started in {@link #run(ApplicationArguments)} at application
 * startup and torn down in {@link #destroy()}).
 *
 * <p>All lifecycle hooks are no-ops unless Kafka is the MQ implementation selected via
 * {@code MqSwitchControlConditionInitializer.DEFAULT_MQ_SELECTED}.
 *
 * <p>Subclasses supply the topic ({@code kafkaTopic()}), the consumer factory
 * ({@code consumerFactory()}) and the value-deserializer encoding
 * ({@code deserializerEncoding()}) — presumably declared on {@code KafkaConsumerListener};
 * they are not visible in this file.
 *
 * @author ZonLen since on 2022/7/29 下午12:22
 */
@Slf4j
public abstract class AbstractKafkaConsumerListener implements KafkaConsumerListener,
    MessageListener<String, String> {

  // True only when Kafka is the configured MQ; gates run()/destroy().
  private boolean enableKafkaListener;

  protected ApplicationContext applicationContext;

  // Created lazily in run(); may remain null if the listener is disabled or startup failed.
  protected ConcurrentMessageListenerContainer<Object, Object> concurrentMessageListenerContainer;

  /**
   * Resolves the default consumer group id from the Spring Boot Kafka properties.
   * Subclasses may override to use a dedicated group.
   *
   * @return the configured {@code group.id}
   */
  public String groupId() {
    return applicationContext.getBean(KafkaProperties.class).buildConsumerProperties()
        .get(ConsumerConfig.GROUP_ID_CONFIG).toString();
  }


  @Override
  public void destroy() {
    if (!enableKafkaListener) {
      return;
    }
    // Guard against run() having failed (or not having executed yet): previously this
    // dereferenced the container unconditionally and could NPE during shutdown.
    if (concurrentMessageListenerContainer == null) {
      return;
    }
    concurrentMessageListenerContainer.destroy();
    log.info("kafka consumer topic <{}> custom listener stop", String.join(",", kafkaTopic()));
  }

  @Override
  public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
    this.applicationContext = applicationContext;
    // Enable only when the selected MQ implementation is Kafka.
    this.enableKafkaListener = MqSwitchControlConditionInitializer.KAFKA
        .equalsIgnoreCase(applicationContext.getEnvironment()
            .getProperty(MqSwitchControlConditionInitializer.DEFAULT_MQ_SELECTED));
  }

  /**
   * Builds and starts the listener container at application startup.
   *
   * @param args startup arguments (unused)
   * @throws IllegalStateException if the subclass supplies no topic
   */
  @Override
  public void run(ApplicationArguments args) {
    if (!enableKafkaListener) {
      return;
    }
    String topic = kafkaTopic();
    if (!StringUtils.hasText(topic)) {
      // IllegalStateException is a RuntimeException, so existing catchers stay compatible.
      throw new IllegalStateException("Non topics listener, please check");
    }
    // Resolve placeholders through the injected context's environment, consistent with
    // setApplicationContext, instead of going through the static ApplicationContextHolder.
    topic = applicationContext.getEnvironment().resolvePlaceholders(topic);
    final ContainerProperties containerProperties = new ContainerProperties(topic);
    final Map<String, Object> consumerPropertiesMap = applicationContext
        .getBean(KafkaProperties.class).buildConsumerProperties();
    final Properties consumerProperties = new Properties();
    // Properties extends Hashtable, so putAll copies the whole Boot-derived map directly.
    consumerProperties.putAll(consumerPropertiesMap);
    consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG, groupId());
    consumerProperties.put("value.deserializer.encoding", deserializerEncoding());
    // Force String key/value deserialization regardless of what Boot configured.
    consumerProperties
        .put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    consumerProperties
        .put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    containerProperties.setKafkaConsumerProperties(consumerProperties);
    // Hook for subclasses to tweak container behavior before the listener is attached.
    configureConsumer(containerProperties);
    containerProperties.setMessageListener(this);
    containerProperties.setGroupId(groupId());

    concurrentMessageListenerContainer = new ConcurrentMessageListenerContainer<>(
        consumerFactory(), containerProperties);
    concurrentMessageListenerContainer.setRecordInterceptor(
        applicationContext.getBean(KafkaCompositeRecordInterceptorChain.class));
    concurrentMessageListenerContainer.start();
  }

  /**
   * Extension point: customize {@link ContainerProperties} (e.g. ack mode, concurrency)
   * before the container is created. Default is a no-op.
   */
  protected void configureConsumer(ContainerProperties containerProperties) {
  }

}
