package com.jugheadzhou.kafka.core.config;

import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.encoder.PatternLayoutEncoder;
import ch.qos.logback.classic.spi.ILoggingEvent;
import com.github.danielwegener.logback.kafka.KafkaAppender;
import com.github.danielwegener.logback.kafka.delivery.AsynchronousDeliveryStrategy;
import com.github.danielwegener.logback.kafka.keying.NoKeyKeyingStrategy;
import com.jugheadzhou.kafka.core.logger.AsyncKafkaLogger;
import com.jugheadzhou.kafka.core.manager.KafkaMessageManager;
import com.jugheadzhou.kafka.core.properties.KafkaLoggerProperties;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.LoggerFactory;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.util.CollectionUtils;

import java.util.List;

/**
 * kafka logger 自动配置类
 *
 * @author jugheadzhou
 * @date 2021-09-23
 */
@Slf4j
@Configuration
@EnableConfigurationProperties(KafkaLoggerProperties.class)
public class AutoKafkaAppenderConfiguration {

    private final KafkaLoggerProperties properties;

    public AutoKafkaAppenderConfiguration(KafkaLoggerProperties properties) {
        this.properties = properties;
    }

    /**
     * Registers a logback {@link Logger} bound to {@link AsyncKafkaLogger} whose output is
     * shipped to Kafka via an asynchronous {@link KafkaAppender}.
     *
     * <p>The appender uses the pattern, topic, partition, timestamp flag and producer
     * configuration from {@link KafkaLoggerProperties}. A single initialization message is
     * logged at the end so the Kafka producer is created eagerly instead of on the first
     * real log call.
     *
     * @return the configured logger instance, registered as bean {@code asyncKafkaLogger}
     * @throws IllegalStateException if the {@code producerConfig} property is empty
     */
    @Bean("asyncKafkaLogger")
    @ConditionalOnProperty(name = "logger.kafka.enabled", havingValue = "true")
    @ConditionalOnClass(KafkaAppender.class)
    public Logger asyncKafkaLogger() {
        log.info("Starting AsyncKafkaLogger AutoConfig.");
        // Validate configuration before building anything, so a misconfiguration does not
        // leave a started encoder/appender behind.
        List<String> producerConfigs = properties.getProducerConfig();
        if (CollectionUtils.isEmpty(producerConfigs)) {
            throw new IllegalStateException("Kafka Logger producerConfig property cannot be empty");
        }
        Logger logger = (Logger) LoggerFactory.getLogger(AsyncKafkaLogger.class);
        LoggerContext lc = logger.getLoggerContext();
        // Encoder must be given a context and started before the appender uses it.
        PatternLayoutEncoder encoder = new PatternLayoutEncoder();
        encoder.setPattern(properties.getPattern());
        encoder.setContext(lc);
        encoder.start();
        KafkaAppender<ILoggingEvent> kafkaAppender = new KafkaAppender<>();
        kafkaAppender.setName("AsyncKafkaAppender");
        kafkaAppender.setTopic(properties.getTopic());
        kafkaAppender.setEncoder(encoder);
        // No message key: events are distributed across partitions (or sent to the fixed
        // partition configured below, if any).
        kafkaAppender.setKeyingStrategy(new NoKeyKeyingStrategy());
        kafkaAppender.setDeliveryStrategy(new AsynchronousDeliveryStrategy());
        kafkaAppender.setPartition(properties.getPartition());
        kafkaAppender.setAppendTimestamp(properties.getAppendTimestamp());
        kafkaAppender.setContext(lc);
        // Each entry is a "key=value" producer setting (e.g. bootstrap.servers=...).
        producerConfigs.forEach(kafkaAppender::addProducerConfig);
        kafkaAppender.start();
        logger.addAppender(kafkaAppender);
        // Log once here to eagerly initialize the Kafka producer, so the first real
        // message after application startup is not lost.
        logger.info("Initialize AsyncKafkaLogger for kafka-producer.");
        return logger;
    }


    /**
     * Registers the {@link KafkaMessageManager} bean for sending Kafka messages
     * programmatically (as opposed to via the logging appender above).
     *
     * @return a new {@link KafkaMessageManager} instance
     */
    @Bean("kafkaMessageManager")
    @ConditionalOnClass(KafkaTemplate.class)
    public KafkaMessageManager kafkaMessageManager() {
        return new KafkaMessageManager();
    }

}
