package com.idanchuang.count.core.config;

import com.idanchuang.count.core.deserializer.SimpleCountIncrEventDeserializer;
import com.idanchuang.count.core.entity.SimpleCountIncrEvent;
import com.idanchuang.count.dao.CounterAnalyzeDao;
import com.idanchuang.count.dao.impl.CounterAnalyzeDaoImpl;
import com.idanchuang.count.listener.SimpleCountAnalyzeListener;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.listener.SeekToCurrentBatchErrorHandler;

import javax.sql.DataSource;
import java.util.Map;

/**
 * @author Richard_yyf
 * @version 1.0 2021/10/12
 */
@Configuration
@Slf4j
@ConditionalOnProperty(name = "count.simple.analyze-enable", havingValue = "true")
@EnableConfigurationProperties(CountAnalyzeProperties.class)
public class CountAnalyzeConfiguration {

    /** Minimum number of idle connections Hikari keeps in the pool. */
    private static final int POOL_MIN_IDLE = 3;
    /** Maximum number of connections in the pool. */
    private static final int POOL_MAX_SIZE = 10;
    /** Maximum milliseconds to wait when borrowing a connection from the pool. */
    private static final long CONNECTION_TIMEOUT_MS = 10_000L;
    /** Maximum idle time (ms) before an idle connection is retired. */
    private static final long IDLE_TIMEOUT_MS = 60_000L;
    /** Timeout (ms) for the connection validation query. */
    private static final long VALIDATION_TIMEOUT_MS = 3_000L;

    /** Per-poll record cap for the analyze consumer (overrides application config). */
    private static final int ANALYZE_MAX_POLL_RECORDS = 500;
    /** Dedicated consumer group for the analyze pipeline. */
    private static final String ANALYZE_GROUP_ID = "count-service-analyze";

    /**
     * Builds the Hikari-pooled {@link DataSource} backing the counter-analyze storage.
     *
     * <p>Driver class, JDBC URL and credentials come from {@link CountAnalyzeProperties};
     * pool sizing and timeouts are fixed constants tuned for this service.
     *
     * @param countAnalyzeProperties externalized connection settings
     * @return a configured {@link HikariDataSource} (closed automatically by Spring on shutdown)
     */
    @Bean
    public DataSource dataSource(CountAnalyzeProperties countAnalyzeProperties) {
        HikariConfig config = new HikariConfig();
        config.setDriverClassName(countAnalyzeProperties.getDriverClassName());
        config.setJdbcUrl(countAnalyzeProperties.getJdbcUrl());
        config.setUsername(countAnalyzeProperties.getUsername());
        config.setPassword(countAnalyzeProperties.getPassword());

        config.setMinimumIdle(POOL_MIN_IDLE);
        config.setMaximumPoolSize(POOL_MAX_SIZE);
        config.setConnectionTimeout(CONNECTION_TIMEOUT_MS);
        config.setIdleTimeout(IDLE_TIMEOUT_MS);
        // TDengine-specific validation query (log message below suggests a TDengine backend).
        config.setConnectionTestQuery("select server_status()");
        config.setValidationTimeout(VALIDATION_TIMEOUT_MS);

        HikariDataSource dataSource = new HikariDataSource(config);
        log.info("tdEngine datasource created");

        return dataSource;
    }

    /**
     * Listener bean that consumes batched count-increment events and persists
     * analyze rows through {@link CounterAnalyzeDao}.
     *
     * @param counterAnalyzeDao DAO used to write analyze records
     * @return the Kafka batch listener
     */
    @Bean
    public SimpleCountAnalyzeListener simpleCountAnalyzeListener(CounterAnalyzeDao counterAnalyzeDao) {
        return new SimpleCountAnalyzeListener(counterAnalyzeDao);
    }

    /**
     * DAO for counter analyze persistence, operating directly on the pooled datasource.
     *
     * @param dataSource the Hikari datasource defined above
     * @return the DAO implementation
     */
    @Bean
    public CounterAnalyzeDao counterAnalyzeDao(DataSource dataSource) {
        return new CounterAnalyzeDaoImpl(dataSource);
    }

    /**
     * Container factory for the analyze batch consumer.
     *
     * <p>Starts from the application's shared Kafka consumer properties, then overrides:
     * a custom {@link SimpleCountIncrEventDeserializer}, a batch size of
     * {@value #ANALYZE_MAX_POLL_RECORDS} records per poll, {@code latest} offset reset,
     * and a dedicated consumer group. Offsets are acknowledged manually by the listener.
     *
     * @param kafkaProperties Spring Boot Kafka properties (base consumer config and poll timeout)
     * @return a single-threaded batch listener container factory
     */
    @Bean
    KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, SimpleCountIncrEvent>>
    simpleCountAnalyzeBatchConsumeContainerFactory(KafkaProperties kafkaProperties) {

        ConcurrentKafkaListenerContainerFactory<String, SimpleCountIncrEvent> factory = new ConcurrentKafkaListenerContainerFactory<>();

        Map<String, Object> config = kafkaProperties.buildConsumerProperties();
        config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, SimpleCountIncrEventDeserializer.class.getName());
        // Override the application-level setting: this consumer batches up to 500 records per poll.
        config.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, ANALYZE_MAX_POLL_RECORDS);
        config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
        config.put(ConsumerConfig.GROUP_ID_CONFIG, ANALYZE_GROUP_ID);

        factory.setConsumerFactory(new DefaultKafkaConsumerFactory<>(config));
        factory.setConcurrency(1);
        // Retry the failed batch indefinitely: failures must keep alerting until a human
        // intervenes; silently skipping records is not acceptable here.
        factory.setBatchErrorHandler(new SeekToCurrentBatchErrorHandler());

        factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL);
        factory.getContainerProperties().setPollTimeout(kafkaProperties.getListener().getPollTimeout().toMillis());
        factory.setBatchListener(Boolean.TRUE);
        // NOTE(review): the original comment said "disabled in some environments", but auto-start
        // is hard-coded to true; environment gating happens via @ConditionalOnProperty on this
        // class instead — confirm whether per-environment auto-startup control is still needed.
        factory.setAutoStartup(true);

        return factory;
    }

}
