package org.hilo.boot.app.config;

import java.sql.SQLException;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.sql.DataSource;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.actuate.autoconfigure.metrics.MetricsAutoConfiguration;
import org.springframework.boot.actuate.autoconfigure.metrics.export.simple.SimpleMetricsExportAutoConfiguration;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.boot.jdbc.metadata.DataSourcePoolMetadataProvider;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

import com.alibaba.druid.pool.DruidDataSource;

import org.hilo.boot.core.db.DynamicRoutingDataSource;
import org.hilo.boot.core.db.ExtDataSource;
import org.hilo.boot.core.db.metrics.DruidDataSourcePoolMetadata;
import org.hilo.boot.core.db.metrics.DruidMetrics;

/**
 * Auto-configuration that exposes Alibaba Druid connection-pool metrics
 * to Micrometer and to Spring Boot's datasource-pool metadata endpoint.
 *
 * @author zollty
 * @since 2020-09-28
 */
@Configuration
@ConditionalOnClass(name = "io.micrometer.core.instrument.MeterRegistry")
@AutoConfigureAfter({ MetricsAutoConfiguration.class, DataSourceAutoConfiguration.class,
        SimpleMetricsExportAutoConfiguration.class, DruidDataSourceConfig.class })
public class DruidMetricsConfiguration {
    private static final Logger LOGGER = LoggerFactory.getLogger(DruidMetricsConfiguration.class);

    /**
     * Registers Druid metrics in a multi-datasource setup, collecting every
     * Druid pool managed by the {@link DynamicRoutingDataSource}.
     *
     * <p>Shares the bean name {@code druidMetrics} with the single-datasource
     * variant below; the {@code @ConditionalOnBean}/{@code @ConditionalOnMissingBean}
     * pair guarantees only one of the two is ever registered.
     *
     * @param dataSource the routing datasource holding all target datasources
     * @return metrics binder over every Druid-backed target datasource
     */
    @Bean("druidMetrics")
    @ConditionalOnBean(DynamicRoutingDataSource.class) // multi-datasource setup
    public DruidMetrics druidMetrics2(DynamicRoutingDataSource dataSource) {
        Map<Object, ExtDataSource> targetDatasources = dataSource.getTargetDataSources();
        Set<DruidDataSource> druidDataSourceList = new HashSet<>();
        for (Map.Entry<Object, ExtDataSource> entry : targetDatasources.entrySet()) {
            Object target = entry.getValue().getDataSource();
            // Guard the cast: a single non-Druid target used to raise a
            // ClassCastException here and abort application startup.
            if (target instanceof DruidDataSource) {
                druidDataSourceList.add((DruidDataSource) target);
            } else {
                LOGGER.warn("Target datasource for key {} is not a DruidDataSource, skipped", entry.getKey());
            }
        }
        return new DruidMetrics(druidDataSourceList);
    }

    /**
     * Registers Druid metrics in a single/plain-datasource setup, scanning all
     * {@link DataSource} beans and keeping those backed by Druid.
     *
     * @param dataSourceList every DataSource bean in the context
     * @return metrics binder over the discovered Druid datasources
     */
    @Bean("druidMetrics")
    @ConditionalOnMissingBean(DynamicRoutingDataSource.class)
    public DruidMetrics druidMetrics(List<DataSource> dataSourceList) {
        LOGGER.info("Found {} datasources", dataSourceList.size());
        Set<DruidDataSource> druidDataSourceList = new HashSet<>();

        for (DataSource dataSource : dataSourceList) {
            DruidDataSource druid = resolveDruidDataSource(dataSource);
            if (druid != null) {
                druidDataSourceList.add(druid);
            } else {
                LOGGER.info("None DruidDataSource found");
            }
        }

        for (DruidDataSource druidDataSource : druidDataSourceList) {
            LOGGER.info("Exporting metrics for {} datasource", druidDataSource.getName());
        }

        return new DruidMetrics(druidDataSourceList);
    }

    /**
     * Exposes Druid pool metadata (active/max connections, etc.) to Spring
     * Boot's {@code jdbc.connections.*} metrics.
     *
     * @return provider returning {@link DruidDataSourcePoolMetadata} for
     *         Druid-backed datasources, or {@code null} for everything else
     */
    @Bean
    public DataSourcePoolMetadataProvider druidPoolDataSourceMetadataProvider() {
        return dataSource -> {
            DruidDataSource druid = resolveDruidDataSource(dataSource);
            return druid != null ? new DruidDataSourcePoolMetadata(druid) : null;
        };
    }

    /**
     * Resolves the underlying {@link DruidDataSource} from an arbitrary
     * {@link DataSource}: either the datasource itself, or — for an
     * {@link AbstractRoutingDataSource} — the Druid pool it wraps via the
     * JDBC {@code Wrapper} contract.
     *
     * <p>Extracted so the bean-scanning method and the metadata provider
     * share one unwrap path instead of duplicating it.
     *
     * @param dataSource candidate datasource, never {@code null}
     * @return the Druid pool, or {@code null} if none can be resolved
     */
    private static DruidDataSource resolveDruidDataSource(DataSource dataSource) {
        if (dataSource instanceof DruidDataSource) {
            return (DruidDataSource) dataSource;
        }
        if (dataSource instanceof AbstractRoutingDataSource) {
            try {
                if (dataSource.isWrapperFor(DruidDataSource.class)) {
                    return dataSource.unwrap(DruidDataSource.class);
                }
            } catch (SQLException e) {
                LOGGER.warn("DataSource is not DruidDataSource", e);
            }
        }
        return null;
    }

}
