package com.dangz.dans.dynamicDataSource;

import com.alibaba.druid.pool.DruidDataSource;
import com.dangz.dans.utils.EncryptionDecryption;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.MutablePropertyValues;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.GenericBeanDefinition;
import org.springframework.boot.autoconfigure.jdbc.DataSourceBuilder;
import org.springframework.boot.bind.RelaxedPropertyResolver;
import org.springframework.context.EnvironmentAware;
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
import org.springframework.core.env.Environment;
import org.springframework.core.type.AnnotationMetadata;
import javax.sql.DataSource;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
/**
 * DynamicDataSourceRegister: registers a dynamic (routing) DataSource bean.
 *
 * <p>Builds the primary data source from {@code spring.datasource.*} (with Druid pool
 * settings under {@code spring.datasource.druid.*}) and any additional data sources
 * whose prefixes are listed, comma-separated, under {@code custom.datasource.names}.
 * Spring calls {@link #setEnvironment(Environment)} before
 * {@link #registerBeanDefinitions(AnnotationMetadata, BeanDefinitionRegistry)}, so all
 * data sources are fully built by registration time.
 */
public class DynamicDataSourceRegister
        implements ImportBeanDefinitionRegistrar, EnvironmentAware {

    private static final Logger logger = LoggerFactory.getLogger(DynamicDataSourceRegister.class);

    // Pool implementation used when the configuration does not specify spring.datasource.type.
    private static final String DATASOURCE_TYPE_DEFAULT = "com.alibaba.druid.pool.DruidDataSource";

    // Decrypts the username/password values that are stored encrypted in the config files.
    private static final EncryptionDecryption des = EncryptionDecryption.getInstance();

    // Primary data source built from spring.datasource.*.
    private DataSource defaultDataSource;
    // Additional data sources, keyed by their custom.datasource.names prefix.
    private final Map<String, DataSource> customDataSources = new HashMap<>();

    /**
     * Registers the routing DataSource bean (named "dataSource") whose targets are the
     * primary data source plus every custom data source loaded in {@link #setEnvironment}.
     */
    @Override
    public void registerBeanDefinitions(AnnotationMetadata importingClassMetadata, BeanDefinitionRegistry registry) {
        // AbstractRoutingDataSource expects a Map<Object, Object> of lookup-key -> DataSource.
        Map<Object, Object> targetDataSources = new HashMap<>();
        // The primary data source participates in routing under the key "dataSource".
        targetDataSources.put("dataSource", defaultDataSource);
        DynamicDataSourceContextHolder.dataSourceIds.add("dataSource");
        // Add the additional data sources and expose their ids for routing lookups.
        targetDataSources.putAll(customDataSources);
        DynamicDataSourceContextHolder.dataSourceIds.addAll(customDataSources.keySet());

        // Register the DynamicDataSource bean definition under the canonical name "dataSource".
        GenericBeanDefinition beanDefinition = new GenericBeanDefinition();
        beanDefinition.setBeanClass(DynamicDataSource.class);
        beanDefinition.setSynthetic(true);
        MutablePropertyValues mpv = beanDefinition.getPropertyValues();
        mpv.addPropertyValue("defaultTargetDataSource", defaultDataSource);
        mpv.addPropertyValue("targetDataSources", targetDataSources);
        registry.registerBeanDefinition("dataSource", beanDefinition);

        logger.info("Dynamic DataSource Registry");
    }

    /**
     * Builds a DataSource from a property map.
     *
     * @param dsMap required keys: {@code driver-class-name}, {@code url},
     *              {@code username} (encrypted), {@code password} (encrypted);
     *              optional: {@code type} plus the Druid pool settings consumed by
     *              {@link #setDruidInfo(DruidDataSource, Map)}
     * @return the configured DataSource, never {@code null}
     * @throws Exception if the pool class cannot be loaded, a required property is
     *                   missing, or credential decryption fails
     */
    @SuppressWarnings("unchecked")
    public DataSource buildDataSource(Map<String, Object> dsMap) throws Exception {
        Object type = dsMap.get("type");
        if (type == null) {
            type = DATASOURCE_TYPE_DEFAULT; // fall back to Druid
        }

        Class<? extends DataSource> dataSourceType;
        try {
            dataSourceType = (Class<? extends DataSource>) Class.forName(type.toString());
        } catch (ClassNotFoundException e) {
            // Fail fast with context instead of returning null (which only NPEs later).
            throw new IllegalArgumentException("Unknown DataSource type: " + type, e);
        }

        String driverClassName = requiredProperty(dsMap, "driver-class-name");
        String url = requiredProperty(dsMap, "url");
        // Credentials are stored encrypted in the configuration; decrypt before use.
        String username = des.decrypt(requiredProperty(dsMap, "username"));
        String password = des.decrypt(requiredProperty(dsMap, "password"));

        DataSourceBuilder factory = DataSourceBuilder.create()
                .driverClassName(driverClassName)
                .url(url)
                .username(username)
                .password(password)
                .type(dataSourceType);
        DataSource dataSource = factory.build();
        // Druid-specific pool settings must be applied after construction.
        if (dataSource instanceof DruidDataSource) {
            setDruidInfo((DruidDataSource) dataSource, dsMap);
        }
        return dataSource;
    }

    /** Returns the named property as a String, failing fast with the key name when absent. */
    private static String requiredProperty(Map<String, Object> dsMap, String key) {
        Object value = dsMap.get(key);
        if (value == null) {
            throw new IllegalArgumentException("Missing required datasource property: " + key);
        }
        return value.toString();
    }

    /**
     * Loads the primary and additional data-source configuration from the environment.
     * Invoked by Spring before {@code registerBeanDefinitions}.
     *
     * @throws IllegalStateException if any data source cannot be built — a broken
     *         datasource configuration is unrecoverable, so fail startup with context
     *         instead of swallowing the error and registering null data sources
     */
    @Override
    public void setEnvironment(Environment env) {
        try {
            initDefaultDataSource(env);
            initCustomDataSources(env);
        } catch (Exception e) {
            throw new IllegalStateException("Failed to initialize dynamic data sources", e);
        }
    }

    /**
     * Builds the primary data source from {@code spring.datasource.*} and its Druid
     * pool settings from {@code spring.datasource.druid.*}.
     *
     * @param env the Spring environment to read properties from
     * @throws Exception if the data source cannot be built
     */
    public void initDefaultDataSource(Environment env) throws Exception {
        RelaxedPropertyResolver propertyResolver = new RelaxedPropertyResolver(env, "spring.datasource.");
        RelaxedPropertyResolver druidProperty = new RelaxedPropertyResolver(env, "spring.datasource.druid.");
        Map<String, Object> dsMap = new HashMap<>();
        // Core connection properties.
        dsMap.put("type", propertyResolver.getProperty("type"));
        dsMap.put("driver-class-name", propertyResolver.getProperty("driver-class-name"));
        dsMap.put("url", propertyResolver.getProperty("url"));
        dsMap.put("username", propertyResolver.getProperty("username"));
        dsMap.put("password", propertyResolver.getProperty("password"));

        // Druid connection-pool properties (all optional).
        dsMap.put("initial-size", druidProperty.getProperty("initial-size"));
        dsMap.put("min-idle", druidProperty.getProperty("min-idle"));
        dsMap.put("max-active", druidProperty.getProperty("max-active"));
        dsMap.put("max-wait", druidProperty.getProperty("max-wait"));
        dsMap.put("filters", druidProperty.getProperty("filters"));
        dsMap.put("query-timeout", druidProperty.getProperty("query-timeout"));
        dsMap.put("time-between-eviction-runs-millis", druidProperty.getProperty("time-between-eviction-runs-millis"));
        dsMap.put("min-evictable-idle-time-millis", druidProperty.getProperty("min-evictable-idle-time-millis"));
        dsMap.put("validation-query", druidProperty.getProperty("validation-query"));
        dsMap.put("time-between-log-stats-millis", druidProperty.getProperty("time-between-log-stats-millis"));
        defaultDataSource = buildDataSource(dsMap);
    }

    /**
     * Builds the additional data sources listed under {@code custom.datasource.names}
     * (comma-separated prefixes; each prefix's own properties live under
     * {@code custom.datasource.<prefix>.*}). A missing or blank {@code names} property
     * simply means no extra data sources are configured.
     *
     * @param env the Spring environment to read properties from
     * @throws Exception if any custom data source cannot be built
     */
    public void initCustomDataSources(Environment env) throws Exception {
        RelaxedPropertyResolver propertyResolver = new RelaxedPropertyResolver(env, "custom.datasource.");
        String dsPrefixes = propertyResolver.getProperty("names");
        if (dsPrefixes == null || dsPrefixes.trim().isEmpty()) {
            return; // no additional data sources configured
        }
        for (String dsPrefix : dsPrefixes.split(",")) {
            String name = dsPrefix.trim();
            if (name.isEmpty()) {
                continue; // tolerate stray commas such as "a,,b" or a trailing ","
            }
            Map<String, Object> dsMap = propertyResolver.getSubProperties(name + ".");
            customDataSources.put(name, buildDataSource(dsMap));
        }
    }

    /**
     * Applies Druid connection-pool settings from the property map; every setting is
     * optional except filters, which default to "stat,wall".
     *
     * @param druidDataSource the pool to configure
     * @param dsMap           the property map produced by the init methods
     * @throws SQLException if the filters cannot be installed
     */
    private void setDruidInfo(DruidDataSource druidDataSource, Map<String, Object> dsMap) throws SQLException {
        if (dsMap.get("filters") != null) {
            druidDataSource.setFilters(dsMap.get("filters").toString());
        } else {
            // Default to SQL monitoring (stat) and SQL-injection defense (wall).
            druidDataSource.setFilters("stat,wall");
        }
        if (dsMap.get("initial-size") != null) {
            druidDataSource.setInitialSize(Integer.parseInt(dsMap.get("initial-size").toString()));
        }
        if (dsMap.get("max-active") != null) {
            druidDataSource.setMaxActive(Integer.parseInt(dsMap.get("max-active").toString()));
        }
        if (dsMap.get("min-idle") != null) {
            druidDataSource.setMinIdle(Integer.parseInt(dsMap.get("min-idle").toString()));
        }
        if (dsMap.get("max-wait") != null) {
            druidDataSource.setMaxWait(Integer.parseInt(dsMap.get("max-wait").toString()));
        }
        if (dsMap.get("query-timeout") != null) {
            druidDataSource.setQueryTimeout(Integer.parseInt(dsMap.get("query-timeout").toString()));
        }
        if (dsMap.get("time-between-eviction-runs-millis") != null) {
            druidDataSource.setTimeBetweenEvictionRunsMillis(Integer.parseInt(dsMap.get("time-between-eviction-runs-millis").toString()));
        }
        if (dsMap.get("min-evictable-idle-time-millis") != null) {
            druidDataSource.setMinEvictableIdleTimeMillis(Integer.parseInt(dsMap.get("min-evictable-idle-time-millis").toString()));
        }
        if (dsMap.get("validation-query") != null) {
            druidDataSource.setValidationQuery(dsMap.get("validation-query").toString());
        }
        if (dsMap.get("time-between-log-stats-millis") != null) {
            druidDataSource.setTimeBetweenLogStatsMillis(Long.parseLong(dsMap.get("time-between-log-stats-millis").toString()));
        }
    }
}
