package com.tang.lts.core.store.datasource;

import com.alibaba.druid.pool.DruidDataSource;
import com.tang.lts.core.cluster.Config;
import com.tang.lts.core.constant.ExtConfig;

import javax.sql.DataSource;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * {@link DataSourceProvider} backed by Druid connection pools for MySQL.
 * Pools are cached per (url, username, password) triple so repeated lookups
 * with the same connection settings share a single pool.
 *
 * @author Tang
 * @classname MysqlDataSourceProvider
 * @description [ MySQL mode ]
 * @date 2022/5/6 14:26
 */
public class MysqlDataSourceProvider implements DataSourceProvider {

    /** Pool cache keyed by "url|username|password"; one pool per credential set. */
    private static final ConcurrentHashMap<String, DataSource> DATA_SOURCE_MAP = new ConcurrentHashMap<>();

    /** Optional Druid tuning properties: config key suffix -> expected value type. */
    private static final Map<String, Class<?>> FIELDS = new HashMap<>();

    static {
        FIELDS.put("initialSize", Integer.class);
        FIELDS.put("maxActive", Integer.class);
        FIELDS.put("maxIdle", Integer.class);
        FIELDS.put("minIdle", Integer.class);
        FIELDS.put("maxWait", Integer.class);
        FIELDS.put("poolPreparedStatements", Boolean.class);
        FIELDS.put("maxOpenPreparedStatements", Integer.class);
        FIELDS.put("validationQuery", String.class);
        FIELDS.put("testOnBorrow", Boolean.class);
        FIELDS.put("testOnReturn", Boolean.class);
        FIELDS.put("testWhileIdle", Boolean.class);
        FIELDS.put("timeBetweenEvictionRunsMillis", Long.class);
        FIELDS.put("numTestsPerEvictionRun", Integer.class);
        FIELDS.put("minEvictableIdleTimeMillis", Long.class);
        FIELDS.put("exceptionSorter", String.class);
        FIELDS.put("filters", String.class);
    }

    /**
     * Returns a (possibly cached) pooled {@link DataSource} for the JDBC
     * settings carried by {@code config}.
     *
     * @param config cluster config holding the jdbc url/username/password keys
     *               and optional {@code druid.*} pool tuning keys
     * @return a shared Druid data source for the given connection settings
     */
    @Override
    public DataSource getDataSource(Config config) {
        String url = config.getParameter(ExtConfig.JDBC_URL);
        String username = config.getParameter(ExtConfig.JDBC_USERNAME);
        String password = config.getParameter(ExtConfig.JDBC_PASSWORD);

        // '|' delimiter prevents key collisions between distinct (url, user, password)
        // triples that would otherwise concatenate to the same string.
        String cachedKey = url + "|" + username + "|" + password;
        // computeIfAbsent is an atomic get-or-create on ConcurrentHashMap; it
        // replaces the original hand-rolled double-checked locking and lock object.
        return DATA_SOURCE_MAP.computeIfAbsent(cachedKey, key -> createDruidDataSource(config));
    }

    /**
     * Builds a new {@link DruidDataSource}: applies the mandatory JDBC
     * connection settings, then any optional {@code druid.*} tuning keys
     * present in the config (best-effort; a bad key is reported and skipped).
     */
    private DataSource createDruidDataSource(Config config) {
        DruidDataSource druidDataSource = new DruidDataSource();
        // Bug fix: the connection settings were read by the caller but never
        // applied to the pool, so it could not connect.
        druidDataSource.setUrl(config.getParameter(ExtConfig.JDBC_URL));
        druidDataSource.setUsername(config.getParameter(ExtConfig.JDBC_USERNAME));
        druidDataSource.setPassword(config.getParameter(ExtConfig.JDBC_PASSWORD));

        // 反射设置参数 (apply optional tuning properties via reflection)
        for (String field : FIELDS.keySet()) {
            String raw = config.getParameter("druid." + field);
            if (raw == null || raw.isEmpty()) {
                continue; // property not configured — keep Druid's default
            }
            applySetting(druidDataSource, field, raw);
        }
        return druidDataSource;
    }

    /**
     * Invokes the JavaBean setter for {@code field} on the data source,
     * converting the raw string value to the setter's declared parameter type.
     * Failures are reported and swallowed so one bad key cannot prevent pool
     * creation (same best-effort contract as the original loop).
     */
    private static void applySetting(DruidDataSource dataSource, String field, String raw) {
        // Bug fix: the original looked up a method named exactly like the field
        // ("initialSize") with a wrapper-class parameter and passed the raw
        // String to it — both guaranteed reflection failures. Resolve the real
        // setXxx(...) setter and convert the value to its parameter type.
        String setterName = "set" + Character.toUpperCase(field.charAt(0)) + field.substring(1);
        try {
            for (Method method : DruidDataSource.class.getMethods()) {
                if (method.getName().equals(setterName) && method.getParameterCount() == 1) {
                    method.invoke(dataSource, convert(raw, method.getParameterTypes()[0]));
                    return;
                }
            }
            throw new NoSuchMethodException(setterName);
        } catch (Exception e) {
            // Best-effort, as before: report the bad property and continue.
            e.printStackTrace();
        }
    }

    /** Converts a config string to the primitive/wrapper/String type a setter expects. */
    private static Object convert(String raw, Class<?> type) {
        if (type == int.class || type == Integer.class) {
            return Integer.valueOf(raw);
        }
        if (type == long.class || type == Long.class) {
            return Long.valueOf(raw);
        }
        if (type == boolean.class || type == Boolean.class) {
            return Boolean.valueOf(raw);
        }
        return raw; // String-typed setters (validationQuery, filters, ...)
    }

}