package org.spiderflow.core.utils;

import com.alibaba.druid.pool.DruidDataSource;
import org.spiderflow.core.service.DataSourceService;

import javax.sql.DataSource;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * 数据库连接工具类
 *
 * @author jmxd
 */
public class DataSourceUtils {

    /** Cache of live pooled DataSources, keyed by data-source id. */
    private static final Map<String, DataSource> DATASOURCE_MAP = new ConcurrentHashMap<>();

    /** Resolved once at class-load time; assumes the Spring context is already initialized. */
    private static final DataSourceService dataSourceService = SpringUtils.getBean(DataSourceService.class);

    private DataSourceUtils() {
    }

    /**
     * Creates a new Druid connection pool for the given JDBC settings.
     * The returned pool is NOT cached — callers that want caching go through
     * {@link #getDataSource(String)}.
     *
     * @param className JDBC driver class name
     * @param url       JDBC connection URL
     * @param username  database user
     * @param password  database password
     * @return a configured {@link DruidDataSource}
     */
    public static DataSource createDataSource(String className, String url, String username, String password) {
        DruidDataSource datasource = new DruidDataSource();
        datasource.setDriverClassName(className);
        datasource.setUrl(url);
        datasource.setUsername(username);
        datasource.setPassword(password);
        datasource.setDefaultAutoCommit(true);
        datasource.setMinIdle(1);
        datasource.setInitialSize(2);
        return datasource;
    }

    /**
     * Evicts the cached DataSource for the given id and closes its pool.
     *
     * <p>Uses the atomic {@code Map.remove(key)} so that two concurrent callers
     * cannot both obtain (and double-close) the same instance, and so a
     * concurrent {@link #getDataSource(String)} can never hand out a pool that
     * has already been closed but is still in the map (the original
     * get → close → remove sequence had both races).
     *
     * @param dataSourceId id of the data source to evict; a miss is a no-op
     */
    public static void remove(String dataSourceId) {
        DataSource dataSource = DATASOURCE_MAP.remove(dataSourceId);
        // instanceof guard: avoids a ClassCastException should a non-Druid
        // implementation ever end up in the cache.
        if (dataSource instanceof DruidDataSource) {
            ((DruidDataSource) dataSource).close();
        }
    }

    /**
     * Returns the cached DataSource for the given id, creating it from the
     * persisted configuration on first access.
     *
     * <p>{@code ConcurrentHashMap.computeIfAbsent} is atomic per key, so the
     * method-level {@code synchronized} of the original is dropped — it only
     * serialized unrelated lookups and never coordinated with
     * {@link #remove(String)} anyway. A {@code null} mapping result (unknown
     * id) leaves the map unchanged, matching the original behavior.
     *
     * @param dataSourceId id of the persisted data-source configuration
     * @return the pooled DataSource, or {@code null} if no configuration exists
     */
    public static DataSource getDataSource(String dataSourceId) {
        return DATASOURCE_MAP.computeIfAbsent(dataSourceId, k -> {
            org.spiderflow.core.model.DataSource config = dataSourceService.getById(k);
            return config == null ? null : createDataSource(config.getDriverClassName(), config.getJdbcUrl(),
                    config.getUsername(), config.getPassword());
        });
    }
}
