package com.hhf.ds.service;

import com.alibaba.druid.pool.DruidDataSource;
import com.hhf.ds.config.DruidPoolConfig;
import com.hhf.ds.entity.Ds;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

@Slf4j
@Service
public class PoolService {

    /**
     * Cache of Druid pools keyed by datasource id ({@code Ds.getDsId()}).
     * Static so the cache survives even if multiple service instances exist;
     * ConcurrentHashMap gives thread-safe access.
     */
    private static final Map<Integer, DruidDataSource> poolCache = new ConcurrentHashMap<>();

    @Autowired
    private DruidPoolConfig druidPool;

    /**
     * Builds a new (uncached) Druid data source for the given connection target,
     * applying the supplied pool tuning parameters.
     *
     * @param ds   connection target: JDBC url, credentials, driver class
     * @param pool pool tuning configuration
     * @return a fully configured {@link DruidDataSource}; caller owns its lifecycle
     */
    public DruidDataSource dataSource(Ds ds, DruidPoolConfig pool) {
        DruidDataSource datasource = new DruidDataSource();
        // connection settings
        datasource.setUrl(ds.getJdbcUrl());
        datasource.setUsername(ds.getUsername());
        datasource.setPassword(ds.getPassword());
        datasource.setDriverClassName(ds.getDriverName());
        // pool config
        datasource.setInitialSize(pool.getInitialSize());
        datasource.setMinIdle(pool.getMinIdle());
        datasource.setMaxActive(pool.getMaxActive());
        datasource.setMaxWait(pool.getMaxWait());
        datasource.setTimeBetweenEvictionRunsMillis(pool.getTimeBetweenEvictionRunsMillis());
        datasource.setMinEvictableIdleTimeMillis(pool.getMinEvictableIdleTimeMillis());
        datasource.setTestWhileIdle(pool.isTestWhileIdle());
        datasource.setPoolPreparedStatements(pool.isPoolPreparedStatements());
        datasource.setMaxPoolPreparedStatementPerConnectionSize(pool.getMaxPoolPreparedStatementPerConnectionSize());
        datasource.setConnectionErrorRetryAttempts(pool.getConnectionErrorRetryAttempts());
        datasource.setBreakAfterAcquireFailure(pool.isBreakAfterAcquireFailure());
        datasource.setTimeBetweenConnectErrorMillis(pool.getTimeBetweenConnectErrorMillis());
        return datasource;
    }

    /**
     * Builds a new (uncached) Druid data source using the application-wide
     * pool configuration.
     *
     * @param ds connection target
     * @return a fully configured {@link DruidDataSource}
     */
    public DruidDataSource dataSource(Ds ds) {
        return dataSource(ds, druidPool);
    }

    /**
     * Returns the cached pool for the datasource, creating and caching it on
     * first use.
     *
     * <p>{@code computeIfAbsent} is atomic on ConcurrentHashMap, so concurrent
     * callers for the same dsId cannot create two pools (the previous
     * check-then-put sequence could).
     *
     * @param ds connection target
     * @return the shared pool for {@code ds.getDsId()}
     */
    public DruidDataSource getJdbcConnectionPool(Ds ds) {
        return poolCache.computeIfAbsent(ds.getDsId(), key -> {
            DruidDataSource pool = dataSource(ds);
            log.info("创建连接池成功：{}", ds.getJdbcUrl());
            return pool;
        });
    }

    /**
     * 删除map中的数据库连接池
     *
     * <p>Removes the cached pool for the given datasource id and closes it so
     * its pooled connections are released (previously the removed pool was
     * never closed — a connection leak).
     *
     * @param id datasource id; ignored when {@code null}
     */
    public void removeJdbcConnectionPool(Long id) {
        if (id == null) {
            return;
        }
        try {
            // The cache is keyed by Integer, and a boxed Long never equals a
            // boxed Integer — looking up with the Long directly (as the old
            // code did) silently missed the entry every time. Narrow first.
            DruidDataSource pool = poolCache.remove(id.intValue());
            if (pool != null) {
                pool.close();
                log.info("remove pool success, datasourceId:{}", id);
            }
        } catch (Exception e) {
            log.error("error", e);
        }
    }

    /**
     * 测试数据库连接  获取一个连接
     *
     * <p>Opens a single, genuinely un-pooled JDBC connection for connectivity
     * tests. The previous implementation built a throwaway DruidDataSource per
     * call and never closed it, leaking one pool per test. DriverManager leaves
     * nothing behind; it relies on JDBC 4 driver auto-registration, so the
     * explicit driver-class load via {@code ds.getDriverName()} is unnecessary.
     *
     * @param ds connection target
     * @return Connection — the caller is responsible for closing it
     * @throws SQLException if the connection cannot be established
     */
    public Connection getUnPooledConnection(Ds ds) throws SQLException {
        return DriverManager.getConnection(ds.getJdbcUrl(), ds.getUsername(), ds.getPassword());
    }

    /**
     * 获取连接
     *
     * <p>Borrows a connection from the shared pool for this datasource,
     * creating the pool on first use.
     *
     * @param ds connection target
     * @return a pooled Connection — closing it returns it to the pool
     * @throws SQLException if a connection cannot be borrowed
     */
    public Connection getPooledConnection(Ds ds) throws SQLException {
        DruidDataSource pool = getJdbcConnectionPool(ds);
        return pool.getConnection();
    }

}
