package avicit.bdp.dcs.utils;

import avicit.bdp.common.datasource.BaseDataSource;
import avicit.bdp.common.datasource.BaseKerberosDataSource;
import avicit.bdp.common.datasource.DataSourceFactory;
import avicit.bdp.core.constant.Constants;
import avicit.bdp.dcs.datasource.dto.JobDatasource;
import com.alibaba.druid.pool.DruidDataSource;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.Connection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * @金航数码科技有限责任公司
 * @作者：liyb
 * @邮箱：liyb@avic-digital.com
 * @创建时间： 2020-08-17 11:24
 * @类说明：
 * @修改记录：
 */
public class DatabaseUtil {

    private static final Logger logger = LoggerFactory.getLogger(DatabaseUtil.class);

    // Per-datasource-id caches. ConcurrentHashMap gives safe concurrent reads;
    // structural changes (create / invalidate a pool) are additionally serialized
    // on POOL_LOCK because they are multi-step check-then-act sequences.
    protected static Map<String, DruidDataSource> dataSourceMap = new ConcurrentHashMap<>();
    protected static Map<String, JobDatasource> jobDatasourceMap = new ConcurrentHashMap<>();

    /** Serializes pool creation and invalidation so two callers cannot build/drop the same pool concurrently. */
    private static final Object POOL_LOCK = new Object();

    /**
     * Obtains a JDBC connection for the given datasource, pooled per datasource id.
     * <p>
     * A Druid pool is created lazily the first time an id is seen and cached. If the
     * connection info (url/username/password) of a cached id has changed, the stale
     * pool is closed and rebuilt. Hive datasources flagged for Kerberos are
     * authenticated (process-wide) before connecting.
     *
     * @param jobDatasource datasource descriptor (id, JDBC url/user/password/driver, db type)
     * @return an open {@link Connection} borrowed from the cached pool; caller must close it
     * @throws Exception if Kerberos login fails or a connection cannot be acquired
     */
    public static Connection getConnection(JobDatasource jobDatasource) throws Exception {
        BaseDataSource dbSource = DataSourceFactory.getDatasource(jobDatasource.getDataSourceDTO());

        // Hive may require Kerberos authentication before the JDBC connect.
        if (JdbcConstants.HIVE.equals(jobDatasource.getDbType())) {
            authenticateKerberosIfNeeded((BaseKerberosDataSource) dbSource);
        }

        String jobDatasourceId = jobDatasource.getId();
        String jobDatasourceKey = connectionKey(jobDatasource);

        synchronized (POOL_LOCK) {
            // Connection info changed for this id: close the stale pool (the original
            // code only removed it from the map, leaking its pooled connections).
            JobDatasource cachedDatasource = jobDatasourceMap.get(jobDatasourceId);
            if (cachedDatasource != null && !jobDatasourceKey.equals(connectionKey(cachedDatasource))) {
                closeQuietly(dataSourceMap.remove(jobDatasourceId));
            }

            DruidDataSource pooled = dataSourceMap.get(jobDatasourceId);
            if (pooled != null) {
                Connection conn = pooled.getConnection();
                logger.info("DatabaseUtil.getConnection={}", conn);
                if (conn != null) {
                    return conn;
                }
                // Defensive: pool handed back null — drop (and close) it, rebuild below.
                closeQuietly(dataSourceMap.remove(jobDatasourceId));
            }

            DruidDataSource dataSource = createPool(jobDatasource);
            try {
                Connection conn = dataSource.getConnection();
                dataSourceMap.put(jobDatasourceId, dataSource);
                jobDatasourceMap.put(jobDatasourceId, jobDatasource);
                logger.info("DatabaseUtil.getConnection={}", conn);
                return conn;
            } catch (Exception e) {
                // Close the just-built pool so a failed first acquire does not leak it.
                closeQuietly(dataSource);
                throw e;
            }
        }
    }

    /**
     * Performs process-wide Kerberos login when the Hive datasource is flagged for it
     * (kerberos flag == 1); no-op otherwise.
     *
     * @param hiveDataSource Kerberos-capable datasource carrying krb5/keytab/principal/user
     * @throws Exception if krb5 config refresh or keytab login fails
     */
    private static void authenticateKerberosIfNeeded(BaseKerberosDataSource hiveDataSource) throws Exception {
        if (hiveDataSource.getKerberos() == null || 1 != hiveDataSource.getKerberos()) {
            return;
        }
        String krb5Path = hiveDataSource.getKrb5Path();
        String keytabPath = hiveDataSource.getKeytabPath();
        String principal = hiveDataSource.getPrincipal();
        String userName = hiveDataSource.getUser();
        logger.info("krb5Path={}", krb5Path);
        logger.info("keytabPath={}", keytabPath);
        logger.info("principal={}", principal);
        logger.info("userName={}", userName);

        System.setProperty(Constants.JAVA_SECURITY_KRB5_CONF, krb5Path);
        Configuration configuration = new Configuration();
        configuration.setBoolean(Constants.HADOOP_SECURITY_AUTHORIZATION, true);
        configuration.set(Constants.HADOOP_SECURITY_AUTHENTICATION, Constants.KERBEROS);
        // Re-read krb5.conf in case the path changed since JVM start.
        // NOTE(review): sun.security.krb5.Config is a JDK-internal API; inaccessible
        // on newer JDKs without --add-exports. Consider removing if krb5 path is static.
        sun.security.krb5.Config.refresh();
        UserGroupInformation.setConfiguration(configuration);
        // NOTE(review): logs in with the plain user name rather than the principal —
        // the commented-out principal variant suggests this was a deliberate change; confirm.
        UserGroupInformation.loginUserFromKeytab(userName, keytabPath);
    }

    /** Cache key capturing the connection info whose change invalidates a pool. */
    private static String connectionKey(JobDatasource datasource) {
        return datasource.getJdbcUrl() + datasource.getJdbcUsername() + datasource.getJdbcPassword();
    }

    /** Closes a pool if non-null; Druid's close() releases all pooled connections. */
    private static void closeQuietly(DruidDataSource dataSource) {
        if (dataSource != null) {
            dataSource.close();
        }
    }

    /**
     * Builds a new Druid pool from the datasource's JDBC settings with the
     * project's standard pool tuning.
     *
     * @param jobDatasource source of url/username/password/driver class
     * @return a configured (not yet connected) {@link DruidDataSource}
     */
    private static DruidDataSource createPool(JobDatasource jobDatasource) {
        DruidDataSource dataSource = new DruidDataSource();
        dataSource.setUsername(jobDatasource.getJdbcUsername());
        dataSource.setPassword(jobDatasource.getJdbcPassword());
        dataSource.setUrl(jobDatasource.getJdbcUrl());
        dataSource.setDriverClassName(jobDatasource.getJdbcDriverClass());

        // Pool sizing: initial / min idle / max active.
        dataSource.setInitialSize(5);
        dataSource.setMinIdle(5);
        dataSource.setMaxActive(50);

        // Max time (ms) to wait for a connection from the pool.
        dataSource.setMaxWait(30000);

        // Interval (ms) between idle-connection eviction runs.
        dataSource.setTimeBetweenEvictionRunsMillis(60000);

        // Minimum time (ms) a connection must sit idle before it may be evicted.
        dataSource.setMinEvictableIdleTimeMillis(60000);
        dataSource.setTestWhileIdle(true);
        //dataSource.setValidationQuery("SELECT 1");
        dataSource.setTestOnBorrow(false);
        dataSource.setTestOnReturn(false);

        // Fail fast instead of retrying forever when a connection cannot be acquired.
        dataSource.setBreakAfterAcquireFailure(true);

        // Enable PSCache and cap its per-connection size.
        dataSource.setPoolPreparedStatements(true);
        dataSource.setMaxPoolPreparedStatementPerConnectionSize(20);
        return dataSource;
    }

}
