package com.angel.bo.admin.config.jdbc;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.fastjson.JSON;
import com.angel.bo.admin.properties.DruidOperationProperties;
import com.angel.bo.admin.properties.DruidReadOnlyProperties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.boot.web.servlet.ServletRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;

import javax.sql.DataSource;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;

/**
 * Datasource configuration: registers the Druid monitoring servlet and stat
 * filter, builds the read-only and writable Druid connection pools from
 * externalized properties, and combines them into a routing
 * {@code DynamicDataSource} keyed by {@code DataBaseSourceType}.
 *
 * @author Haibo.Mei [Haibo.Mei@ge.com]
 * @since 2017/11/9
 */
@Configuration
public class DruidConfiguration {
    private static final Logger LOGGER = LoggerFactory.getLogger(DruidConfiguration.class);

    // Pool settings bound from application properties; constructor-injected and never reassigned.
    private final DruidOperationProperties druidOperationProperties;
    private final DruidReadOnlyProperties druidReadOnlyProperties;

    @Autowired
    public DruidConfiguration(DruidOperationProperties druidOperationProperties,
                              DruidReadOnlyProperties druidReadOnlyProperties) {
        this.druidOperationProperties = druidOperationProperties;
        this.druidReadOnlyProperties = druidReadOnlyProperties;
    }

    /**
     * Registers the Druid StatViewServlet (the built-in monitoring console).
     *
     * @return servlet registration delegated to {@code DruidMonitorConfig}
     */
    @Bean(name = "druidStatViewServlet")
    public ServletRegistrationBean druidStatViewServlet() {
        return DruidMonitorConfig.druidStatViewServlet();
    }

    /**
     * Registers Druid's stat filter for request monitoring.
     *
     * @return filter registration delegated to {@code DruidMonitorConfig}
     */
    @Bean(name = "druidStatFilter")
    public FilterRegistrationBean druidStatFilter() {
        return DruidMonitorConfig.druidStatFilter();
    }

    /**
     * Read-only database pool; marked {@code @Primary} so it is the default
     * injection target when no qualifier is given.
     *
     * @return configured read-only {@link DataSource}
     */
    @Bean(name = "readOnlyDataSource")
    @Qualifier("readOnlyDataSource")
    @Primary
    public DataSource readOnlyDataSource() {
        return getDataSource(
                druidReadOnlyProperties.getUrl(),
                druidReadOnlyProperties.getUsername(),
                druidReadOnlyProperties.getPassword(),
                druidReadOnlyProperties.getDriverClassName(),
                druidReadOnlyProperties.getInitialSize(),
                druidReadOnlyProperties.getMinIdle(),
                druidReadOnlyProperties.getMaxActive(),
                druidReadOnlyProperties.getMaxWait(),
                druidReadOnlyProperties.getTimeBetweenEvictionRunsMillis(),
                druidReadOnlyProperties.getMinEvictableIdleTimeMillis(),
                druidReadOnlyProperties.getValidationQuery(),
                druidReadOnlyProperties.isTestWhileIdle(),
                druidReadOnlyProperties.isTestOnBorrow(),
                druidReadOnlyProperties.isTestOnReturn(),
                druidReadOnlyProperties.isPoolPreparedStatements(),
                druidReadOnlyProperties.getMaxPoolPreparedStatementPerConnectionSize(),
                druidReadOnlyProperties.getFilters(),
                druidReadOnlyProperties.getConnectionProperties());
    }

    /**
     * Writable database pool.
     *
     * @return configured writable {@link DataSource}
     */
    @Bean(name = "operationDataSource")
    @Qualifier("operationDataSource")
    public DataSource operationDataSource() {
        return getDataSource(
                druidOperationProperties.getUrl(),
                druidOperationProperties.getUsername(),
                druidOperationProperties.getPassword(),
                druidOperationProperties.getDriverClassName(),
                druidOperationProperties.getInitialSize(),
                druidOperationProperties.getMinIdle(),
                druidOperationProperties.getMaxActive(),
                druidOperationProperties.getMaxWait(),
                druidOperationProperties.getTimeBetweenEvictionRunsMillis(),
                druidOperationProperties.getMinEvictableIdleTimeMillis(),
                druidOperationProperties.getValidationQuery(),
                druidOperationProperties.isTestWhileIdle(),
                druidOperationProperties.isTestOnBorrow(),
                druidOperationProperties.isTestOnReturn(),
                druidOperationProperties.isPoolPreparedStatements(),
                druidOperationProperties.getMaxPoolPreparedStatementPerConnectionSize(),
                druidOperationProperties.getFilters(),
                druidOperationProperties.getConnectionProperties());
    }

    /**
     * Builds a {@link DruidDataSource} from individual pool settings.
     *
     * <p>Note: the former JSON-dump parameter was removed — it was never used,
     * and serializing the whole properties object would have included the
     * plaintext password, which must not reach any log.
     *
     * @param filters comma-separated Druid filter names (e.g. "stat"); a bad
     *                value is logged and skipped rather than failing startup,
     *                preserving the original best-effort behavior
     * @return the configured pool
     */
    private DataSource getDataSource(String url, String username, String password,
                                     String driverClassName, int initialSize, int minIdle,
                                     int maxActive, int maxWait,
                                     int timeBetweenEvictionRunsMillis,
                                     int minEvictableIdleTimeMillis, String validationQuery,
                                     boolean testWhileIdle, boolean testOnBorrow,
                                     boolean testOnReturn, boolean poolPreparedStatements,
                                     int maxPoolPreparedStatementPerConnectionSize,
                                     String filters, String connectionProperties) {
        LOGGER.info("[DATASOURCE]-[USERNAME:{}],[URL:{}]",
                username,
                url);
        DruidDataSource datasource = new DruidDataSource();
        datasource.setUrl(url);
        datasource.setUsername(username);
        datasource.setPassword(password);
        datasource.setDriverClassName(driverClassName);
        datasource.setInitialSize(initialSize);
        datasource.setMinIdle(minIdle);
        datasource.setMaxActive(maxActive);
        datasource.setMaxWait(maxWait);
        datasource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
        datasource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
        datasource.setValidationQuery(validationQuery);
        datasource.setTestWhileIdle(testWhileIdle);
        datasource.setTestOnBorrow(testOnBorrow);
        datasource.setTestOnReturn(testOnReturn);
        datasource.setPoolPreparedStatements(poolPreparedStatements);
        datasource.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize);
        try {
            datasource.setFilters(filters);
        } catch (SQLException e) {
            // Best-effort: include the rejected value so the misconfiguration is diagnosable.
            LOGGER.error("druid configuration initialization filter, filters={}", filters, e);
        }
        datasource.setConnectionProperties(connectionProperties);
        return datasource;
    }

    /**
     * Routing datasource that switches between the read-only and writable
     * pools by {@code DataBaseSourceType} lookup key; defaults to read-only.
     *
     * @param readOnlyDataSource  the read-only pool bean
     * @param operationDataSource the writable pool bean
     * @return the routing {@link DynamicDataSource}
     */
    @Bean
    public DynamicDataSource dataSource(
            @Qualifier("readOnlyDataSource") DataSource readOnlyDataSource,
            @Qualifier("operationDataSource") DataSource operationDataSource) {
        Map<Object, Object> targetDataSources = new HashMap<>(3);
        targetDataSources.put(DataBaseSourceType.READ_ONLY_DATASOURCE, readOnlyDataSource);
        targetDataSources.put(DataBaseSourceType.OPERATION_DATASOURCE, operationDataSource);
        DynamicDataSource dataSource = new DynamicDataSource();
        dataSource.setTargetDataSources(targetDataSources);
        dataSource.setDefaultTargetDataSource(readOnlyDataSource);
        return dataSource;
    }

}
