package com.yoke.database;
/**
 * Created by jiangzeyin on 2017/1/6.
 */

import com.alibaba.druid.pool.DruidDataSourceFactory;
import com.yoke.system.log.LogType;
import com.yoke.system.log.SystemLog;
import com.yoke.util.PropertiesParser;
import org.springframework.context.EnvironmentAware;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;
import org.springframework.util.Assert;

import javax.sql.DataSource;
import java.io.FileInputStream;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;


/**
 * 数据源配置信息 (data-source configuration).
 *
 * <p>Reads the data-source tag names and the connection-properties file paths
 * from the Spring environment and exposes a {@code DatabaseContextHolder}
 * bean backed by Druid data sources.
 *
 * @author jiangzeyin
 * @create 2017 01 06 17:10
 */
@Configuration
public class DataSourceConfig implements EnvironmentAware {

    // Data-source tags inside each properties file,
    // read from "dataSourceConfig.sourceTagNames".
    private String[] sourceTagNames;
    // Paths of the properties files holding connection settings,
    // read from "dataSourceConfig.configPath".
    private String[] configPath;

    @Override
    public void setEnvironment(Environment environment) {
        this.sourceTagNames = environment.getProperty("dataSourceConfig.sourceTagNames", String[].class);
        this.configPath = environment.getProperty("dataSourceConfig.configPath", String[].class);
    }

    /**
     * Creates the multi-data-source routing holder.
     *
     * <p>With a single config file, the holder receives the data sources
     * parsed from it. With several files, each file is parsed into its own
     * map, the holder is constructed with all maps, and the first map becomes
     * the active target set (same precedence as before).
     *
     * @return the configured {@link DatabaseContextHolder} bean
     * @throws Exception if a config file cannot be read or a data source
     *                   cannot be created
     */
    @Bean
    public DatabaseContextHolder dataSource() throws Exception {
        SystemLog.LOG(LogType.DEFAULT).info("初始化连接数据库");
        Assert.state(sourceTagNames != null, "数据源标记不能为空");
        Assert.notNull(configPath, "数据库连接信息不能为空");
        if (configPath.length < 1) {
            throw new IllegalArgumentException("数据库连接信息不能为空");
        }
        DatabaseContextHolder databaseContextHolder;
        if (configPath.length == 1) {
            Map<Object, Object> targetDataSources = initConfigPath(configPath[0]);
            databaseContextHolder = new DatabaseContextHolder();
            databaseContextHolder.setTargetDataSources(targetDataSources);
        } else {
            // Generic array creation is illegal; the raw creation is safe here
            // because only Map<Object, Object> instances are ever stored.
            @SuppressWarnings("unchecked")
            Map<Object, Object>[] maps = new ConcurrentHashMap[configPath.length];
            for (int i = 0; i < configPath.length; i++) {
                maps[i] = initConfigPath(configPath[i]);
            }
            databaseContextHolder = new DatabaseContextHolder(maps);
            // The first config file supplies the initially active data sources.
            databaseContextHolder.setTargetDataSources(maps[0]);
        }
        // NOTE(review): no default target data source is set (the original had
        // it commented out) — lookups with an unknown key will fail until one
        // is configured; confirm this is intended.
        return databaseContextHolder;
    }

    /**
     * Parses one properties file and builds a Druid {@link DataSource} for
     * every configured tag.
     *
     * @param configPath path of the properties file to load
     * @return tag -&gt; {@link DataSource} map
     * @throws Exception if the file cannot be read or Druid rejects the config
     */
    private Map<Object, Object> initConfigPath(String configPath) throws Exception {
        SystemLog.LOG().info(configPath);
        Properties properties = new Properties();
        // try-with-resources: the original leaked the stream when load() threw.
        try (FileInputStream fileInputStream = new FileInputStream(configPath)) {
            properties.load(fileInputStream);
        }
        PropertiesParser propertiesParser = new PropertiesParser(properties);
        Map<Object, Object> dataSources = new ConcurrentHashMap<>();
        for (String tag : sourceTagNames) {
            Properties tagProperties = propertiesParser.getPropertyGroup(tag, true);
            // Fail fast with a clear message when a required key is missing.
            // (The original re-set each value onto itself — a decryption
            // placeholder — which raised a bare NPE for a missing key.)
            requireProperty(tagProperties, DruidDataSourceFactory.PROP_URL, tag);
            requireProperty(tagProperties, DruidDataSourceFactory.PROP_USERNAME, tag);
            requireProperty(tagProperties, DruidDataSourceFactory.PROP_PASSWORD, tag);
            DataSource dataSource = DruidDataSourceFactory.createDataSource(tagProperties);
            dataSources.put(tag, dataSource);
        }
        return dataSources;
    }

    /**
     * Asserts that {@code key} is present in the tag-scoped properties.
     *
     * @param properties connection properties for one data-source tag
     * @param key        required property key
     * @param tag        data-source tag, included in the error message
     */
    private static void requireProperty(Properties properties, String key, String tag) {
        Assert.notNull(properties.getProperty(key), "数据源[" + tag + "]缺少配置项:" + key);
    }
}
