/**
 * Project: apollo-base-dal
 * 
 * File Created at 2016年4月30日
 * 
 * Copyright 2015-2016 dx.com Corporation Limited.
 * All rights reserved.
 *
 * This software is the confidential and proprietary information of
 * DongXue software Company. ("Confidential Information").  You shall not
 * disclose such Confidential Information and shall use it only in
 * accordance with the terms of the license agreement you entered into
 * with dx.com.
 */
package com.dx.pf.dal.router.dataSource;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;

import javax.sql.DataSource;

import org.springframework.context.EnvironmentAware;
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidDataSourceFactory;
import com.alibaba.fastjson.JSON;
import com.dx.pf.dal.Logger;
import com.dx.pf.dal.exception.DalException;
import com.dx.pf.dal.router.rule.ShardRule;

/**
 * @ClassName: DataSourceFactory
 * @Description: Data source factory — holds the shared registry of named
 *               {@link DataSource} instances, creates Druid pools from
 *               {@link Properties}, and resolves pools by shard rule.
 * @author wuzhenfang(wzfbj2008@163.com)
 * @date 2016-04-30 11:57:24
 * @version V1.0
 */
public abstract class DataSourceFactory {

	private static final Logger logger = Logger.getLogger(DataSourceFactory.class);

	/**
	 * Registry of all created data sources, keyed by database key.
	 * NOTE(review): public static mutable state shared by every factory
	 * instance; kept public for backward compatibility with existing callers.
	 */
	public static Map<String, DataSource> dataSourcesMap = new ConcurrentHashMap<String, DataSource>();

	/** Database configuration prefixes: key = dbclusterName, value = shardingname. */
	public Map<String, String> dbConfigPrefixs = new HashMap<String, String>();

	/** Path of the shard-rule configuration file. */
	public String ruleFilePath = "";

	/**
	 * Initializes the factory (load configuration, build data sources).
	 * Implemented by concrete subclasses.
	 */
	public abstract void init();

	/**
	 * Creates a single Druid data source from the given pool properties.
	 *
	 * @param properties Druid connection-pool configuration
	 * @return the newly created data source
	 * @throws Exception if Druid cannot build the pool from the properties
	 */
	public DataSource createSingleDataSource(Properties properties) throws Exception {
		return DruidDataSourceFactory.createDataSource(properties);
	}

	/**
	 * Creates a data source, reporting failure via the return value instead of
	 * propagating the exception.
	 * NOTE(review): the created data source is NOT registered in
	 * {@link #dataSourcesMap} here — callers appear expected to register it via
	 * {@link #addDruidDataSource(String, DataSource)}; confirm against callers.
	 *
	 * @param properties Druid connection-pool configuration
	 * @return true if the data source was created, false on any failure
	 */
	public boolean createDataSource(Properties properties) throws DalException {
		try {
			if (createSingleDataSource(properties) != null) {
				return true;
			}
		} catch (Exception e) {
			// Deliberately swallowed: a failed pool is reported via the boolean
			// return; the full cause is preserved in the log.
			logger.error("初始化数据源失败。properties:" + JSON.toJSONString(properties), e);
		}
		return false;
	}

	/**
	 * Returns an arbitrary registered data source, or null when none exists.
	 * NOTE(review): ConcurrentHashMap iteration order is unspecified, so
	 * "first" is not deterministic when more than one data source is registered.
	 *
	 * @return any registered data source, or null if the registry is empty
	 */
	public DruidDataSource getDataSource() {
		// Iterate directly instead of isEmpty() + iterator().next(): checking
		// emptiness first races with concurrent removals and could throw
		// NoSuchElementException. CHM iterators are weakly consistent and safe.
		for (DataSource dataSource : dataSourcesMap.values()) {
			return (DruidDataSource) dataSource;
		}
		return null;
	}

	/**
	 * Resolves the data source selected by the given shard rule.
	 *
	 * @param shardRule rule whose DB key identifies the target data source
	 * @return the matching data source, or null if the registry is empty or no
	 *         entry exists for the rule's DB key
	 * @throws DalException declared for subclass/caller compatibility
	 */
	public DruidDataSource getDataSource(ShardRule shardRule) throws DalException {
		if (!dataSourcesMap.isEmpty()) {
			return (DruidDataSource) dataSourcesMap.get(shardRule.getDBKay());
		}
		return null;
	}

	/**
	 * @return the ruleFilePath
	 */
	public String getRuleFilePath() {
		return ruleFilePath;
	}

	/**
	 * @param ruleFilePath the ruleFilePath to set
	 */
	public void setRuleFilePath(String ruleFilePath) {
		this.ruleFilePath = ruleFilePath;
	}

	/**
	 * @return the dbConfigPrefixs
	 */
	public Map<String, String> getDbConfigPrefixs() {
		return dbConfigPrefixs;
	}

	/**
	 * @param dbConfigPrefixs the dbConfigPrefixs to set
	 */
	public void setDbConfigPrefixs(Map<String, String> dbConfigPrefixs) {
		this.dbConfigPrefixs = dbConfigPrefixs;
	}

	/**
	 * Registers a data source in the shared registry.
	 * Note: ConcurrentHashMap rejects null keys and values, so passing null
	 * here throws NullPointerException.
	 *
	 * @param dbKey      database key the data source is registered under
	 * @param datasource the data source to register
	 */
	public void addDruidDataSource(String dbKey, DataSource datasource) {
		dataSourcesMap.put(dbKey, datasource);
	}
}
