/**
 * Project: apollo-base-dal
 * 
 * File Created at 2016年5月7日
 * 
 * Copyright 2015-2016 dx.com Corporation Limited.
 * All rights reserved.
 *
 * This software is the confidential and proprietary information of
 * DongXue software Company. ("Confidential Information").  You shall not
 * disclose such Confidential Information and shall use it only in
 * accordance with the terms of the license agreement you entered into
 * with dx.com.
 */
package com.dx.pf.dal.router.dataSource;

import java.sql.SQLException;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;

import javax.sql.DataSource;

import org.apache.tools.ant.types.CommandlineJava.SysProperties;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.core.env.Environment;
import org.springframework.core.env.MapPropertySource;
import org.springframework.core.env.MutablePropertySources;
import org.springframework.core.env.PropertyResolver;
import org.springframework.core.env.PropertySource;
import org.springframework.core.env.PropertySources;
import org.springframework.core.env.PropertySourcesPropertyResolver;
import org.springframework.core.type.AnnotationMetadata;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidDataSourceFactory;
import com.alibaba.fastjson.JSON;
import com.dx.pf.commons.conf.PropertyConfigurer;
import com.dx.pf.commons.utils.StringUtil;
import com.dx.pf.dal.Logger;
import com.dx.pf.dal.conf.RelaxedPropertyResolver;
import com.dx.pf.dal.conf.db.DruidProperty;
import com.dx.pf.dal.conf.rule.RuleConfigXML;
import com.dx.pf.dal.exception.DalException;

/**
 * @ClassName: ApolloDataSourceFactory
 * @Description: Apollo data source factory — builds Druid connection pools
 *               from resolved configuration and initializes shard rules.
 * @author wuzhenfang(wzfbj2008@163.com)
 * @date 2016-05-07 15:20:49
 * @version V1.0
 */
public class ApolloDataSourceFactory extends DataSourceFactory {

	private final static Logger logger = Logger.getLogger(ApolloDataSourceFactory.class);

	public Map<String, Properties> propertiesMap = new ConcurrentHashMap<String, Properties>();

	public boolean isShard = false;
	
	public void init() {
		System.out.println("init db start..");
		initDB();
		System.out.println("initShardRule...");
		initShardRule(ruleFilePath);
		System.out.println("init db finished..");
	}

	public void initDB(){
		if(!dbConfigPrefixs.isEmpty()){
			PropertyResolver propertyResolver = PropertyConfigurer.getPropertyResolver();
			//TODO
			for (Map.Entry<String, String> entity : dbConfigPrefixs.entrySet()) {
				PropertyResolver shardPropertyResolver = new RelaxedPropertyResolver(propertyResolver,entity.getValue()+".");
				DruidProperty druidProperty = druidPropertyResolver(shardPropertyResolver, entity.getKey());
				DataSource datasource = buildDataSource(druidProperty);
				super.dataSourcesMap.put(entity.getKey(), datasource);
				
				System.out.println(shardPropertyResolver.getProperty("connectionURL"));
			}
		}
	}
	
	private DruidProperty druidPropertyResolver(PropertyResolver shardPropertyResolver,String dbKey){
		DruidProperty druidProperty = new DruidProperty();
		Properties properties = new Properties();
		//TODO
		try {
			createSingleDataSource(properties);
		} catch (Exception e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}
		return druidProperty;
	}
	
	/**
	 * 创建一个数据源
	 * @param druidProperty
	 * @return
	 */
	private DataSource buildDataSource(DruidProperty druidProperty){
		String driverClassName = druidProperty.getDriverClass();
		String url = druidProperty.getUrl();
		String username = druidProperty.getUserName();
		String password = druidProperty.getPassWord();

		DruidDataSource druidDataSource = new DruidDataSource();
		druidDataSource.setDriverClassName(driverClassName);
		druidDataSource.setUrl(url);
		druidDataSource.setUsername(username);
		druidDataSource.setPassword(password);

		druidDataSource.setInitialSize(Integer.valueOf(druidProperty.getInitialSize()));
		druidDataSource.setMinIdle(Integer.valueOf(druidProperty.getMinIdle()));
		druidDataSource.setMaxActive(Integer.valueOf(druidProperty.getMaxActive()));
		druidDataSource.setMaxWait(Integer.valueOf(druidProperty.getMaxWait()));
		druidDataSource.setTimeBetweenEvictionRunsMillis(Integer.valueOf(timeBetweenEvictionRunsMillis);
		druidDataSource.setMinEvictableIdleTimeMillis(Integer.valueOf(minEvictableIdleTimeMillis);
		druidDataSource.setValidationQuery(Integer.valueOf(validationQuery);
		druidDataSource.setTestWhileIdle(testWhileIdle);
		druidDataSource.setTestOnBorrow(testOnBorrow);
		druidDataSource.setTestOnReturn(Boolean.valueOf(gettestOnReturn);
		druidDataSource.setPoolPreparedStatements(poolPreparedStatements);
		druidDataSource.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize);
		druidDataSource.setConnectionProperties(connectionProperties);
		try {
			druidDataSource.setFilters(filters);
			druidDataSource.init();
		} catch (SQLException e) {
			e.printStackTrace();
		}
		return druidDataSource;
	}
	
	/**
	 * 初始化分片规则文件
	 * @param ruleFilePath
	 */
	private void initShardRule(String ruleFilePath){
		if(!StringUtil.isEmptyOrBlank(ruleFilePath)){
			isShard = true;
			new RuleConfigXML(ruleFilePath).getShardRuleConfList();
		}
	}
	
	/**
	 * @return the propertiesMap
	 */
	public Map<String, Properties> getPropertiesMap() {
		return propertiesMap;
	}

	/**
	 * @param propertiesMap the propertiesMap to set
	 */
	public void setPropertiesMap(Map<String, Properties> propertiesMap) {
		this.propertiesMap = propertiesMap;
	}

	
}
