package com.etlmaster.core;

import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import javax.sql.DataSource;

import org.springframework.beans.MutablePropertyValues;
import org.springframework.beans.factory.config.ConstructorArgumentValues;
import org.springframework.beans.factory.support.GenericBeanDefinition;
import org.springframework.jdbc.datasource.DriverManagerDataSource;

import com.etlmaster.executor.utils.LogWriter;

/**
 * Singleton registry of datasource definitions; builds and registers dynamic
 * Spring {@code DataSource} bean definitions (pooled RDB or hadoop-style).
 *
 * @author Larry (Larry@etlMaster)
 * @since 2017-09-10
 */
public class DataSourceHolder{

	/**
	 * Matches a known vendor keyword anywhere in a lower-cased driver class name.
	 * Compiled once instead of on every {@link #getDatasourceType(String)} call.
	 */
	private static final Pattern DS_TYPE_PATTERN =
			Pattern.compile(".*(db2|oracle|mysql|teradata|sqlserver|hive|transwarp|spark|gbase).*");

	private DataSourceHolder(){
		// singleton - obtain via getInstance()
	}

	private static DataSourceHolder instance = new DataSourceHolder();

	public static DataSourceHolder getInstance(){
		return instance;
	}

	// registered datasource definitions, keyed by datasource name/id
	private Map<String,DataSourceDefine> dsDefs = new HashMap<String,DataSourceDefine>();

	public Map<String, DataSourceDefine> getDsDefs() {
		return dsDefs;
	}

	// called by dp-executor
	public void addDsDefs(String id,DataSourceDefine dataSourceDefine) {
		dsDefs.put(id, dataSourceDefine);
	}

	public DataSourceDefine getDataSourceDefine(String dsName) {
		return dsDefs.get(dsName);
	}

	/** @return {@code true} when {@code str} is {@code null} or empty */
	private boolean isNull(String str){
		return str==null || str.length()==0;
	}

	/**
	 * Returns the per-tenant datasource bean named {@code dsName + "-" + username},
	 * registering it on first use from the {@link DataSourceDefine} stored under
	 * {@code dsName}.
	 *
	 * @param dsName   name of a previously registered {@link DataSourceDefine}
	 * @param username tenant user; part of the bean name and used as the JDBC user
	 * @param password JDBC password (an empty password is only logged as a warning)
	 * @return the cached or newly registered {@link DataSource}
	 * @throws IllegalArgumentException if no definition is registered under {@code dsName}
	 */
	public DataSource getDataSource(String dsName,String username,String password){
		String fullDsName = dsName+"-"+username;
		DataSource ds = null;
		try{
			ds = (DataSource)BeanFactory.getBean(fullDsName);
		}catch(Exception e){
			// bean not registered yet - fall through and register it below
			LogWriter.addLog("DEBUG","租户数据源[{}]初次链接未获取数据源",fullDsName);
		}
		if(ds==null){
			// reuse the isNull helper instead of duplicating the null/empty check
			if(isNull(password)){
				LogWriter.addLog("WARN","租户数据源[{}]链接的密码为空，可能造成获取数据源失败",fullDsName);
			}
			DataSourceDefine def = dsDefs.get(dsName);
			if(def==null){
				// fail fast with a clear message instead of an opaque NullPointerException
				throw new IllegalArgumentException("No DataSourceDefine registered under name: "+dsName);
			}
			BeanFactory.registerBeanDefinition(fullDsName, genDataSourceBeanDefinition(def.getUrl(), def.getDriverClassName(), username, password), null);
			ds = (DataSource)BeanFactory.getBean(fullDsName);
		}
		return ds;
	}

	/** Registers a datasource bean (plus alias) built from the given definition. */
	public void registerDynamicDataSource(DataSourceDefine def){
		BeanFactory.registerBeanDefinition(def.getId(), genDataSourceBeanDefinition(def.getUrl(), def.getDriverClassName(), def.getUsername(), def.getPassword()), def.getAlias());
	}

	/**
	 * Builds a bean definition for the given driver: hadoop-style drivers
	 * (hive/transwarp/spark) get a {@code SimpleDriverDataSource}, everything
	 * else a pooled/plain RDB datasource.
	 */
	GenericBeanDefinition genDataSourceBeanDefinition(String url,String driverName,String username,String password){
		// contains() instead of indexOf(...)>0: the old check missed driver
		// names that START with the keyword (index 0)
		if(driverName.contains("hive") || driverName.contains("transwarp") || driverName.contains("spark")){
			return genHadoopDsBeanDefinition(driverName, url,username,password);
		}else{
			return genRdbDsBeanDefinition(driverName,  url, username, password);
		}
	}

	// NOTE(review): throws NullPointerException when name is unknown - kept as-is
	// so existing callers that rely on the current behaviour are unaffected
	public String getDataSourceType(String name){
		return dsDefs.get(name).getDsType();
	}

	/**
	 * Builds a relational-datasource bean definition: DBCP pool when the pool
	 * class is on the classpath, otherwise an unpooled {@code DriverManagerDataSource}.
	 */
	private GenericBeanDefinition genRdbDsBeanDefinition(String driverName,String url,String username,String password){
		GenericBeanDefinition gbd = new GenericBeanDefinition();
		MutablePropertyValues mpv = new MutablePropertyValues();
		mpv.add("driverClassName", driverName);
		mpv.add("url", url);
		mpv.add("username", username);
		mpv.add("password", password);

		// prefer the DBCP connection pool; fall back to a direct connection
		Class<?> clz;
		try{
			clz = Class.forName("org.apache.commons.dbcp.BasicDataSource");
		}catch(Exception e){
			LogWriter.addLog("DEBUG","容器没有加载到连接池类：org.apache.commons.dbcp.BasicDataSource，使用的是直连可能会导致性能下降");
			clz = DriverManagerDataSource.class;
		}
		gbd.setBeanClass(clz);
		gbd.setPropertyValues(mpv);
		return gbd;
	}

	/**
	 * Builds a {@code SimpleDriverDataSource} bean definition for hadoop-style
	 * drivers, additionally registering the bare driver as {@code <type>-driver}.
	 */
	private GenericBeanDefinition genHadoopDsBeanDefinition(String driverName,String url,String username,String password){
		GenericBeanDefinition hadoopDriverBeanDef = new GenericBeanDefinition();
		hadoopDriverBeanDef.setBeanClassName(driverName);
		String dsType = getDatasourceType(driverName);
		BeanFactory.registerBeanDefinition(dsType+"-driver", hadoopDriverBeanDef, null);
		GenericBeanDefinition gbd = new GenericBeanDefinition();
		gbd.setBeanClass(org.springframework.jdbc.datasource.SimpleDriverDataSource.class);
		// SimpleDriverDataSource(Driver, url, username, password); the bean
		// definition at index 0 is presumably resolved by Spring as an inner
		// bean yielding the Driver instance - NOTE(review): verify, the same
		// definition is also registered above and may be redundant
		ConstructorArgumentValues args = new ConstructorArgumentValues();
		args.addIndexedArgumentValue(0, hadoopDriverBeanDef );
		args.addIndexedArgumentValue(1,url);
		args.addIndexedArgumentValue(2, username);
		args.addIndexedArgumentValue(3, password);
		gbd.setConstructorArgumentValues(args);
		return gbd;
	}

	/**
	 * Extracts the vendor keyword (db2, oracle, mysql, ...) from a driver class
	 * name. Returns the driver name itself when no keyword matches, or "" for null.
	 */
	private String getDatasourceType(String driverName){
		String result="";
		if(driverName!=null){
			// Locale.ROOT avoids locale-sensitive lower-casing (Turkish-I problem)
			Matcher m = DS_TYPE_PATTERN.matcher(driverName.toLowerCase(Locale.ROOT));
			if(m.matches()){
				result = m.group(1);
			}else{
				result = driverName;
			}
		}
		return result;
	}
}
