package com.geping.etl.config;

import java.io.FileInputStream;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;

import javax.sql.DataSource;

import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.DependsOn;
import org.springframework.context.annotation.Profile;
import org.springframework.core.env.AbstractEnvironment;
import org.springframework.core.env.CompositePropertySource;
import org.springframework.core.env.Environment;
import org.springframework.core.env.MapPropertySource;
import org.springframework.core.env.PropertySource;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidDataSourceFactory;
import com.geping.etl.common.util.Constants;
import com.geping.etl.utils.EnvUitl;
/**
 *
 * @author chenxuan
 *
 */
@Configuration
@EnableTransactionManagement
@EnableJpaRepositories({"com.geping.etl.common.repository"})
public class DatasourceConfig {

	private final static Logger LOGGER = LoggerFactory.getLogger(DatasourceConfig.class);

	private static final String DRUID_PREFIX = "druid.";


	@Bean(name="dataSource")
	@DependsOn({"envUitl"})
	public DataSource dataSource() {
		LOGGER.info("----begin init datasource----");
		Properties dbProperties = new Properties();
		DruidDataSource dataSource = null;
		try {
			if(Constants.ON.equals(EnvUitl.outSetting)){
				String path = EnvUitl.outSettingPath + EnvUitl.outSettingConfigName;
				FileInputStream ins = new FileInputStream(path);
				dbProperties.load(ins);
			}else {
				dbProperties.load(DatasourceConfig.class.getClassLoader().getResourceAsStream(EnvUitl.profile + "/config.properties"));
			}
			dbProperties = getPropertiesFromSource(dbProperties);
			dataSource = (DruidDataSource) DruidDataSourceFactory.createDataSource(dbProperties);
			if(null != dataSource) {
				dataSource.setFilters("wall,stat");
				//        		dataSource.setTimeBetweenLogStatsMillis(5000);
				dataSource.init();
			}
		} catch (Exception e) {
			throw new RuntimeException("load datasource error, dbProperties is :" + dbProperties, e);
		}
		LOGGER.info("----end init datasource----");
		return dataSource;
	}

	private Properties getPropertiesFromSource(Properties tempDbProperties) {
		String databaseType = tempDbProperties.getProperty("database.type")!=null?tempDbProperties.getProperty("database.type"):"";
		Properties dbProperties = new Properties();
		for (Object object : tempDbProperties.keySet()) {
			String key = object!=null?object.toString():"";
			if (StringUtils.isNotBlank(databaseType) && key.startsWith(databaseType)) {
				dbProperties.put(key.replaceFirst(databaseType, ""), tempDbProperties.getProperty(key));
			} else if(key.startsWith(DRUID_PREFIX)){
				dbProperties.put(key.replaceFirst(DRUID_PREFIX, ""), tempDbProperties.getProperty(key));
			} else if (key.startsWith(databaseType + DRUID_PREFIX)) {
				dbProperties.put(key.replaceFirst(databaseType + DRUID_PREFIX, ""), tempDbProperties.getProperty(key));
			}
			dbProperties.put(key, tempDbProperties.getProperty(key));
		}
		return dbProperties;

	}

	@Bean
	public PlatformTransactionManager transactionManager(){
		JpaTransactionManager transactionManager = new JpaTransactionManager();
		transactionManager.setDataSource(dataSource());
		return transactionManager;
	}

}