package com.iceframework.core.utils;

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidPooledConnection;
import com.iceframework.core.entity.ISqlParam;

/**
 * Utility class for creating, caching, and using Druid data sources.
 * @author Palo
 *
 */
public class DruidDataSourceUtils {
	
	protected static final Log logger = LogFactory.getLog(DruidDataSourceUtils.class);
	
	// Registry of pools keyed by logical name; populated by createDb() and pruned by destoryDruidDataSource().
	// NOTE(review): plain HashMap — assumes pools are registered during single-threaded startup; confirm before
	// calling createDb()/destoryDruidDataSource() concurrently.
	private static Map<String, DruidDataSource> dataSourceMap = new HashMap<>();

	/**
	 * Returns the live registry of all data sources, keyed by logical name.
	 * @return the mutable data-source map (never {@code null})
	 */
	public static Map<String, DruidDataSource> getDataSourceMap() {
		return dataSourceMap;
	}

	/**
	 * Looks up a registered data source.
	 * @param key logical name used when the pool was created
	 * @return the pool, or {@code null} if no pool is registered under that key
	 */
	public static DruidDataSource getDataSource(String key) {
		return dataSourceMap.get(key);
	}

	/**
	 * Reads a Spring Boot property, falling back to a default when it is not configured.
	 * @param name property name
	 * @param defaultValue value to use when the property is absent
	 * @return the configured value, or {@code defaultValue}
	 */
	private static String getProperty(String name, String defaultValue) {
		String value = CommonUtils.getSpringBootProperties(name);
		return value == null ? defaultValue : value;
	}

	/**
	 * Closes each JDBC resource in order, logging (never propagating) close failures so that
	 * one failed {@code close()} cannot prevent the remaining resources from being released.
	 * @param resources resources to close, in the order given; {@code null} entries are skipped
	 */
	private static void closeQuietly(AutoCloseable... resources) {
		for (AutoCloseable resource : resources) {
			if (resource != null) {
				try {
					resource.close();
				} catch (Exception e) {
					logger.error("Failed to close JDBC resource: " + e.getMessage(), e);
				}
			}
		}
	}

	/**
	 * Creates a Druid connection pool and registers it under the given key.
	 * Pool tuning values are read from {@code spring.datasource.*} properties, each with a default.
	 * @param key logical name to register the pool under
	 * @param driverClassName JDBC driver class name
	 * @param url JDBC connection URL
	 * @param username database user
	 * @param password database password
	 */
	public static void createDb(String key, String driverClassName, String url, String username, String password) {
		DruidDataSource dataSource = new DruidDataSource();
		dataSource.setUrl(url);
		dataSource.setUsername(username);
		dataSource.setPassword(password);
		dataSource.setDriverClassName(driverClassName);
		// Number of physical connections established at initialization
		dataSource.setInitialSize(Integer.parseInt(getProperty("spring.datasource.initialSize", "5")));
		// Maximum pool size
		dataSource.setMaxActive(Integer.parseInt(getProperty("spring.datasource.maxActive", "20")));
		// Minimum number of idle connections kept in the pool
		dataSource.setMinIdle(Integer.parseInt(getProperty("spring.datasource.minIdle", "5")));
		// Maximum wait when borrowing a connection, in milliseconds
		dataSource.setMaxWait(Integer.parseInt(getProperty("spring.datasource.maxWait", "60000")));
		// Interval between idle-connection eviction runs, in milliseconds
		dataSource.setTimeBetweenEvictionRunsMillis(Integer.parseInt(getProperty("spring.datasource.timeBetweenEvictionRunsMillis", "60000")));
		// Minimum time a connection must sit idle before it is eligible for eviction
		dataSource.setMinEvictableIdleTimeMillis(Integer.parseInt(getProperty("spring.datasource.minEvictableIdleTimeMillis", "300000")));
		// SQL used to validate connections
		dataSource.setValidationQuery(getProperty("spring.datasource.validationQuery", "select 1 from dual"));
		// Recommended true: validates idle connections without impacting performance
		dataSource.setTestWhileIdle(Boolean.parseBoolean(getProperty("spring.datasource.testWhileIdle", "true")));
		// Run the validation query when a connection is borrowed / returned
		dataSource.setTestOnBorrow(Boolean.parseBoolean(getProperty("spring.datasource.testOnBorrow", "false")));
		dataSource.setTestOnReturn(Boolean.parseBoolean(getProperty("spring.datasource.testOnReturn", "false")));
		// PreparedStatement cache (PSCache): true for Oracle, false for MySQL; false recommended with heavy sharding
		dataSource.setPoolPreparedStatements(Boolean.parseBoolean(getProperty("spring.datasource.poolPreparedStatements", "true")));
		// PSCache size per connection, when enabled
		dataSource.setMaxPoolPreparedStatementPerConnectionSize(Integer.parseInt(getProperty("spring.datasource.maxPoolPreparedStatementPerConnectionSize", "20")));
		dataSource.setConnectionProperties(getProperty("spring.datasource.connectionProperties", "config.decrypt=false;druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000"));
		// Fail fast on initial acquisition failure instead of retrying indefinitely
		dataSource.setBreakAfterAcquireFailure(true);
		try {
			dataSource.setFilters(getProperty("spring.datasource.filters", "config,stat,wall,log4j"));
		} catch (SQLException e) {
			logger.error("Failed to configure Druid filters: " + e.getMessage(), e);
		}
		dataSourceMap.put(key, dataSource);
	}
	
	/**
	 * Returns a connection that is open and valid, replacing stale ones from the pool.
	 * Guards against {@code SQLRecoverableException: IO Error: Broken pipe}: the server (e.g. Oracle)
	 * may silently drop idle pooled connections, which the pool would otherwise hand out unchanged.
	 * @param dataSource pool to draw replacement connections from
	 * @param oldConn candidate connection; may be {@code null}, closed, or stale
	 * @return a valid connection (possibly {@code oldConn} itself)
	 * @throws SQLException if a replacement connection cannot be obtained from the pool
	 */
	public static DruidPooledConnection getValidConnection(DruidDataSource dataSource, DruidPooledConnection oldConn) throws SQLException {
		DruidPooledConnection conn = oldConn;
		// HH (24-hour) instead of hh: the original 12-hour pattern had no AM/PM marker, making log timestamps ambiguous.
		SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH:mm:ss");
		int validationTimeoutSeconds = 30;
		// Keep replacing the connection until we hold one that is open and passes validation.
		while (null == conn || conn.isClosed() || !conn.isValid(validationTimeoutSeconds)) {
			try {
				if (null != conn && !conn.isClosed()) {
					// Return the stale connection to the pool before requesting a new one.
					conn.close();
				}
			} catch (SQLException e) {
				logger.error(sdf.format(new Date()) + "======Can not close connection :\t" + e.getMessage(), e);
			}
			conn = dataSource.getConnection();
			try {
				// Brief pause between attempts so a flapping database is not hammered.
				Thread.sleep(2000);
			} catch (InterruptedException e) {
				// Restore the interrupt flag so callers can observe the interruption.
				Thread.currentThread().interrupt();
			}
		}
		return conn;
	}

	/**
	 * Executes the given query and returns the column names of its result set.
	 * @param key logical name of the registered data source
	 * @param sql query to execute (must produce a result set)
	 * @return the non-empty column names, or {@code null} if no data source is registered under {@code key}
	 * @throws Exception if obtaining a connection or executing the query fails
	 */
	public static List<String> getColumnNames(String key, String sql) throws Exception {
		DruidDataSource dataSource = dataSourceMap.get(key);
		if (dataSource == null) {
			return null;
		}
		DruidPooledConnection conn = null;
		Statement stmt = null;
		ResultSet rs = null;
		try {
			// Validate a single borrowed connection. The original nested getValidConnection(...,
			// getValidConnection(..., getConnection())) borrowed a second connection and leaked it.
			conn = getValidConnection(dataSource, dataSource.getConnection());
			stmt = conn.createStatement();
			rs = stmt.executeQuery(sql);
			ResultSetMetaData rsmd = rs.getMetaData();
			int colCount = rsmd.getColumnCount();
			List<String> colNameList = new ArrayList<String>(colCount);
			// JDBC metadata columns are 1-based.
			for (int i = 1; i <= colCount; i++) {
				String columnName = rsmd.getColumnName(i);
				if (StringUtils.isNotEmpty(columnName)) {
					colNameList.add(columnName);
				}
			}
			return colNameList;
		} finally {
			closeQuietly(rs, stmt, conn);
		}
	}

	/**
	 * Materializes a result set into a list of row maps (column name → value).
	 * Values are read with {@link ResultSet#getString}, so every value is a {@code String}
	 * (or {@code null}) regardless of the column's SQL type — preserved for caller compatibility.
	 * @param rs open result set, positioned before the first row; not closed by this method
	 * @return one map per row, in cursor order
	 * @throws Exception if reading the result set fails
	 */
	public static List<Map<String, Object>> resultSetToList(ResultSet rs) throws Exception {
		List<Map<String, Object>> results = new ArrayList<Map<String, Object>>();
		ResultSetMetaData rsmd = rs.getMetaData();
		int colCount = rsmd.getColumnCount();
		List<String> colNameList = new ArrayList<String>(colCount);
		for (int i = 1; i <= colCount; i++) {
			colNameList.add(rsmd.getColumnName(i));
		}
		while (rs.next()) {
			Map<String, Object> row = new HashMap<>();
			for (String colName : colNameList) {
				row.put(colName, rs.getString(colName));
			}
			results.add(row);
		}
		return results;
	}

	/**
	 * Executes a query against the named data source and returns its rows.
	 * NOTE(review): {@code sql} is executed verbatim — callers must not build it from untrusted
	 * input; prefer {@link #executeQuery(String, String, ISqlParam)} for parameterized queries.
	 * @param key logical name of the registered data source
	 * @param sql query to execute
	 * @return the result rows, or {@code null} if no data source is registered under {@code key}
	 * @throws Exception if obtaining a connection or executing the query fails
	 */
	public static List<Map<String, Object>> executeQuery(String key, String sql) throws Exception {
		DruidDataSource dataSource = dataSourceMap.get(key);
		if (dataSource == null) {
			return null;
		}
		DruidPooledConnection conn = null;
		Statement stmt = null;
		ResultSet rs = null;
		try {
			conn = getValidConnection(dataSource, dataSource.getConnection());
			stmt = conn.createStatement();
			logger.debug("==> SQL:" + sql);
			rs = stmt.executeQuery(sql);
			return resultSetToList(rs);
		} finally {
			closeQuietly(rs, stmt, conn);
		}
	}

	/**
	 * Executes an update statement against the named data source and commits it.
	 * The original disabled auto-commit without ever committing, so updates were silently
	 * discarded when the connection was recycled; this version commits on success and
	 * rolls back on failure.
	 * @param key logical name of the registered data source
	 * @param sql update statement to execute
	 * @return the affected row count, or {@code 0} if no data source is registered under {@code key}
	 * @throws SQLException if obtaining a connection or executing the update fails
	 */
	public static int executeUpdate(String key, String sql) throws SQLException {
		DruidDataSource dataSource = dataSourceMap.get(key);
		if (dataSource == null) {
			return 0;
		}
		DruidPooledConnection conn = null;
		Statement stmt = null;
		try {
			conn = getValidConnection(dataSource, dataSource.getConnection());
			conn.setAutoCommit(false);
			stmt = conn.createStatement();
			logger.debug("==> SQL:" + sql);
			int affected = stmt.executeUpdate(sql);
			conn.commit();
			return affected;
		} catch (SQLException e) {
			rollbackQuietly(conn);
			throw e;
		} finally {
			closeQuietly(stmt, conn);
		}
	}

	/**
	 * Rolls back the given connection, logging (never propagating) rollback failures so the
	 * original exception stays visible to the caller.
	 * @param conn connection to roll back; {@code null} is a no-op
	 */
	private static void rollbackQuietly(DruidPooledConnection conn) {
		if (conn != null) {
			try {
				conn.rollback();
			} catch (SQLException e) {
				logger.error("Rollback failed: " + e.getMessage(), e);
			}
		}
	}

	/**
	 * Executes a parameterized query against the named data source and returns its rows.
	 * @param key logical name of the registered data source
	 * @param sql query with {@code ?} placeholders
	 * @param param callback that binds the placeholder values onto the prepared statement
	 * @return the result rows, or {@code null} if no data source is registered under {@code key}
	 * @throws Exception if obtaining a connection, binding parameters, or executing the query fails
	 */
	public static List<Map<String, Object>> executeQuery(String key, String sql, ISqlParam param) throws Exception {
		DruidDataSource dataSource = dataSourceMap.get(key);
		if (dataSource == null) {
			return null;
		}
		DruidPooledConnection conn = null;
		PreparedStatement pstmt = null;
		ResultSet rs = null;
		try {
			conn = getValidConnection(dataSource, dataSource.getConnection());
			// No setAutoCommit(false) here: the original opened a transaction for a read-only
			// query and never ended it, leaving the transaction dangling until close.
			pstmt = conn.prepareStatement(sql);
			param.setParam(pstmt);
			logger.debug("==> SQL:" + sql);
			rs = pstmt.executeQuery();
			return resultSetToList(rs);
		} finally {
			closeQuietly(rs, pstmt, conn);
		}
	}

	/**
	 * Executes a parameterized update against the named data source and commits it.
	 * Same auto-commit fix as {@link #executeUpdate(String, String)}: commit on success,
	 * roll back on failure.
	 * @param key logical name of the registered data source
	 * @param sql update statement with {@code ?} placeholders
	 * @param param callback that binds the placeholder values onto the prepared statement
	 * @return the affected row count, or {@code 0} if no data source is registered under {@code key}
	 * @throws Exception if obtaining a connection, binding parameters, or executing the update fails
	 */
	public static int executeUpdate(String key, String sql, ISqlParam param) throws Exception {
		DruidDataSource dataSource = dataSourceMap.get(key);
		if (dataSource == null) {
			return 0;
		}
		DruidPooledConnection conn = null;
		PreparedStatement pstmt = null;
		try {
			conn = getValidConnection(dataSource, dataSource.getConnection());
			conn.setAutoCommit(false);
			pstmt = conn.prepareStatement(sql);
			logger.debug("==> SQL:" + sql);
			param.setParam(pstmt);
			int affected = pstmt.executeUpdate();
			conn.commit();
			return affected;
		} catch (SQLException e) {
			rollbackQuietly(conn);
			throw e;
		} finally {
			closeQuietly(pstmt, conn);
		}
	}
	
	/**
	 * Unregisters and closes the Druid pool registered under the given key.
	 * A no-op (instead of an NPE, as before) when no pool is registered under {@code key}.
	 * @param key logical name of the pool to destroy
	 * @throws Exception if closing the pool fails
	 */
	public static void destoryDruidDataSource(String key) throws Exception {
		DruidDataSource dataSource = dataSourceMap.remove(key);
		if (dataSource != null) {
			dataSource.close();
		}
	}
	
}
