package com.booter.merchant.config;

import com.zaxxer.hikari.HikariDataSource;
import lombok.extern.slf4j.Slf4j;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.shardingsphere.api.config.sharding.KeyGeneratorConfiguration;
import org.apache.shardingsphere.api.config.sharding.ShardingRuleConfiguration;
import org.apache.shardingsphere.api.config.sharding.TableRuleConfiguration;
import org.apache.shardingsphere.api.config.sharding.strategy.InlineShardingStrategyConfiguration;
import org.apache.shardingsphere.shardingjdbc.api.ShardingDataSourceFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.io.support.ResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;

import javax.sql.DataSource;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * Sharding data source configuration (ShardingSphere 4.x Java API).
 * <p>
 * Note: creating the data sources does NOT initialize the connection pools;
 * pool setup happens lazily on the first {@code getConnection()} call.
 */
@Slf4j
@Configuration
public class ShardingDataSourceConfig {
/*
	HikariCP metrics reference (requires Codahale/Dropwizard support):

	hikaricp.connections          total connections = active + idle; varies with usage.
	hikaricp.connections.active   connections currently in use; varies with usage.
	hikaricp.connections.idle     idle connections; varies with usage.
	hikaricp.connections.max      maximum pool size (static configuration).
	hikaricp.connections.min      minimum pool size (static configuration).
	hikaricp.connections.pending  threads waiting for a connection. Normally 0; a sustained
	                              non-zero value should trigger an alert and may call for a
	                              larger maximum pool size.
	hikaricp.connections.acquire  time to acquire a connection, in ns.
	hikaricp.connections.creation connection creation time, in ms.
	hikaricp.connections.timeout  number of connection acquisition timeouts.
	hikaricp.connections.usage    time a connection is held between checkout and return, in ms.
	                              Long values usually indicate slow SQL or long transactions.

	Example registry wiring:

	public MetricRegistry initMetricRegistry(String poolName) {
		MetricRegistry metricRegistry = new MetricRegistry();
		Slf4jReporter reporter = Slf4jReporter.forRegistry(metricRegistry)
				.outputTo(log)
				.convertRatesTo(TimeUnit.SECONDS)
				.convertDurationsTo(TimeUnit.MILLISECONDS)
				.build();
		reporter.start(30, TimeUnit.SECONDS); // report every 30 seconds
		return metricRegistry;
	}

	dataSource.setMetricRegistry(initMetricRegistry(dataSource.getPoolName()));
*/

	/** First merchant shard; primary pool, bound to {@code spring.datasource.ds0}. */
	@Bean
	@Primary
	@ConfigurationProperties(prefix = "spring.datasource.ds0")
	public DataSource ds0() {
		return DataSourceBuilder.create().type(HikariDataSource.class).build();
	}

	/** Second merchant shard, bound to {@code spring.datasource.ds1}. */
	@Bean
	@ConfigurationProperties(prefix = "spring.datasource.ds1")
	public DataSource ds1() {
		return DataSourceBuilder.create().type(HikariDataSource.class).build();
	}

	/** First user shard, bound to {@code spring.datasource.ds2}. */
	@Bean
	@ConfigurationProperties(prefix = "spring.datasource.ds2")
	public DataSource ds2() {
		return DataSourceBuilder.create().type(HikariDataSource.class).build();
	}

	/** Second user shard, bound to {@code spring.datasource.ds3}. */
	@Bean
	@ConfigurationProperties(prefix = "spring.datasource.ds3")
	public DataSource ds3() {
		return DataSourceBuilder.create().type(HikariDataSource.class).build();
	}

	/**
	 * Builds the sharded {@link DataSource} over the four physical pools.
	 *
	 * @param ds0 first merchant shard ({@code merchant_0})
	 * @param ds1 second merchant shard ({@code merchant_1})
	 * @param ds2 first user shard ({@code user_0})
	 * @param ds3 second user shard ({@code user_1})
	 * @return the ShardingSphere routing data source
	 * @throws SQLException if the sharding data source cannot be created
	 */
	@Bean("dataSource")
	public DataSource dataSource(@Qualifier("ds0") DataSource ds0,@Qualifier("ds1") DataSource ds1
	,@Qualifier("ds2") DataSource ds2,@Qualifier("ds3") DataSource ds3) throws SQLException {
		// Map logical data source names to the physical pools.
		Map<String, DataSource> dataSourceMap = new HashMap<>();
		dataSourceMap.put("merchant_0", ds0);
		dataSourceMap.put("merchant_1", ds1);

		dataSourceMap.put("user_0", ds2);
		dataSourceMap.put("user_1", ds3);


		/****
		 * 1. Horizontal sharding (database + table)
		 */
		// Shard by hash of the merchant id, modulo shard count.
		TableRuleConfiguration merchantRuleConfig = new TableRuleConfiguration("t_merchant", "merchant_$->{0..1}.t_merchant_$->{0..2}");
		merchantRuleConfig.setDatabaseShardingStrategyConfig(new InlineShardingStrategyConfiguration("id",
				"merchant_${Math.abs(id.hashCode()) % 2}"));
		merchantRuleConfig.setTableShardingStrategyConfig(new InlineShardingStrategyConfiguration("id",
				"t_merchant_${Math.abs(id.hashCode()) % 3}"));
		merchantRuleConfig.setKeyGeneratorConfig(new KeyGeneratorConfiguration("SNOWFLAKE", "id", getProperties()));

		// actualDataNodes has no setter and must be initialized in the constructor, otherwise an
		// "actualDataNodes not set" error is raised (the 4.0 RC differs from the final release here).
		TableRuleConfiguration orderRuleConfig = new TableRuleConfiguration("t_merchant_order", "merchant_$->{0..1}.t_merchant_order_$->{0..2}");
		// With the built-in key generator the primary key must NOT appear in INSERT statements;
		// its value is generated automatically.
		orderRuleConfig.setKeyGeneratorConfig(new KeyGeneratorConfiguration("SNOWFLAKE", "id", getProperties()));
		orderRuleConfig.setDatabaseShardingStrategyConfig(new InlineShardingStrategyConfiguration("mid",
				"merchant_${Math.abs(mid.hashCode()) % 2}"));
		orderRuleConfig.setTableShardingStrategyConfig(new InlineShardingStrategyConfiguration("mid",
				"t_merchant_order_${Math.abs(mid.hashCode()) % 3}"));

		/*******
		 * 2. Vertical partitioning (table lives whole in each user shard)
		 */
		TableRuleConfiguration userRuleConfig = new TableRuleConfiguration("t_user", "user_$->{0..1}.t_user");
		userRuleConfig.setKeyGeneratorConfig(new KeyGeneratorConfiguration("SNOWFLAKE", "id", getProperties()));
		userRuleConfig.setDatabaseShardingStrategyConfig(new InlineShardingStrategyConfiguration("id",
				"user_${Math.abs(id.hashCode()) % 2}"));
		userRuleConfig.setTableShardingStrategyConfig(new InlineShardingStrategyConfiguration("id",
				"t_user"));

		// FIX(review): the original rule pointed actualDataNodes at "t_user_$->{0..2}" while the
		// table strategy produced "t_user_order_${... % 2}", and the database expression hashed
		// "id" although the declared sharding column is "user_id" (the inline expression only
		// binds the declared column). Both are corrected below.
		TableRuleConfiguration userOrderRuleConfig = new TableRuleConfiguration("t_user_order", "user_$->{0..1}.t_user_order_$->{0..1}");
		userOrderRuleConfig.setKeyGeneratorConfig(new KeyGeneratorConfiguration("SNOWFLAKE", "id", getProperties()));
		userOrderRuleConfig.setDatabaseShardingStrategyConfig(new InlineShardingStrategyConfiguration("user_id",
				"user_${Math.abs(user_id.hashCode()) % 2}"));
		userOrderRuleConfig.setTableShardingStrategyConfig(new InlineShardingStrategyConfiguration("user_id",
				"t_user_order_${Math.abs(user_id.hashCode()) % 2}"));


		ShardingRuleConfiguration shardingRuleConfig = new ShardingRuleConfiguration();
		shardingRuleConfig.getTableRuleConfigs().add(merchantRuleConfig);
		shardingRuleConfig.getTableRuleConfigs().add(orderRuleConfig);
		shardingRuleConfig.getTableRuleConfigs().add(userRuleConfig);
		shardingRuleConfig.getTableRuleConfigs().add(userOrderRuleConfig);


		// Binding tables: sharded alike, so joins between them avoid Cartesian routing.
		shardingRuleConfig.getBindingTableGroups().add("t_merchant_order,t_merchant");

		shardingRuleConfig.getBindingTableGroups().add("t_user,t_user_order");

		// Broadcast table: replicated to every data source; each shard must contain it.
		shardingRuleConfig.getBroadcastTables().add("t_dict");
		Properties properties = new Properties();

		// Log the rewritten SQL statements.
		properties.setProperty("sql.show", "true");
		properties.setProperty("executor.size", "10");


		return ShardingDataSourceFactory.createDataSource(dataSourceMap,
				shardingRuleConfig, properties
		);
	}


	/**
	 * Builds the MyBatis {@link SqlSessionFactory} over the sharded data source.
	 * Fails fast with {@link IllegalStateException} instead of returning a null bean.
	 */
	@Bean(name = "sqlSessionFactory")
	public SqlSessionFactory sqlSessionFactory(
			@Qualifier("dataSource") DataSource dataSource) {
		try {
			SqlSessionFactoryBean bean = new SqlSessionFactoryBean();
			bean.setDataSource(dataSource);

			ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
			Resource[] configs = resolver.getResources("classpath*:mybatis/config/mybatis_config.xml");
			// Guard the [0] access: a missing config file would otherwise surface as an
			// opaque ArrayIndexOutOfBoundsException.
			if (configs.length == 0) {
				throw new IllegalStateException("mybatis/config/mybatis_config.xml not found on classpath");
			}
			bean.setConfigLocation(configs[0]);

			Resource[] mappers = resolver.getResources("classpath*:mybatis/mapper/*Mapper.xml");
			bean.setMapperLocations(mappers);
			return bean.getObject();
		} catch (Exception ex) {
			// FIX(review): the original swallowed the exception (printStackTrace) and returned
			// null, which breaks injection silently far from the root cause. Fail fast instead.
			throw new IllegalStateException("Failed to build SqlSessionFactory", ex);
		}
	}

	/** Transaction manager over the sharded data source. */
	@Bean(name = "tx")
	public DataSourceTransactionManager transactionManager(
			@Qualifier("dataSource") DataSource dataSource) {
		return new DataSourceTransactionManager(dataSource);
	}

	/** Thread-safe template wrapping the session factory. */
	@Bean(name = "sqlSessionTemplate")
	public SqlSessionTemplate sqlSessionTemplate(
			@Qualifier("sqlSessionFactory") SqlSessionFactory sqlSessionFactory) {
		return new SqlSessionTemplate(sqlSessionFactory);
	}


	/**
	 * Extra properties for the SNOWFLAKE key generator; currently empty.
	 * Uncomment "worker.id" to pin the snowflake worker id per instance.
	 */
	private static Properties getProperties(){
		Properties p = new Properties();
//		p.setProperty("worker.id","123");
		return p;
	}

}
