package com.hzy.sharding.config;

import com.alibaba.druid.pool.DruidDataSource;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.shardingsphere.api.config.masterslave.MasterSlaveRuleConfiguration;
import org.apache.shardingsphere.api.config.sharding.KeyGeneratorConfiguration;
import org.apache.shardingsphere.api.config.sharding.ShardingRuleConfiguration;
import org.apache.shardingsphere.api.config.sharding.TableRuleConfiguration;
import org.apache.shardingsphere.api.config.sharding.strategy.InlineShardingStrategyConfiguration;
import org.apache.shardingsphere.api.config.sharding.strategy.NoneShardingStrategyConfiguration;
import org.apache.shardingsphere.api.config.sharding.strategy.ShardingStrategyConfiguration;
import org.apache.shardingsphere.core.rule.BindingTableRule;
import org.apache.shardingsphere.core.rule.ShardingRule;
import org.apache.shardingsphere.core.rule.TableRule;
import org.apache.shardingsphere.core.yaml.swapper.ShardingRuleConfigurationYamlSwapper;
import org.apache.shardingsphere.encrypt.api.EncryptRuleConfiguration;
import org.apache.shardingsphere.shardingjdbc.api.ShardingDataSourceFactory;
import org.apache.shardingsphere.shardingjdbc.jdbc.core.datasource.ShardingDataSource;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.mybatis.spring.annotation.MapperScan;
import org.mybatis.spring.mapper.MapperScannerConfigurer;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.transaction.PlatformTransactionManager;

import javax.sql.DataSource;
import java.sql.SQLException;
import java.util.*;

/**
 * Wires a Druid {@link DataSource} into an Apache ShardingSphere
 * {@link ShardingDataSource} and exposes the MyBatis session factory and
 * transaction manager on top of it.
 *
 * <p>Sharding layout: each logical table {@code t} is split into physical
 * tables {@code t_1} and {@code t_2} inside the single database
 * {@code data1}, routed by the inline expression {@code id % 2 + 1}.
 */
@Configuration
public class DataSourceConfig {

    /**
     * Physical Druid data source, populated from the
     * {@code spring.datasource.*} properties.
     *
     * @return the raw data source registered under bean name {@code data1}
     */
    @Bean(name = "data1")
    @ConfigurationProperties(prefix = "spring.datasource")
    public DataSource dataSource() {
        return new DruidDataSource();
    }

    /**
     * Sharding-aware logical data source wrapping the physical one.
     *
     * <p>The factory method is named {@code shardingDataSource} (rather than
     * overloading {@code dataSource()}) because Spring discourages overloaded
     * {@code @Bean} factory methods; the bean name is still
     * {@code "dataSource"}, so injection by name is unchanged.
     *
     * @param data1 the physical data source (bean {@code data1})
     * @return the ShardingSphere data source
     * @throws SQLException if the sharding data source cannot be built
     */
    @Bean(name = "dataSource")
    public ShardingDataSource shardingDataSource(@Qualifier("data1") DataSource data1) throws SQLException {
        // Physical data-source name -> data source. With two or more databases
        // a default database must be set, and its name must be one of these keys.
        Map<String, DataSource> dataSourceMap = new HashMap<>();
        dataSourceMap.put("data1", data1);

        // Logical table names covered by the sharding rule
        List<String> tableNames = Arrays.asList("t_student");

        return new ShardingDataSource(dataSourceMap, shardingRule(tableNames), getProperties());
    }

    /**
     * Assembles the complete {@link ShardingRule}.
     *
     * @param tableNames logical table names to build per-table rules for
     * @return the sharding rule used by the sharding data source
     */
    public ShardingRule shardingRule(List<String> tableNames) {
        ShardingRuleConfiguration shardingRuleConfig = new ShardingRuleConfiguration();
        // Tables without an explicit rule are routed to this data source
        shardingRuleConfig.setDefaultDataSourceName("data1");
        // No table sharding by default
        shardingRuleConfig.setDefaultTableShardingStrategyConfig(new NoneShardingStrategyConfiguration());
        // No database sharding by default
        shardingRuleConfig.setDefaultDatabaseShardingStrategyConfig(new NoneShardingStrategyConfiguration());
        // Default primary-key generation: SNOWFLAKE on column "id"
        shardingRuleConfig.setDefaultKeyGeneratorConfig(new KeyGeneratorConfiguration("SNOWFLAKE", "id"));
        // Per-table sharding rules
        shardingRuleConfig.setTableRuleConfigs(tableRuleConfigurations(tableNames));
        // Binding tables share sharding keys, so joins avoid cartesian routing
        shardingRuleConfig.setBindingTableGroups(Arrays.asList("t_student"));
        // Broadcast tables are replicated to every data source
        shardingRuleConfig.setBroadcastTables(Arrays.asList("t_config"));

        List<String> dataSourceNames = new ArrayList<>();
        dataSourceNames.add("data1");
        return new ShardingRule(shardingRuleConfig, dataSourceNames);
    }

    /**
     * Builds one {@link TableRuleConfiguration} per logical table: actual
     * data nodes {@code data1.<table>_1..2}, sharded by {@code id % 2 + 1},
     * with SNOWFLAKE key generation on {@code id}.
     *
     * @param tableNames logical table names
     * @return one rule configuration per table
     */
    public List<TableRuleConfiguration> tableRuleConfigurations(List<String> tableNames) {
        List<TableRuleConfiguration> tableRuleConfigs = new ArrayList<>(tableNames.size());
        for (String table : tableNames) {
            // Physical nodes, e.g. data1.t_student_1, data1.t_student_2
            String actualDataNodes = "data1.".concat(table).concat("_$->{1..2}");
            TableRuleConfiguration ruleConfiguration = new TableRuleConfiguration(table, actualDataNodes);
            // Inline (Groovy) expression routing rows to _1/_2 by id. The
            // original code instantiated an unimported ModuloTableShardingAlgorithm
            // here; InlineShardingStrategyConfiguration is the supported way
            // to use a $->{...} expression as a table-sharding strategy.
            String algorithmExpression = table.concat("_$->{id % 2 + 1}");
            ruleConfiguration.setTableShardingStrategyConfig(
                    new InlineShardingStrategyConfiguration("id", algorithmExpression));
            // SNOWFLAKE primary-key generation on column "id"
            ruleConfiguration.setKeyGeneratorConfig(new KeyGeneratorConfiguration("SNOWFLAKE", "id"));

            tableRuleConfigs.add(ruleConfiguration);
        }
        return tableRuleConfigs;
    }

    /**
     * MyBatis session factory bound to the sharding data source.
     *
     * @param dataSource the sharding-aware data source (bean {@code dataSource})
     * @return the session factory
     * @throws Exception if mapper resources cannot be resolved
     */
    @Bean
    public SqlSessionFactory sqlSessionFactory(@Qualifier("dataSource") DataSource dataSource) throws Exception {
        SqlSessionFactoryBean factoryBean = new SqlSessionFactoryBean();
        factoryBean.setDataSource(dataSource);
        factoryBean.setTypeAliasesPackage("com.hzy.sharding.entity");
        // Location of the mapper XML files
        Resource[] resources = new PathMatchingResourcePatternResolver().getResources("classpath:mapper/*.xml");
        factoryBean.setMapperLocations(resources);
        return factoryBean.getObject();
    }

    /**
     * Transaction manager for the sharding data source.
     *
     * @param dataSource the sharding-aware data source (bean {@code dataSource})
     * @return the platform transaction manager
     */
    @Bean
    public PlatformTransactionManager transactionManager(@Qualifier("dataSource") DataSource dataSource) {
        return new DataSourceTransactionManager(dataSource);
    }

    /**
     * MyBatis mapper scanning configuration.
     *
     * @return the scanner that registers mapper interfaces under
     *         {@code com.hzy.sharding.dao}
     */
    @Bean
    public MapperScannerConfigurer scannerConfigurer() {
        MapperScannerConfigurer configurer = new MapperScannerConfigurer();
        configurer.setSqlSessionFactoryBeanName("sqlSessionFactory");
        configurer.setBasePackage("com.hzy.sharding.dao");
        return configurer;
    }

    /**
     * ShardingSphere runtime properties.
     *
     * @return properties controlling logging, executor size and metadata checks
     */
    private static Properties getProperties() {
        Properties props = new Properties();
        // Print SQL parse/rewrite logs
        props.setProperty("sql.show", Boolean.TRUE.toString());
        // Worker threads for SQL execution; 0 means unbounded
        props.setProperty("executor.size", "4");
        // Max connections per physical database allocated to a single query
        props.setProperty("max.connections.size.per.query", "1");
        // Whether to verify sharded-table metadata consistency at startup
        props.setProperty("check.table.metadata.enabled", "false");
        return props;
    }
}
