package com.xj.config;

import com.zaxxer.hikari.HikariDataSource;
import org.apache.shardingsphere.driver.api.ShardingSphereDataSourceFactory;
import org.apache.shardingsphere.infra.config.algorithm.ShardingSphereAlgorithmConfiguration;
import org.apache.shardingsphere.sharding.api.config.ShardingRuleConfiguration;
import org.apache.shardingsphere.sharding.api.config.rule.ShardingTableRuleConfiguration;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties;
import org.springframework.context.annotation.*;

import javax.sql.DataSource;
import java.sql.SQLException;
import java.util.*;

/**
 * Programmatic ShardingSphere-JDBC configuration: registers a single physical
 * data source ("ds") plus the sharding rule/algorithm for the Yh table.
 *
 * @author ricky
 * @date 2022/9/13
 */
@Configuration
public class ShardingJdbcConfig {

    /** Spring-bound connection settings (url, username, password, driver). */
    private final DataSourceProperties dataSourceProperties;

    /**
     * Last pool built by {@link #jdbcDatasource()}.
     * NOTE(review): mutable public static state is fragile; kept only for
     * backward compatibility with existing callers — confirm before removing.
     */
    public static HikariDataSource hikariDataSource;

    public ShardingJdbcConfig(DataSourceProperties dataSourceProperties) {
        this.dataSourceProperties = dataSourceProperties;
    }

    /**
     * Builds the auxiliary (non-sharded) Hikari pool handed to the sharding
     * algorithm, and publishes it through the static field.
     *
     * @return a freshly created pool targeting the configured database
     */
    public HikariDataSource jdbcDatasource() {
        HikariDataSource dataSource = newHikariDataSource();
        // Copy any additional matching properties from the Spring config.
        BeanUtils.copyProperties(dataSourceProperties, dataSource);
        // copyProperties can miss the url/jdbcUrl name mismatch — set explicitly.
        dataSource.setDriverClassName(dataSourceProperties.getDriverClassName());
        dataSource.setJdbcUrl(dataSourceProperties.getUrl());
        dataSource.setUsername(dataSourceProperties.getUsername());
        dataSource.setPassword(dataSourceProperties.getPassword());
        dataSource.setConnectionTimeout(1000 * 60); // 60s connection timeout
        hikariDataSource = dataSource;
        return dataSource;
    }

    /**
     * Primary application {@link DataSource}: a ShardingSphere proxy over the
     * single physical data source "ds", with the Yh table sharding rule.
     *
     * @return the sharding-aware data source
     * @throws SQLException if ShardingSphere fails to initialize
     */
    @Bean
    @Primary
    @Qualifier("ds")
    public DataSource dataSource() throws SQLException {
        Map<String, DataSource> dataSourceMap = new HashMap<>();
        dataSourceMap.put("ds", newHikariDataSource());

        // Per-algorithm props: the custom algorithm reads the auxiliary pool
        // from this map at init time (object value is intentional — Properties
        // is used as a plain Hashtable here, not via getProperty).
        Properties tableShardingAlgorithmProps = new Properties();
        tableShardingAlgorithmProps.put("jdbcDatasource", jdbcDatasource());

        // Global ShardingSphere props. BUG FIX: "sql-show" was previously put
        // into the algorithm props (as a Boolean), and that same object — live
        // DataSource included — was passed as the global props. Split them and
        // store the flag as a String per the Properties contract.
        Properties globalProps = new Properties();
        globalProps.setProperty("sql-show", Boolean.TRUE.toString());

        ShardingRuleConfiguration shardingRuleConfig = new ShardingRuleConfiguration();
        // Table rule for the sharded Yh table.
        shardingRuleConfig.getTables().add(YhStandardShardingAlgorithm.getShardingTableRuleConfig());
        // Register the sharding algorithm implementation under its TYPE key.
        shardingRuleConfig.getShardingAlgorithms().put(
                YhStandardShardingAlgorithm.TYPE,
                new ShardingSphereAlgorithmConfiguration(YhStandardShardingAlgorithm.TYPE, tableShardingAlgorithmProps));

        return ShardingSphereDataSourceFactory.createDataSource(
                dataSourceMap, Collections.singleton(shardingRuleConfig), globalProps);
    }

    /** Creates a Hikari pool from the Spring-bound connection settings. */
    private HikariDataSource newHikariDataSource() {
        HikariDataSource dataSource = new HikariDataSource();
        dataSource.setDriverClassName(dataSourceProperties.getDriverClassName());
        dataSource.setJdbcUrl(dataSourceProperties.getUrl());
        dataSource.setUsername(dataSourceProperties.getUsername());
        dataSource.setPassword(dataSourceProperties.getPassword());
        return dataSource;
    }

}
