// NOTE(review): this entire file is commented-out dead code. Prefer deleting it and
// relying on version-control history, or re-enable it deliberately. Presumably it was
// disabled in favor of dynamic-datasource auto-configuration — confirm before removing.
//package com.audaque.encoding.config;
//
//import com.alibaba.druid.pool.DruidDataSource;
//import com.baomidou.dynamic.datasource.DynamicRoutingDataSource;
//import com.baomidou.dynamic.datasource.creator.DataSourceProperty;
//import com.baomidou.dynamic.datasource.provider.DynamicDataSourceProvider;
//import com.baomidou.dynamic.datasource.spring.boot.autoconfigure.DynamicDataSourceProperties;
//import jakarta.annotation.Resource;
//import lombok.SneakyThrows;
//import lombok.extern.slf4j.Slf4j;
//import org.apache.hadoop.security.UserGroupInformation;
//import org.springframework.beans.factory.annotation.Value;
//import org.springframework.context.annotation.Configuration;
//import org.springframework.context.annotation.Primary;
//
//import javax.sql.DataSource;
//import java.io.IOException;
//import java.util.List;
//import java.util.Properties;
//
///**
// * <p>
// * Manually registers multiple data sources: builds a Kerberos-authenticated Hive
// * {@code DruidDataSource} and adds it to the {@code DynamicRoutingDataSource}
// * alongside the data sources declared in {@code DynamicDataSourceProperties}.
// * </p>
// *
// * @author 杨燚 (Yang Yi)
// * @since 2024/4/8
// */
//@Slf4j
//@Configuration
//public class DataSourceConfig {
//
//    @Resource
//    private DynamicDataSourceProperties properties;
//
//    @Value("${kerberos.krb5Conf}")
//    String krb5Conf;
//
//    @Value("${kerberos.userKeytab}")
//    String userKeytab;
//
//    @Value("${kerberos.logDebug}")
//    String logDebug;
//
//    @Value("${kerberos.principal}")
//    String principal;
//
//
//
//    @Primary
////    @Bean  // NOTE(review): @Bean is disabled — even if this class were uncommented, the bean would not register
//    @SneakyThrows({IOException.class})
//    public DataSource dataSource(List<DynamicDataSourceProvider> providers) {
//        DynamicRoutingDataSource dataSource = new DynamicRoutingDataSource(providers);
//        dataSource.setPrimary(properties.getPrimary());
//        dataSource.setStrict(properties.getStrict());
//        dataSource.setStrategy(properties.getStrategy());
//        dataSource.setP6spy(properties.getP6spy());
//        dataSource.setSeata(properties.getSeata());
//
//        DruidDataSource druidDataSource = new DruidDataSource();
//        DataSourceProperty hiveDb = properties.getDatasource().get("hive");
//        if (hiveDb == null) {
//            log.warn("数据源注入失败，未找到名为[hive]的数据源");
//            return dataSource;
//        }
//        druidDataSource.setDriverClassName(hiveDb.getDriverClassName());
//        druidDataSource.setUrl(hiveDb.getUrl());
//
//        Properties props = new Properties();
//        props.put("auth", "KERBEROS");
//        druidDataSource.setConnectProperties(props);
//
//        // 配置kerberos认证
//        System.setProperty("java.security.krb5.conf", krb5Conf);
//        System.setProperty("sun.security.krb5.debug", logDebug);
//        org.apache.hadoop.conf.Configuration configuration = new org.apache.hadoop.conf.Configuration();
//        configuration.set("hadoop.security.authentication", "Kerberos");
//        UserGroupInformation.setConfiguration(configuration);
//
//        // 使用kerberos凭证进行登录
//        String krb5Principal = principal;
//        UserGroupInformation.loginUserFromKeytab(krb5Principal, userKeytab);
//        dataSource.addDataSource("hive", druidDataSource);
//
//        return dataSource;
//    }
//
//}
