package com.example.yckjbigdataservice.config;

import com.example.yckjbigdataservice.utils.HbaseLoginUtil;
import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.security.User;
import org.apache.phoenix.jdbc.PhoenixDriver;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.DependsOn;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.Order;

import java.io.IOException;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.*;

@Configuration
@Slf4j
@Order(Ordered.LOWEST_PRECEDENCE)
@Configuration
@Slf4j
@Order(Ordered.LOWEST_PRECEDENCE)
public class HbaseConfig {

    /** System property naming the ZooKeeper server principal (required for Kerberos login). */
    private static final String ZOOKEEPER_SERVER_PRINCIPAL_KEY = "zookeeper.server.principal";

    /** JAAS login context name used by the ZooKeeper client. */
    private static final String ZOOKEEPER_DEFAULT_LOGIN_CONTEXT_NAME = "Client";

    /**
     * Properties for enabling encrypted HBase ZooKeeper communication
     */
    private static final String ZK_CLIENT_CNXN_SOCKET = "zookeeper.clientCnxnSocket";

    private static final String ZK_CLIENT_SECURE = "zookeeper.client.secure";

    private static final String ZK_SSL_SOCKET_CLASS = "org.apache.zookeeper.ClientCnxnSocketNetty";

    /** Kerberos principal used to authenticate against HBase. */
    @Value("${custom.hbase.principal}")
    private String principal;

    /** Path to the keytab file backing {@link #principal}. */
    @Value("${custom.hbase.user-keytab.file.path}")
    private String userKeytabFile;

    /** Path to the krb5 configuration file. NOTE: the property key intentionally keeps the historical "kbr5" spelling — changing it would break existing deployments. */
    @Value("${custom.hbase.kbr5.file.path}")
    private String krb5File;

    /** Directory containing core-site.xml, hdfs-site.xml and hbase-site.xml. */
    @Value("${custom.hbase.config-path}")
    private String confDir;

    /** Principal of the ZooKeeper server the HBase client connects to. */
    @Value("${custom.zookeeper.server.principal}")
    private String zookeeperPrincipal;

    /**
     * Creates an HBase {@link Connection} backed by a dedicated bounded thread pool.
     *
     * <p>The caller owns the returned connection and must close it; closing the
     * connection also shuts down the executor passed to it.
     *
     * @param createHbaseConfiguration fully initialized (and, if security is enabled,
     *                                 Kerberos-authenticated) HBase configuration
     * @return a new HBase connection
     * @throws IOException if the connection cannot be established
     */
    public static Connection getHbaseConnection(org.apache.hadoop.conf.Configuration createHbaseConfiguration) throws IOException {
        int corePoolSize = 5;
        int maximumPoolSize = 40;
        long keepAliveTime = 1000L;
        TimeUnit unit = TimeUnit.MILLISECONDS;
        BlockingQueue<Runnable> workQueue = new LinkedBlockingQueue<>();
        ThreadFactory threadFactory = Executors.defaultThreadFactory();
        RejectedExecutionHandler handler = new ThreadPoolExecutor.AbortPolicy();
        ExecutorService executor = new ThreadPoolExecutor(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, handler);
        try {
            return ConnectionFactory.createConnection(createHbaseConfiguration, executor);
        } catch (IOException | RuntimeException e) {
            // Fix: previously the executor leaked its threads whenever connection
            // creation failed; shut it down before propagating the error.
            executor.shutdown();
            throw e;
        }
    }

    /**
     * Builds the JDBC {@link Properties} for Phoenix connections by registering the
     * Phoenix driver and copying every entry of the HBase configuration.
     *
     * @param createHbaseConfiguration the initialized HBase configuration bean
     * @return JDBC properties mirroring the HBase configuration
     * @throws SQLException if the Phoenix driver cannot be registered
     */
    @Bean
    @DependsOn("createHbaseConfiguration")
    public Properties phoenixProperties(@Qualifier("createHbaseConfiguration") org.apache.hadoop.conf.Configuration createHbaseConfiguration) throws SQLException {
        // Referencing PhoenixDriver.INSTANCE already loads and initializes the class,
        // so the former reflective Class.forName lookup was redundant.
        DriverManager.registerDriver(PhoenixDriver.INSTANCE);
        Properties props = new Properties();
        // Expose every Hadoop/HBase configuration entry as a JDBC connection property.
        for (Map.Entry<String, String> entry : createHbaseConfiguration) {
            props.put(entry.getKey(), entry.getValue());
        }
        return props;
    }

    /**
     * Creates the HBase configuration bean: loads the site files, performs the
     * Kerberos login when HBase security is enabled, and applies the ZooKeeper
     * SSL settings.
     *
     * @return the ready-to-use HBase configuration
     * @throws IOException if the Kerberos login fails
     */
    @Bean
    public org.apache.hadoop.conf.Configuration createHbaseConfiguration() throws IOException {
        org.apache.hadoop.conf.Configuration conf = init();
        if (User.isHBaseSecurityEnabled(conf)) {
            System.setProperty(ZOOKEEPER_SERVER_PRINCIPAL_KEY, zookeeperPrincipal);
            HbaseLoginUtil.setJaasConf(ZOOKEEPER_DEFAULT_LOGIN_CONTEXT_NAME, principal, userKeytabFile);
            HbaseLoginUtil.login(principal, userKeytabFile, krb5File, conf);
        }
        handleZkSslEnabled(conf);
        return conf;
    }

    /**
     * Loads the base configuration from the configured conf directory.
     *
     * @return a configuration with core-site.xml, hdfs-site.xml and hbase-site.xml
     *         applied and keytab auto-renewal enabled
     * @throws IOException declared for interface stability; resource loading is lazy
     */
    public org.apache.hadoop.conf.Configuration init() throws IOException {
        // Default load from conf directory
        org.apache.hadoop.conf.Configuration conf = HBaseConfiguration.create();
        conf.addResource(new Path(confDir, "core-site.xml"), false);
        conf.addResource(new Path(confDir, "hdfs-site.xml"), false);
        conf.addResource(new Path(confDir, "hbase-site.xml"), false);
        // Enable automatic keytab renewal so long-lived connections keep a valid ticket.
        conf.set("hadoop.kerberos.keytab.login.autorenewal.enabled", "true");
        return conf;
    }

    /**
     * Applies or clears the JVM-wide ZooKeeper SSL client settings depending on
     * the HBASE_ZK_SSL_ENABLED flag in the configuration.
     *
     * @param conf the HBase configuration to read the flag from
     */
    private void handleZkSslEnabled(org.apache.hadoop.conf.Configuration conf) {
        boolean zkSslEnabled = conf.getBoolean("HBASE_ZK_SSL_ENABLED", false);
        if (zkSslEnabled) {
            System.setProperty(ZK_CLIENT_CNXN_SOCKET, ZK_SSL_SOCKET_CLASS);
            System.setProperty(ZK_CLIENT_SECURE, "true");
        } else {
            // clearProperty is a no-op for absent keys, so no null guard is needed.
            System.clearProperty(ZK_CLIENT_CNXN_SOCKET);
            System.clearProperty(ZK_CLIENT_SECURE);
        }
    }
}
