package com.kingsoft.dc.khaos.module.spark.util;


//import com.kingsoft.dc.khaos.extender.meta.model.ds.Connect;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.HadoopKerberosName;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.util.KerberosName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sun.security.krb5.Config;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Properties;

import static org.apache.hadoop.security.UserGroupInformation.setLoginUser;

/**
 * Helpers for logging into Kerberos and opening Hive JDBC connections.
 *
 * <p>All methods are static; this class is not meant to be instantiated.
 */
public class HiveUtils {

    private static final Logger log = LoggerFactory.getLogger(HiveUtils.class);

    /** Utility class — prevent instantiation. */
    private HiveUtils() {
    }

    /**
     * Performs a Kerberos login from a keytab and installs the resulting UGI
     * as the process-wide Hadoop login user.
     *
     * @param krb5Path   path to the krb5.conf configuration file
     * @param keytabPath path to the keytab file
     * @param principal  Kerberos principal to authenticate as
     * @throws Exception if the Kerberos login fails
     */
    public static void loginKerberos(String krb5Path,
                                     String keytabPath,
                                     String principal) throws Exception {
        try {
            log.info(" =>kerberos Info krb5Path:{} keytabPath:{} principal:{}", krb5Path, keytabPath, principal);
            // Point the JVM at the supplied krb5.conf and force the cached
            // Kerberos configuration to be re-read.
            System.setProperty("java.security.krb5.conf", krb5Path);
            Config.refresh();
            // Clear any previously logged-in user and stale auth-to-local
            // rules so repeated logins start from a clean slate.
            setLoginUser(null);
            HadoopKerberosName.setRules(null);
            Configuration conf = new Configuration();
            conf.set("hadoop.security.authentication", "kerberos");
            conf.setBoolean("hadoop.security.authorization", true);
            conf.set("hive.server2.authentication.kerberos.principal", principal);
            UserGroupInformation.setConfiguration(conf);
            UserGroupInformation ugi =
                    UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytabPath);
            ugi.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS);
            setLoginUser(ugi);
            log.info("=> kerberos rules:{}", KerberosName.getRules());
        } catch (Exception e) {
            log.error("=>kerberos  KrbException ：", e);
            // Rethrow the original exception instead of wrapping it in a new
            // Exception(e), which buried the real type and message behind an
            // extra layer callers had to unwrap.
            throw e;
        }
    }

    /**
     * Opens a Hive JDBC connection described by the supplied properties.
     *
     * @param connectType "Cluster" (case-insensitive) for ZooKeeper service
     *                    discovery, anything else (including null) for a
     *                    direct host:port connection
     * @param principal   HiveServer2 Kerberos principal to append to the URL;
     *                    skipped when blank
     * @param props       connection settings (hive.jdbc.host, hive.jdbc.port,
     *                    hive.jdbc.db, hive.jdbc.username, hive.jdbc.password,
     *                    hive.driver, hive.jdbc.zookeeperNamespace)
     * @return an open JDBC {@link Connection}; the caller is responsible for
     *         closing it
     * @throws SQLException if the connection cannot be established
     */
    public static Connection getJdbcConnect(String connectType, String principal, Properties props) throws SQLException {
        String host = props.getProperty("hive.jdbc.host");
        String instanceName = props.getProperty("hive.jdbc.db");
        String hiveDriver = props.getProperty("hive.driver");
        String port = props.getProperty("hive.jdbc.port");
        String userName = props.getProperty("hive.jdbc.username");
        String password = props.getProperty("hive.jdbc.password");
        String zookeeperNamespace = props.getProperty("hive.jdbc.zookeeperNamespace");

        Properties properties = getCommonProperties();

        // "Cluster" mode discovers HiveServer2 through ZooKeeper; otherwise
        // connect directly to host:port. Constant-first comparison avoids an
        // NPE when connectType is null (previously this would throw).
        String url;
        if ("Cluster".equalsIgnoreCase(connectType)) {
            url = String.format("jdbc:hive2://%s/%s;serviceDiscoveryMode=zooKeeper;zookeeperNamespace=%s", host, instanceName, zookeeperNamespace);
        } else {
            url = String.format("jdbc:hive2://%s:%s/%s", host, port, instanceName);
        }

        if (StringUtils.isNotBlank(principal)) {
            url = url + ";principal=" + principal;
        }

        if (StringUtils.isNotBlank(userName)) {
            properties.setProperty("username", userName);
        }

        properties.setProperty("driverClassName", hiveDriver);
        // Logged BEFORE the password is added so credentials never reach the
        // log output — keep this ordering.
        log.info("=> createDataSourceConnection properties  {}", properties);

        if (StringUtils.isNotBlank(password)) {
            properties.setProperty("password", password);
        }
        // Fail fast: cap the login wait at 10 seconds (process-wide setting).
        DriverManager.setLoginTimeout(10);
        log.info("=> hive url={}", url);

        return DriverManager.getConnection(url, properties);
    }

    /**
     * Builds a direct (non-ZooKeeper) Hive JDBC URL from the supplied
     * properties, optionally appending a Kerberos principal.
     *
     * @param principal HiveServer2 Kerberos principal; skipped when blank
     * @param props     connection settings (hive.jdbc.host, hive.jdbc.port,
     *                  hive.jdbc.db)
     * @return the assembled jdbc:hive2 URL
     */
    public static String getJdbcUrl(String principal, Properties props) {
        String host = props.getProperty("hive.jdbc.host");
        String instanceName = props.getProperty("hive.jdbc.db");
        String port = props.getProperty("hive.jdbc.port");
        String url = String.format("jdbc:hive2://%s:%s/%s", host, port, instanceName);
        if (StringUtils.isNotBlank(principal)) {
            url = url + ";principal=" + principal;
        }
        log.info("=> hive url={}", url);
        return url;
    }

    /**
     * Returns the shared connection-tuning properties.
     *
     * <p>NOTE(review): these keys (testWhileIdle, maxActive, removeAbandoned,
     * ...) look like Druid/DBCP pool settings — presumably this Properties
     * object is also consumed by a pooling data source elsewhere; verify
     * against callers, since {@link DriverManager} itself ignores them.
     */
    private static Properties getCommonProperties() {
        Properties properties = new Properties();
        properties.setProperty("timeBetweenEvictionRunsMillis", "1000");
        // Run the idle-connection check once timeBetweenEvictionRunsMillis
        // has elapsed.
        properties.setProperty("testWhileIdle", "true");
        // Validation SQL used for the connection check.
        properties.setProperty("validationQuery", "select 1");
        // Validation timeout, in seconds.
        properties.setProperty("validationQueryTimeout", "10");

        // Maximum wait when acquiring a connection, in milliseconds.
        properties.setProperty("maxWait", "5000");

        properties.setProperty("maxActive", "1");

        properties.setProperty("removeAbandoned", "true");
        // Abandoned-connection timeout, in seconds.
        properties.setProperty("removeAbandonedTimeout", "90");
        // Number of connections checked per eviction run; best kept equal to
        // maxActive.
        properties.setProperty("numTestsPerEvictionRun", "10");
        // Minimum time a connection may sit idle in the pool, in milliseconds.
        properties.setProperty("minEvictableIdleTimeMillis", "300000");
        return properties;
    }

}
