package chan.module.project.hw.mrs.hive2.v331;


import chan.module.project.comm.ConnectionCommonUtil;
import chan.module.project.comm.FieldMeta;
import chan.module.project.comm.PropertiesUtil;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.Scanner;

/**
 * Interactive Hive2 JDBC client for MRS 3.3.1: logs in (Kerberos or normal mode)
 * via ZooKeeper service discovery, then runs an interactive SQL console reading
 * commands from stdin.
 *
 * <p>Usage: {@code java MRSHive2V331Main config=/path/to/config.properties}
 *
 * <p>NOTE(review): this class keeps its JDBC handles in static fields and is
 * therefore single-threaded by design — do not call {@link #executeSQL()} from
 * multiple threads.
 */
public class MRSHive2V331Main {
    private static final Logger logger = LoggerFactory.getLogger(MRSHive2V331Main.class);

    /** Fully qualified class name of the Hive JDBC driver. */
    private static final String HIVE_DRIVER = "org.apache.hive.jdbc.HiveDriver";

    /** System property key carrying the ZooKeeper server principal. */
    private static final String ZOOKEEPER_SERVER_PRINCIPAL_KEY = "zookeeper.server.principal";
    /** Computed ZooKeeper server principal, e.g. {@code zookeeper/hadoop.<realm>}. */
    private static String ZOOKEEPER_DEFAULT_SERVER_PRINCIPAL = null;

    // NOTE(review): the three fields below are never read inside this class;
    // kept for source compatibility — confirm they are unused and remove.
    private static final String ZOOKEEPER_DEFAULT_LOGIN_CONTEXT_NAME = "Client";
    private static String KRB5_FILE = null;
    private static String USER_KEYTAB_FILE = null;

    /* ZooKeeper quorum: comma-separated list of node ip:port pairs. */
    private static String zkQuorum = null;
    /* Authentication mode from the client config ("KERBEROS" or anything else for normal mode). */
    private static String auth = null;
    private static String sasl_qop = null;
    private static String zooKeeperNamespace = null;
    private static String serviceDiscoveryMode = null;
    private static String principal = null;
    private static String auditAddition = null;
    /** Resolved Kerberos auth host name, e.g. {@code hadoop.<realm>}. */
    private static String AUTH_HOST_NAME = null;
    /** Properties loaded from the hive client config file. */
    private static Properties CLIENT_INFO;

    // Shared JDBC handles for the interactive session (single-threaded use only).
    private static Statement statement = null;
    private static ResultSet resultSet = null;
    private static Connection connection = null;

    /**
     * Resolves the Kerberos auth host name {@code "hadoop.<realm>"} used to build
     * the ZooKeeper server principal. Prefers the {@code SERVER_REALM} system
     * property, falls back to the realm parsed from krb5.conf, and finally to
     * plain {@code "hadoop"} when no realm can be determined.
     *
     * @return the resolved auth host name (also cached in {@link #AUTH_HOST_NAME})
     */
    public static String getUserRealm() {
        String serverRealm = System.getProperty("SERVER_REALM");
        if (serverRealm == null || serverRealm.isEmpty()) {
            // BUG FIX: the original compared with `serverRealm != ""` (reference
            // equality), so an empty property value produced "hadoop." — use
            // isEmpty() and fall back to the krb5.conf default realm instead.
            serverRealm = KerberosUtil.getKrb5DomainRealm();
        }
        if (serverRealm != null && !serverRealm.isEmpty()) {
            AUTH_HOST_NAME = "hadoop." + serverRealm.toLowerCase();
        } else {
            AUTH_HOST_NAME = "hadoop";
        }
        return AUTH_HOST_NAME;
    }

    /**
     * Entry point. Expects a {@code config=<path>} argument pointing at the
     * properties file; dispatches on the {@code login.way} property
     * ("hive2" opens a Hive connection and starts the SQL console).
     *
     * @param args command-line arguments; only {@code config=...} is consumed
     * @throws Exception propagated from configuration loading or login
     */
    public static void main(String[] args) throws Exception {
        String config = null;
        for (String ars : args) {
            if (ars.startsWith("config=")) {
                config = ars.replace("config=", "");
                break;
            }
        }
        if (config == null) {
            logger.error("缺失config配置");
            return;
        }
        PropertiesUtil proUtil = new PropertiesUtil(config);
        String loginWay = proUtil.get("login.way");
        if ("hive2".equalsIgnoreCase(loginWay)) {
            logger.info("start login hive2.....");
            connection = loginHive2(proUtil);
            executeSQL();
        } else if ("hdfs".equalsIgnoreCase(loginWay)) {
            // Placeholder branch: hdfs login is not implemented in this class.
            logger.info("start login hdfs.....");
        }
        logger.info("close!");
    }

    /**
     * Interactive SQL loop reading commands from stdin until a command other
     * than "1"/"2"/"3" is entered:
     * <ul>
     *   <li>1 — execute a SQL script file (no result set expected)</li>
     *   <li>2 — execute a SQL query file and print the rows</li>
     *   <li>3 — execute a single SQL query statement and print the rows</li>
     * </ul>
     * Requires {@link #connection} to be open; registers a shutdown hook so
     * JDBC resources are released even on abnormal exit.
     */
    public static void executeSQL() {
        // Release JDBC resources on JVM shutdown (Ctrl+C etc.).
        Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
            @Override
            public void run() {
                close();
            }
        }));
        if (connection == null) {
            logger.info("连接为空，不执行查询！");
            return;
        }
        String command;
        Scanner scanner = new Scanner(System.in);
        while (true) {
            logger.info("输入执行命令：");
            logger.info("1-执行SQL脚本文件；2-执行SQL查询文件；3-执行SQL查询语句；其他-退出");
            command = scanner.next();
            if (!"1".equals(command) && !"2".equals(command) && !"3".equals(command)) {
                break;
            }

            try {
                // Commands "2" and "3" produce a result set; "1" is a plain execute.
                boolean isQuery = "2".equals(command) || "3".equals(command);
                String sql;
                if ("1".equals(command) || "2".equals(command)) {
                    logger.info("输入SQL文件路径：");
                    String chooseSqlFile = scanner.next();
                    logger.info("SQL文件路径为：{}", chooseSqlFile);
                    sql = FileUtils.readFileToString(new File(chooseSqlFile), "UTF-8");
                } else {
                    logger.info("输入SQL语句：");
                    sql = scanner.next();
                }
                logger.info("准备执行SQL: {}", sql);
                statement = connection.createStatement();
                if (isQuery) {
                    resultSet = statement.executeQuery(sql);
                    printResultSet(resultSet);
                } else {
                    boolean execute = statement.execute(sql);
                    logger.info("执行SQL完成！结果：{}", execute);
                }
            } catch (Exception e) {
                // BUG FIX: failures were logged at INFO level; use ERROR so they
                // are visible in production log filtering.
                logger.error("执行失败！", e);
            } finally {
                // BUG FIX: the original duplicated this cleanup in three places;
                // a finally block guarantees the statement is never leaked.
                releaseStatementResources();
            }
        }
        logger.info("执行SQL结束。");
    }

    /** Logs column metadata and every row of {@code rs} (one line per column). */
    private static void printResultSet(ResultSet rs) throws SQLException {
        ResultSetMetaData metaData = rs.getMetaData();
        int columnCount = metaData.getColumnCount();
        List<FieldMeta> fieldMetaList = new ArrayList<>(columnCount);
        // JDBC column indexes are 1-based.
        for (int i = 1; i <= columnCount; i++) {
            FieldMeta fieldMeta = new FieldMeta(metaData, i);
            fieldMetaList.add(fieldMeta);
            logger.info("结果字段元信息：{}", fieldMeta);
        }
        int row = 0;
        while (rs.next()) {
            row++;
            logger.info(">>>>>第{}行：", row);
            for (FieldMeta fieldMeta : fieldMetaList) {
                String originalColumnName = fieldMeta.getOriginalColumnName();
                logger.info("{} : {}", originalColumnName, rs.getString(originalColumnName));
            }
            logger.info("--------------------");
        }
    }

    /** Closes and clears the current result set and statement; the connection stays open. */
    private static void releaseStatementResources() {
        ConnectionCommonUtil.close(resultSet, statement, null);
        resultSet = null;
        statement = null;
    }

    /** Closes result set, statement and connection, and clears the static references. */
    private static void close() {
        ConnectionCommonUtil.close(resultSet, statement, connection);
        resultSet = null;
        statement = null;
        connection = null;
    }

    /**
     * Reads the client configuration, prepares Kerberos/ZooKeeper system
     * properties, builds the Hive2 JDBC URL (ZooKeeper service discovery) and
     * opens a connection.
     *
     * @param proUtil loaded top-level configuration; must provide
     *                {@code hive.client.config.file}, {@code user.keytab.file},
     *                {@code krb5.conf.file} and {@code login.user_name}
     * @return the opened connection, or {@code null} when a required property is
     *         missing or the connection attempt fails
     * @throws Exception if the driver class cannot be loaded or config loading fails
     */
    public static Connection loginHive2(PropertiesUtil proUtil) throws Exception {
        Properties prop = proUtil.getProp();
        // Required configuration entries.
        String hiveClientConfig = prop.getProperty("hive.client.config.file");
        String userKeytab = prop.getProperty("user.keytab.file");
        String krb5Conf = prop.getProperty("krb5.conf.file");
        String userName = prop.getProperty("login.user_name");
        logger.info("userName[{}],hiveClientConfig[{}],userKeytab[{}],krb5Conf[{}]", userName, hiveClientConfig, userKeytab, krb5Conf);
        if (userName == null || hiveClientConfig == null || userKeytab == null || krb5Conf == null) {
            logger.error("以上参数不能为空！");
            return null;
        }
        CLIENT_INFO = proUtil.loadConfig(hiveClientConfig).getProp();

        /*
         * zkQuorum format: "ip1:24002,ip2:24002,ip3:24002" — the business IPs of
         * the cluster's ZooKeeper nodes; the default port is 24002.
         */
        zkQuorum = CLIENT_INFO.getProperty("zk.quorum");
        auth = CLIENT_INFO.getProperty("auth");
        sasl_qop = CLIENT_INFO.getProperty("sasl.qop");
        zooKeeperNamespace = CLIENT_INFO.getProperty("zooKeeperNamespace");
        serviceDiscoveryMode = CLIENT_INFO.getProperty("serviceDiscoveryMode");
        principal = CLIENT_INFO.getProperty("principal");
        auditAddition = CLIENT_INFO.getProperty("auditAddition");
        System.setProperty("java.security.krb5.conf", krb5Conf);

        if ("KERBEROS".equalsIgnoreCase(auth)) {
            // Principal the client uses to authenticate against ZooKeeper.
            ZOOKEEPER_DEFAULT_SERVER_PRINCIPAL = "zookeeper/" + getUserRealm();
            System.setProperty(ZOOKEEPER_SERVER_PRINCIPAL_KEY, ZOOKEEPER_DEFAULT_SERVER_PRINCIPAL);
        }
        // JVM SSL properties required when ZooKeeper runs with SSL enabled.
        LoginUtil.processZkSsl(CLIENT_INFO);

        // Assemble the JDBC URL for ZooKeeper service discovery.
        StringBuilder strBuilder = new StringBuilder("jdbc:hive2://").append(zkQuorum).append("/");
        if ("KERBEROS".equalsIgnoreCase(auth)) {
            strBuilder
                    .append(";serviceDiscoveryMode=")
                    .append(serviceDiscoveryMode)
                    .append(";zooKeeperNamespace=")
                    .append(zooKeeperNamespace)
                    .append(";sasl.qop=")
                    .append(sasl_qop)
                    .append(";auth=")
                    .append(auth)
                    .append(";principal=")
                    .append(principal)
                    .append(";user.principal=")
                    .append(userName)
                    .append(";user.keytab=")
                    .append(userKeytab);
        } else {
            /* Normal (non-secure) mode. */
            strBuilder
                    .append(";serviceDiscoveryMode=")
                    .append(serviceDiscoveryMode)
                    .append(";zooKeeperNamespace=")
                    .append(zooKeeperNamespace)
                    .append(";auth=none");
        }
        if (auditAddition != null && !auditAddition.isEmpty()) {
            strBuilder.append(";auditAddition=").append(auditAddition);
        }
        String url = strBuilder.toString();
        // Load the Hive JDBC driver before asking DriverManager for a connection.
        Class.forName(HIVE_DRIVER);
        try {
            // In normal mode the second argument must be a real user name,
            // otherwise the session logs in as the anonymous user.
            Connection conn = DriverManager.getConnection(url, "", "");
            logger.info("Create connection success!!!!");
            return conn;
        } catch (Exception e) {
            logger.error("Create connection failed.", e);
            return null;
        }
    }

}
