/*
 * Copyright (c) Huawei Technologies Co., Ltd. 2012-2019. All rights reserved.
 */


import com.audaque.cloud.iqs.ZkHiveBean;
import com.audaque.cloud.iqs.utils.KerberosUtil;
import com.audaque.cloud.iqs.utils.LoginUtil;
import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.handlers.MapListHandler;
import org.apache.hadoop.conf.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.sql.*;
import java.util.*;

/**
 * JDBCExamplePreLogin
 *
 * @author
 * @since 8.0.0
 */
/**
 * Example HiveServer2 JDBC client: discovers the server through ZooKeeper,
 * performs a Kerberos login when the configured auth mode requires it, then
 * lists the tables of the {@code default} schema and runs a sample query.
 *
 * <p>Not thread-safe; intended as a runnable example only.
 *
 * @since 8.0.0
 */
public class JDBCExamplePreLogin {

    private static final Logger logger = LoggerFactory.getLogger(JDBCExamplePreLogin.class);

    /**
     * Reads the client properties file, fills a {@link ZkHiveBean} with the
     * connection settings and, when auth is {@code KERBEROS}, sets up the
     * JAAS / krb5 configuration and performs the ZooKeeper login.
     *
     * @return populated connection bean describing the HiveServer2 endpoint
     * @throws IOException if the properties file cannot be opened or read
     */
    private static ZkHiveBean init() throws IOException {
        // NOTE(review): developer-machine absolute path; consider making this configurable.
        String userdir = "D:\\code\\java\\iqs\\iqs-mrs\\src\\test\\resources\\";

        Properties clientInfo = new Properties();
        String hiveclientProp = userdir + "2d52254543ef58b2ffab80a535a289ac.properties";
        // try-with-resources closes the stream on all paths; the original close()
        // in a finally block could itself throw and mask the primary IOException,
        // and the catch that re-wrapped IOException in IOException added nothing.
        try (InputStream fileInputStream = new FileInputStream(new File(hiveclientProp))) {
            clientInfo.load(fileInputStream);
        }

        ZkHiveBean zkHiveBean = new ZkHiveBean();
        zkHiveBean.setZkQuorum(clientInfo.getProperty("zk.quorum"));
        zkHiveBean.setAuth(clientInfo.getProperty("auth"));
        zkHiveBean.setSaslQop(clientInfo.getProperty("sasl.qop"));
        zkHiveBean.setZooKeeperNamespace(clientInfo.getProperty("zooKeeperNamespace"));
        zkHiveBean.setServiceDiscoveryMode(clientInfo.getProperty("serviceDiscoveryMode"));
        zkHiveBean.setPrincipal(clientInfo.getProperty("principal"));
        zkHiveBean.setAuditAddition(clientInfo.getProperty("auditAddition"));
        String krb5File = userdir + "c28d9db84548195920c4536bbc957363.conf";
        System.setProperty("java.security.krb5.conf", krb5File);
        zkHiveBean.setUserName("linhaorui");
        zkHiveBean.setDriverClass("org.apache.hive.jdbc.HiveDriver");
        String userKeytabFile = userdir + "2f8cc5c29b978da652320e0ae6331d4b.keytab";

        if ("KERBEROS".equalsIgnoreCase(zkHiveBean.getAuth())) {
            // Secure mode: configure the client keytab and ZooKeeper authentication.
            String zookeeperDefaultServerPrincipal = "zookeeper/" + KerberosUtil.getUserRealm();
            LoginUtil.setJaasConf("Client", zkHiveBean.getUserName(), userKeytabFile);
            LoginUtil.setZookeeperServerPrincipal("zookeeper.server.principal", zookeeperDefaultServerPrincipal);

            // ZooKeeper login authentication.
            LoginUtil.login(zkHiveBean.getUserName(), userKeytabFile, krb5File, new Configuration());
        }

        // JVM parameters required when ZooKeeper has SSL enabled.
        LoginUtil.processZkSsl(clientInfo);
        return zkHiveBean;
    }

    /**
     * Builds the ZooKeeper-discovery JDBC URL from the bean's settings.
     * The Kerberos variant carries {@code sasl.qop}/{@code principal};
     * normal mode uses {@code auth=none}.
     *
     * @param zkHiveBean connection settings produced by {@link #init()}
     * @return complete {@code jdbc:hive2://...} URL
     */
    private static String buildUrl(ZkHiveBean zkHiveBean) {
        StringBuilder strBuilder = new StringBuilder("jdbc:hive2://").append(zkHiveBean.getZkQuorum()).append("/");

        if ("KERBEROS".equalsIgnoreCase(zkHiveBean.getAuth())) {
            strBuilder.append(";serviceDiscoveryMode=")
                    .append(zkHiveBean.getServiceDiscoveryMode())
                    .append(";zooKeeperNamespace=")
                    .append(zkHiveBean.getZooKeeperNamespace())
                    .append(";sasl.qop=")
                    .append(zkHiveBean.getSaslQop())
                    .append(";auth=")
                    .append(zkHiveBean.getAuth())
                    .append(";principal=")
                    .append(zkHiveBean.getPrincipal());
        } else {
            // Normal (non-secure) mode.
            strBuilder.append(";serviceDiscoveryMode=")
                    .append(zkHiveBean.getServiceDiscoveryMode())
                    .append(";zooKeeperNamespace=")
                    .append(zkHiveBean.getZooKeeperNamespace())
                    .append(";auth=none");
        }
        if (zkHiveBean.getAuditAddition() != null && !zkHiveBean.getAuditAddition().isEmpty()) {
            strBuilder.append(";auditAddition=").append(zkHiveBean.getAuditAddition());
        }
        return strBuilder.toString();
    }

    /**
     * Logs name and comment of every TABLE in the {@code default} schema.
     *
     * @param connection open Hive connection
     * @throws SQLException if the metadata query fails
     */
    private static void listTables(Connection connection) throws SQLException {
        DatabaseMetaData dbMetaData = connection.getMetaData();
        // try-with-resources: the original code leaked this ResultSet.
        try (ResultSet trs = dbMetaData.getTables(null, "default", "%", new String[]{"TABLE"})) {
            List<Map<String, Object>> list = new ArrayList<>();
            while (trs.next()) {
                Map<String, Object> map = new HashMap<>(16);
                map.put("table_name", trs.getString("TABLE_NAME"));
                map.put("table_comment", trs.getString("REMARKS"));
                list.add(map);
            }
            for (Map<String, Object> map : list) {
                // Parameterized logging instead of eager string concatenation.
                logger.info("tst:{}", map);
            }
        }
    }

    /**
     * Runs the given SELECT and logs each row as a column-name -> value map.
     *
     * @param connection open Hive connection
     * @param sql        SELECT statement to execute
     * @throws SQLException if the query fails
     */
    private static void runSampleQuery(Connection connection, String sql) throws SQLException {
        QueryRunner qRunner = new QueryRunner();
        // MapListHandler is typed as ResultSetHandler<List<Map<String, Object>>>,
        // so no raw Object + unchecked cast is needed.
        List<Map<String, Object>> rows = qRunner.query(connection, sql, new MapListHandler());
        for (Map<String, Object> row : rows) {
            logger.info("tst:{}", row);
        }
    }

    /**
     * Entry point: initializes security, builds the JDBC URL, then lists the
     * tables of the {@code default} schema and queries a sample table.
     *
     * @throws ClassNotFoundException if the Hive JDBC driver is not on the classpath
     * @throws SQLException           if connecting or querying fails
     * @throws IOException            if the client configuration cannot be read
     */
    public static void main(String[] args) throws ClassNotFoundException, SQLException, IOException {
        ZkHiveBean zkHiveBean = init();
        String url = buildUrl(zkHiveBean);

        Class.forName(zkHiveBean.getDriverClass());

        // try-with-resources closes the connection on every path, replacing the
        // manual close()/null/finally dance of the original.
        try (Connection connection = DriverManager.getConnection(url, zkHiveBean.getUserName(), "")) {
            listTables(connection);
            runSampleQuery(connection, "select * from accident333");

            // To load data into a table after creation, e.g. from HDFS:
            //   load data inpath '/tmp/employees.txt' overwrite into table employees_info;
        }
    }
}
