package avicit.bdp.dcs.utils;

import avicit.bdp.core.constant.Constants;
import com.csvreader.CsvReader;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

/**
 * @金航数码科技有限责任公司
 * @作者：developer
 * @邮箱：developer@avic-digital.com
 * @创建时间： 2020-10-20
 * @类说明：HiveTest（HIVE JDBC 工具类）
 * @修改记录：
 * @注意事项：
 * @主要功能：HIVE JDBC操作
 */
public class HiveTest {

    /**
     * Marker used to split the "show create table" DDL at the LOCATION clause.
     */
    private static final String DELIMITER_LOCATION = "LOCATION";

    /**
     * Marker used to split the "show create table" DDL at the TBLPROPERTIES clause.
     */
    private static final String DELIMITER_TBL = "TBLPROPERTIES";

    /**
     * Lists all tables in the database that the given connection points to.
     *
     * @param conn     open Hive JDBC connection; owned by the caller and NOT closed here
     * @param url      unused, kept only for backward compatibility with existing callers
     * @param username unused, kept only for backward compatibility with existing callers
     * @param password unused, kept only for backward compatibility with existing callers
     * @return table names; empty list on failure (never null)
     */
    public static List<String> getAllTablesByDbName(Connection conn, String url, String username, String password) {
        List<String> tables = new ArrayList<String>();

        // try-with-resources guarantees Statement/ResultSet are closed even on error;
        // the Connection belongs to the caller and is intentionally left open.
        try (Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("show tables")) {
            while (rs.next()) {
                tables.add(rs.getString(1));
            }
        } catch (Exception e) {
            System.out.println("获取所有表失败，errMsg=" + e.getMessage());
        }

        return tables;
    }

    /**
     * Resolves the absolute HDFS path of a Hive table via "show create table",
     * using an already-open connection.
     *
     * @param conn      open Hive JDBC connection; owned by the caller and NOT closed here
     * @param tableName table to resolve. NOTE(review): concatenated into the SQL text
     *                  (Hive DDL cannot be parameterized), so it must come from a
     *                  trusted source — do not feed it raw user input.
     * @return absolute HDFS path (e.g. /warehouse/tablespace/managed/hive/test.db/t1),
     *         or null on failure
     */
    public static String getHiveTableLocation1(Connection conn, String tableName) {
        try (Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("show create table " + tableName)) {
            // The DDL comes back one line per result row; join into a single string.
            StringBuilder ddl = new StringBuilder();
            while (rs.next()) {
                ddl.append(rs.getString(1));
            }
            return parseLocationPath(ddl.toString(), tableName);
        } catch (Exception e) {
            e.printStackTrace();
            System.out.println("获取表:" + tableName + "对应的HDFS失败，errMsg=" + e.getMessage());
            return null;
        }
    }

    /**
     * Resolves the absolute HDFS path of a Hive table, creating (and closing)
     * its own JDBC connection. Delegates to {@link #getHiveTableLocation1} so the
     * DDL parsing logic exists in exactly one place.
     *
     * @param url       Hive JDBC URL
     * @param username  JDBC user
     * @param password  JDBC password
     * @param tableName table to resolve (trusted input only — see getHiveTableLocation1)
     * @return absolute HDFS path, or null on failure
     */
    public static String getHiveTableLocation(String url, String username, String password, String tableName) {
        try {
            Class.forName("org.apache.hive.jdbc.HiveDriver");
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            return null;
        }

        // Unlike getHiveTableLocation1, this method owns the connection and must close it.
        try (Connection conn = DriverManager.getConnection(url, username, password)) {
            return getHiveTableLocation1(conn, tableName);
        } catch (Exception e) {
            e.printStackTrace();
            System.out.println("获取表:" + tableName + "对应的HDFS失败，errMsg=" + e.getMessage());
            return null;
        }
    }

    /**
     * Extracts the absolute HDFS path from a full "show create table" DDL string.
     * Expected shape: {@code ... LOCATION 'hdfs://host:port/abs/path' TBLPROPERTIES ...}
     *
     * Returns null (after logging) as soon as any expected marker is missing,
     * instead of running on into an ArrayIndexOutOfBoundsException as the old
     * inline parsing did. (String.split never returns null, so only the length
     * checks are needed.)
     *
     * @param ddl       concatenated "show create table" output
     * @param tableName used only for error messages
     * @return absolute path with the defaultFS prefix (hdfs://host:port) stripped,
     *         or null when the DDL does not match the expected shape
     */
    private static String parseLocationPath(String ddl, String tableName) {
        String[] byLocation = ddl.split(DELIMITER_LOCATION);
        if (byLocation.length < 2) {
            System.out.println("获取表:" + tableName + "对应的HDFS文件路径失败");
            return null;
        }

        String[] byTblProps = byLocation[1].split(DELIMITER_TBL);
        if (byTblProps.length < 2) {
            System.out.println("获取表:" + tableName + "对应的HDFS文件路径失败");
            return null;
        }

        // The location sits between single quotes:
        // 'hdfs://hdp28:8020/warehouse/tablespace/managed/hive/test.db/devicemm1'
        String[] quoted = byTblProps[0].trim().split("'");
        if (quoted.length < 2) {
            System.out.println("获取表:" + tableName + "对应的HDFS文件路径失败");
            return null;
        }
        String fullPath = quoted[1];

        // Strip the defaultFS prefix (hdfs://hdp28:8020) and keep the absolute path
        // (/warehouse/tablespace/managed/hive/test.db/devicemm1).
        String[] byColon = fullPath.split(":");
        if (byColon.length < 3) {
            System.out.println("获取表:" + tableName + "对应的HDFS文件路径失败");
            return null;
        }

        int firstSlash = byColon[2].indexOf('/');
        if (firstSlash < 0) {
            System.out.println("获取表:" + tableName + "对应的HDFS文件路径失败");
            return null;
        }
        return byColon[2].substring(firstSlash);
    }

    /**
     * Opens a Hive JDBC connection after performing a Kerberos keytab login.
     * The caller owns the returned connection and is responsible for closing it.
     *
     * @return a new connection, or null when login / driver loading / connect fails
     */
    private static Connection getConnection(String url, String username, String password) {
        try {
            // Kerberos login must happen before the JDBC handshake.
            System.setProperty("java.security.krb5.conf", PropertyUtil.getString("java.security.krb5.conf.path"));
            Configuration configuration = new Configuration();
            configuration.setBoolean("hadoop.security.authorization", true);
            configuration.set("hadoop.security.authentication", Constants.KERBEROS);
            UserGroupInformation.setConfiguration(configuration);
            UserGroupInformation.loginUserFromKeytab(Constants.LOGIN_USER_KEY_TAB_HIVE_USERNAME,
                    PropertyUtil.getString(Constants.LOGIN_USER_KEY_TAB_HIVE_PATH));

            Class.forName("org.apache.hive.jdbc.HiveDriver");
            return DriverManager.getConnection(url, username, password);
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Builds a Kerberos-authenticated HDFS FileSystem from configured properties.
     * A login failure is logged but deliberately not rethrown (best-effort, as
     * before); the subsequent FileSystem.get will then fail with its own error.
     *
     * @return FileSystem bound to the configured fs.defaultFS
     * @throws Exception when the FileSystem itself cannot be created
     */
    private static FileSystem getFileSystem() throws Exception {
        System.setProperty("java.security.krb5.conf", PropertyUtil.getString("java.security.krb5.conf.path"));
        Configuration configuration = new Configuration();
        configuration.set("fs.defaultFS", PropertyUtil.getString("fs.defaultFS"));
        configuration.set("hadoop.security.authentication", Constants.KERBEROS);
        UserGroupInformation.setConfiguration(configuration);

        String kerberosPrincipal = PropertyUtil.getString("login.user.keytab.username");
        String kerberosKeytabFilePath = PropertyUtil.getString("login.user.keytab.path");
        try {
            UserGroupInformation.loginUserFromKeytab(kerberosPrincipal, kerberosKeytabFilePath);
            System.out.println("kerberos认证成功");
        } catch (Exception e) {
            e.printStackTrace();
            System.out.println(String.format(
                    "kerberos认证失败,请确定kerberosKeytabFilePath[%s]和kerberosPrincipal[%s]填写正确",
                    kerberosKeytabFilePath, kerberosPrincipal));
        }

        return FileSystem.get(configuration);
    }

    /**
     * Reads the next CSV record.
     *
     * @return the record's column values, or null at end of input
     */
    private static String[] splitBufferedReader(CsvReader csvReader) throws IOException {
        return csvReader.readRecord() ? csvReader.getValues() : null;
    }

    /**
     * Smoke test: streams a CSV file from HDFS and prints every column value.
     * All readers are now closed (the old version leaked the HDFS input stream,
     * the BufferedReader and the CsvReader).
     */
    private static void test() throws Exception {
        String filepath = "hdfs://192.168.0.28:8020/dcs/动力信息/aaa.csv";
        FileSystem fs = getFileSystem();

        try (InputStream in = fs.open(new Path(filepath));
             BufferedReader reader = new BufferedReader(
                     new InputStreamReader(in, StandardCharsets.UTF_8), 8192)) {
            CsvReader csvReader = new CsvReader(reader);
            csvReader.setDelimiter(',');
            try {
                String[] parseRows;
                while ((parseRows = splitBufferedReader(csvReader)) != null) {
                    for (String columnValue : parseRows) {
                        System.out.println("columnValue = " + columnValue);
                    }
                }
            } finally {
                csvReader.close();
            }
        }
    }

    /**
     * Manual entry point for ad-hoc verification against a live cluster.
     * Example JDBC URL:
     * jdbc:hive2://host:2181/db;serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=hiveserver2;principal=hive/host@REALM
     */
    public static void main(String[] args) throws Exception {
        test();

        String url = PropertyUtil.getString("url");
        String username = PropertyUtil.getString("username");
        String password = PropertyUtil.getString("password");
        String tableName = PropertyUtil.getString("tableName");
        // NOTE(review): debug leftover — hard-coded override of the configured
        // table name; remove once the configured value should take effect.
        tableName = "aaaa";

        // Sample usage, kept as reference:
        // Connection conn = getConnection(url, username, password);
        // List<String> tables = getAllTablesByDbName(conn, url, username, password);
        // String path = getHiveTableLocation1(conn, tableName);
        // String path1 = getHiveTableLocation(url, username, password, tableName);
    }

}
