package com.sh.data.engine.domain.shims.hive.util;

import com.google.common.base.Splitter;
import com.sh.data.engine.domain.base.model.entity.FieldInfo;
import com.sh.data.engine.domain.shims.db.model.PreviewDataDomain;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;

import java.math.BigInteger;
import java.net.ConnectException;
import java.net.SocketTimeoutException;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.sql.*;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Utility helpers for working with Hive over JDBC: JDBC-to-Hive type mapping,
 * CREATE TABLE / temporary-function DDL generation, connection handling,
 * table metadata retrieval, data preview, and HiveQL statement classification.
 */
public class HiveUtil {

    // Register the Hive JDBC driver once at class-load time so DriverManager
    // can resolve jdbc:hive2:// URLs. A missing driver jar makes this whole
    // class unusable, hence the hard Error from the static initializer.
    static {
        try {
            Class.forName("org.apache.hive.jdbc.HiveDriver");
        } catch (ClassNotFoundException e) {
            throw new Error(e);
        }
    }
    // Hive column type names emitted into generated DDL.
    private static final String HIVE_TYPE_TINYINT = "TINYINT";
    private static final String HIVE_TYPE_INT = "INT";
    private static final String HIVE_TYPE_BIGINT = "BIGINT";
    private static final String HIVE_TYPE_FLOAT = "FLOAT";
    private static final String HIVE_TYPE_DOUBLE = "DOUBLE";
    private static final String HIVE_TYPE_STRING = "STRING";
    private static final String HIVE_TYPE_BOOLEAN = "BOOLEAN";
    private static final String HIVE_TYPE_BINARY = "BINARY";
    private static final String HIVE_TYPE_DECIMAL = "DECIMAL";
    private static final String HIVE_TYPE_DATE = "DATE";
    private static final String HIVE_TYPE_TIMESTAMP = "TIMESTAMP";

    /**
     * Maps a {@link java.sql.Types} constant to the Hive column type name used
     * in generated DDL.
     *
     * <p>Character/CLOB variants and any unrecognized type map to STRING.
     * NUMERIC/DECIMAL map to DOUBLE (existing behavior), not DECIMAL.
     *
     * @param sqlType a constant from {@link java.sql.Types}
     * @return the Hive type name; never null or blank
     */
    public static String toHiveType(int sqlType) {
        final String hiveType;
        switch (sqlType) {
            case Types.TINYINT:
                hiveType = HIVE_TYPE_TINYINT;
                break;
            case Types.SMALLINT:
            case Types.INTEGER:
                hiveType = HIVE_TYPE_INT;
                break;
            case Types.BIGINT:
                hiveType = HIVE_TYPE_BIGINT;
                break;
            case Types.REAL:
            case Types.FLOAT:
            case Types.DOUBLE:
            case Types.NUMERIC:
            case Types.DECIMAL:
                hiveType = HIVE_TYPE_DOUBLE;
                break;
            case Types.BIT:
            case Types.BOOLEAN:
                hiveType = HIVE_TYPE_BOOLEAN;
                break;
            case Types.DATE:
                hiveType = HIVE_TYPE_DATE;
                break;
            case Types.TIME:
            case Types.TIMESTAMP:
                hiveType = HIVE_TYPE_TIMESTAMP;
                break;
            case Types.BINARY:
            case Types.VARBINARY:
            case Types.LONGVARBINARY:
            case Types.BLOB:
                hiveType = HIVE_TYPE_BINARY;
                break;
            default:
                // CHAR/VARCHAR/NCHAR/CLOB/NCLOB and everything else become STRING.
                hiveType = HIVE_TYPE_STRING;
        }
        return hiveType;
    }

    /**
     * Populates {@code hiveType} on each field from its JDBC {@code dataType}.
     * Null elements and fields without a JDBC type are skipped; a null or
     * empty list is a no-op.
     *
     * @param list fields to update in place; may be null or empty
     */
    public static void updateHiveType(List<FieldInfo> list) {
        if (CollectionUtils.isEmpty(list)) {
            return;
        }
        list.stream()
            .filter(Objects::nonNull)
            .forEach(
                e -> {
                    Integer dataType = e.getDataType();
                    // Fix: unboxing a null Integer into toHiveType(int) would
                    // throw NPE; skip fields without a JDBC type instead.
                    if (dataType == null) {
                        return;
                    }
                    String hiveType = toHiveType(dataType);
                    if (StringUtils.isBlank(hiveType)) {
                        return;
                    }
                    e.setHiveType(hiveType);
                });
    }

    /**
     * Builds a Hive {@code CREATE TABLE IF NOT EXISTS} statement.
     *
     * <p>When {@code comment} is null, the first non-blank table comment found
     * on a field is used. Field and table comments are escaped so embedded
     * single quotes or backslashes no longer break the generated DDL (fix).
     *
     * @param table         target table name (emitted between backticks)
     * @param comment       table comment; null falls back to field-carried comment
     * @param fieldList     column definitions; empty/null yields ""
     * @param partitionList partition column names, each typed as string; may be empty
     * @param supportRTF    true stores as ORC (rich text), false as TEXTFILE
     * @return the DDL statement, or an empty string when there are no fields
     */
    public static String getCreateTableSql(
        String table,
        String comment,
        List<FieldInfo> fieldList,
        List<String> partitionList,
        boolean supportRTF) {
        if (CollectionUtils.isEmpty(fieldList)) {
            return "";
        }

        // Fall back to the first non-blank table comment carried on a field.
        if (comment == null) {
            Optional<FieldInfo> optional =
                fieldList.stream()
                    .filter(Objects::nonNull)
                    .filter(e -> StringUtils.isNotBlank(e.getTableComment()))
                    .findFirst();
            if (optional.isPresent()) {
                comment = optional.get().getTableComment();
            }
        }

        updateHiveType(fieldList);

        StringBuilder sb = new StringBuilder(1024);
        sb.append("CREATE TABLE if not exists `").append(table).append("`(");

        for (int index = 0; index < fieldList.size(); index++) {
            FieldInfo field = fieldList.get(index);
            sb.append("`")
                .append(field.getFieldName())
                .append("` ")
                .append(field.getHiveType())
                .append(" COMMENT '")
                .append(escapeComment(field.getComment()))
                .append("'");
            sb.append(index < fieldList.size() - 1 ? ",\n" : ")\n");
        }
        sb.append("COMMENT '").append(escapeComment(comment)).append("'\n");

        if (CollectionUtils.isNotEmpty(partitionList)) {
            sb.append("PARTITIONED BY (");
            for (int index = 0; index < partitionList.size(); index++) {
                sb.append("`").append(partitionList.get(index)).append("` string");
                sb.append(index < partitionList.size() - 1 ? ",\n" : ")\n");
            }
        }
        // Rich-text payloads are stored as ORC, plain data as TEXTFILE.
        sb.append(supportRTF ? "STORED AS ORC" : "STORED AS TEXTFILE");

        return sb.toString();
    }

    /**
     * Escapes a comment for embedding between single quotes in Hive DDL;
     * a blank comment becomes a single space (preserving original behavior).
     */
    private static String escapeComment(String comment) {
        if (StringUtils.isBlank(comment)) {
            return " ";
        }
        // Escape backslash first, then the quote character itself.
        return comment.replace("\\", "\\\\").replace("'", "\\'");
    }

    /**
     * Builds the HiveQL statement that registers a temporary UDF backed by a jar.
     *
     * @param functionName name under which the function is registered
     * @param packageName  fully-qualified class name implementing the UDF
     * @param jarPath      path of the jar containing that class
     * @return the {@code create temporary function} statement
     */
    public static String getCreateFunctionSql(
        String functionName, String packageName, String jarPath) {
        return String.format(
            "create temporary function %s AS '%s' using jar '%s'",
            functionName, packageName, jarPath);
    }

    /**
     * Verifies that a Hive connection can be opened within {@code timeout}
     * seconds, translating low-level failures into a readable message.
     *
     * <p>{@link DriverManager#setLoginTimeout} is a JVM-global setting; the
     * previous value is restored in the finally block so other callers are
     * unaffected.
     *
     * @param jdbcUrl  hive2 JDBC URL
     * @param username user name
     * @param password password
     * @param timeout  login timeout in seconds; null defaults to 10
     * @throws SQLException when the connection attempt fails
     */
    public static void testConnection(
        String jdbcUrl, String username, String password, Integer timeout) throws SQLException {
        int effectiveTimeout = (timeout == null) ? 10 : timeout;
        int previousTimeout = DriverManager.getLoginTimeout();
        DriverManager.setLoginTimeout(effectiveTimeout);
        try (Connection ignored = DriverManager.getConnection(jdbcUrl, username, password)) {
            // Connection opened successfully; try-with-resources closes it.
        } catch (SQLException e) {
            throw new SQLException(toReadableMsg(e), e);
        } finally {
            // Restore the JVM-wide login timeout.
            DriverManager.setLoginTimeout(previousTimeout);
        }
    }

    /**
     * Opens a Hive JDBC connection. A blank password is simply not sent in the
     * connection properties.
     *
     * @param jdbcUrl  hive2 JDBC URL
     * @param username user name; must be non-null
     * @param password password; omitted when blank
     * @return an open connection the caller must close
     * @throws SQLException if the connection cannot be established
     */
    public static Connection createConnection(String jdbcUrl, String username, String password)
        throws SQLException {
        Properties connectionProps = new Properties();
        connectionProps.put("user", username);
        if (StringUtils.isNotBlank(password)) {
            connectionProps.put("password", password);
        }
        return DriverManager.getConnection(jdbcUrl, connectionProps);
    }

    /**
     * Lists tables in the connection's current database via {@code show tables}.
     *
     * @param conn an open Hive connection
     * @return table names in the order Hive returns them; empty when none
     * @throws SQLException on query failure
     */
    public static List<String> getTableList(Connection conn) throws SQLException {
        List<String> tables = new LinkedList<>();
        try (PreparedStatement statement = conn.prepareStatement("show tables");
             ResultSet rows = statement.executeQuery()) {
            if (rows != null) {
                while (rows.next()) {
                    tables.add(rows.getString(1));
                }
            }
        }
        return tables;
    }

    /**
     * Lists views for the given connection. View enumeration is not
     * implemented for Hive here, so this always returns an immutable empty
     * list.
     *
     * @param conn an open Hive connection (currently unused)
     * @return an immutable empty list
     * @throws SQLException never thrown; declared for symmetry with
     *     {@link #getTableList(Connection)}
     */
    public static List<String> getViewList(Connection conn) throws SQLException {
        return Collections.emptyList();
    }

    /**
     * Fetches the DDL of an existing table via {@code show create table}.
     * Hive returns the statement as multiple rows; they are joined with a
     * single space (original behavior).
     *
     * @param conn  an open Hive connection
     * @param table table name, optionally db-qualified
     * @return the CREATE TABLE statement on one line; "" when no rows
     * @throws SQLException on query failure
     */
    public static String getCreateTableSql(Connection conn, String table) throws SQLException {
        StringJoiner ddl = new StringJoiner(" ");
        try (PreparedStatement statement = conn.prepareStatement("show create table " + table);
             ResultSet rows = statement.executeQuery()) {
            if (rows != null) {
                while (rows.next()) {
                    ddl.add(rows.getString(1));
                }
            }
        }
        return ddl.toString();
    }

    /**
     * Retrieves column metadata for a table and flags partition columns.
     *
     * <p>Two passes: first {@link DatabaseMetaData#getColumns} supplies the
     * JDBC column descriptions; then the textual output of {@code desc <table>}
     * is scanned to mark partition columns. The partition section is assumed
     * to begin after the second blank first-column row of the desc output —
     * NOTE(review): this depends on Hive's exact desc layout; confirm against
     * the Hive version in use (a third blank row would stop the marking, since
     * the check is {@code blankNum == 2}, not {@code >= 2}).
     *
     * @param conn      an open Hive connection
     * @param tableName table to describe
     * @return one {@link FieldInfo} per column; empty list when the table has none
     * @throws SQLException on metadata or query failure
     */
    public static List<FieldInfo> getFieldList(Connection conn, String tableName)
        throws SQLException {
        DatabaseMetaData metaData = conn.getMetaData();
        String catalog = conn.getCatalog();
        String schema = null;

        // Table-level remark lookup (metaData.getTables REMARKS) is disabled,
        // so tableComment is always null on the returned fields.
        String tableRemark = null;

        List<FieldInfo> list = new LinkedList<>();
        try (ResultSet rs = metaData.getColumns(catalog, schema, tableName, null)) {
            while (rs != null && rs.next()) {
                int dataType = rs.getInt("DATA_TYPE");
                String typeName = rs.getString("TYPE_NAME");
                String columnName = rs.getString("COLUMN_NAME");
                int columnSize = rs.getInt("COLUMN_SIZE");
                int decimalDigits = rs.getInt("DECIMAL_DIGITS");
                int nullable = rs.getInt("NULLABLE"); // whether NULL is allowed: 0 - NO, 1 - YES
                String remarks = rs.getString("REMARKS");
                String columnDef = rs.getString("COLUMN_DEF");

                FieldInfo fieldInfo =
                    FieldInfo.builder()
                        .tableName(tableName)
                        .tableComment(tableRemark)
                        .dataType(dataType)
                        .fieldName(columnName)
                        .fieldType(typeName)
                        .columnSize(columnSize)
                        .decimalDigits(decimalDigits)
                        .isNullable(nullable)
                        .defaultValue(columnDef)
                        .comment(remarks)
                        .build();
                list.add(fieldInfo);
            }
        }

        if (CollectionUtils.isEmpty(list)) {
            return list;
        }

        // Index fields by name so partition rows from "desc" can be matched.
        Map<String, FieldInfo> map =
            list.stream().collect(Collectors.toMap(e -> e.getFieldName(), e -> e));

        String sql = "desc " + tableName;
        try (PreparedStatement pstmt = conn.prepareStatement(sql);
             ResultSet rs = pstmt.executeQuery()) {
            int blankNum = 0;
            while (rs != null && rs.next()) {
                String fieldName = rs.getString(1);
                if (StringUtils.isBlank(fieldName)) {
                    blankNum++;
                    continue;
                }

                // Partition information only appears after two blank rows.
                if (blankNum == 2) {
                    if (map.get(fieldName) != null) {
                        map.get(fieldName).setIsPartition(1);
                    }
                }
            }
        }
        return list;
    }

    /**
     * Fetches up to 10 sample rows from a table. Delegates to
     * {@link #previewData(Connection, String, boolean, int)} with the
     * historical limit of 10.
     */
    public static PreviewDataDomain previewData(Connection conn, String tableName, boolean trimDbName)
        throws SQLException {
        return previewData(conn, tableName, trimDbName, 10);
    }

    /**
     * Fetches up to {@code limit} sample rows from a table (generalized from
     * the previous hard-coded LIMIT 10).
     *
     * @param conn       an open Hive connection
     * @param tableName  table to sample
     * @param trimDbName when true, strips everything up to and including the
     *                   first '.' from each result-set column name.
     *                   NOTE(review): a column name without a '.' becomes ""
     *                   (StringUtils.substringAfter behavior, unchanged) —
     *                   this assumes Hive always prefixes columns; confirm.
     * @param limit      maximum number of rows to fetch; must be positive
     * @return field names plus row data; an empty domain when the result set is null
     * @throws SQLException on query failure
     */
    public static PreviewDataDomain previewData(
        Connection conn, String tableName, boolean trimDbName, int limit)
        throws SQLException {
        String sql = "SELECT * from " + tableName + " LIMIT " + limit;
        try (PreparedStatement pstmt = conn.prepareStatement(sql);
             ResultSet rs = pstmt.executeQuery()) {
            if (rs == null) {
                return PreviewDataDomain.builder().build();
            }

            ResultSetMetaData meta = rs.getMetaData();
            int columnCount = meta.getColumnCount();
            List<String> fieldNameList = new LinkedList<>();
            for (int i = 1; i <= columnCount; i++) {
                String columnName = meta.getColumnName(i);
                // Drop the db/table prefix Hive puts on result columns.
                if (trimDbName) {
                    columnName = StringUtils.substringAfter(columnName, ".");
                }
                fieldNameList.add(columnName);
            }

            List<List<Object>> dataList = new LinkedList<>();
            while (rs.next()) {
                List<Object> rowData = new LinkedList<>();
                for (int i = 1; i <= columnCount; i++) {
                    rowData.add(rs.getObject(i));
                }
                dataList.add(rowData);
            }
            return PreviewDataDomain.builder().fieldNameList(fieldNameList).dataList(dataList).build();
        }
    }

    /**
     * Quietly closes a connection; both null input and close failures are
     * ignored.
     *
     * @param conn connection to close; may be null
     */
    public static void closeConnection(Connection conn) {
        if (conn != null) {
            try {
                conn.close();
            } catch (Exception ignored) {
                // Best-effort close: nothing useful to do on failure.
            }
        }
    }

    /**
     * Walks the cause chain down to its root cause.
     *
     * <p>Fix: guards against cyclic cause chains (possible with manually wired
     * causes), which would previously spin forever; the walk stops at the
     * first throwable seen twice.
     *
     * @param t starting throwable; must be non-null
     * @return the deepest cause, or {@code t} itself when it has none
     */
    private static Throwable getThrowable(Throwable t) {
        Set<Throwable> seen = Collections.newSetFromMap(new IdentityHashMap<>());
        seen.add(t);
        Throwable cause = t.getCause();
        while (cause != null && seen.add(cause)) {
            t = cause;
            cause = t.getCause();
        }
        return t;
    }

    /**
     * Maps a connection failure onto a user-facing (Chinese) message: network
     * problems get a "service unreachable / check URL" hint, everything else a
     * "check credentials" hint.
     *
     * @param e the exception raised while connecting
     * @return a human-readable message
     */
    private static String toReadableMsg(SQLException e) {
        Throwable rootCause = getThrowable(e);
        boolean networkFailure =
            rootCause instanceof SocketTimeoutException || rootCause instanceof ConnectException;
        if (networkFailure) {
            return "连接失败，请检查数据库服务是否可用或数据库链接是否正确";
        }
        return "连接失败，请检查用户名、密码、数据库名等参数是否正确";
    }

    /**
     * Derives a deterministic Hive user name for an internal user id: the MD5
     * hex digest of the decimal id, truncated to the 20 characters at indexes
     * [4, 24). Now implemented with the JDK's {@link MessageDigest} instead of
     * commons-codec, producing byte-identical output to DigestUtils.md5Hex.
     *
     * @param userId internal user id; a null id hashes the literal "null"
     *     (unchanged from the original String.valueOf behavior)
     * @return a 20-character lowercase hex user name
     */
    public static String getHiveUserNameByUserId(Long userId) {
        try {
            MessageDigest md5 = MessageDigest.getInstance("MD5");
            byte[] digest = md5.digest(String.valueOf(userId).getBytes(StandardCharsets.UTF_8));
            // %032x left-pads with zeros so the hex string is always 32 chars.
            String md5Hex = String.format("%032x", new BigInteger(1, digest));
            return md5Hex.substring(4, 24); // keep 20 characters
        } catch (NoSuchAlgorithmException e) {
            // Every compliant JDK ships MD5; absence is a JVM defect.
            throw new IllegalStateException("MD5 algorithm unavailable", e);
        }
    }

    /**
     * Strips comment lines and trailing "--" comments from a multi-line HiveQL
     * statement, preserving quoted strings across lines.
     *
     * @param statement the raw statement; may be null
     * @return the statement without comments, or null when the input is null
     * @see org.apache.hive.common.util.HiveStringUtils
     */
    public static String removeComments(String statement) {
        if (statement == null) {
            return null;
        }
        // Quote state is shared across lines so a string literal containing
        // "--" that spans lines is not mistaken for a comment.
        int[] quoteState = {-1};
        StringBuilder stripped = new StringBuilder(statement.length());
        Iterator<String> lines = Splitter.on("\n").omitEmptyStrings().split(statement).iterator();
        while (lines.hasNext()) {
            String cleaned = removeComments(lines.next(), quoteState);
            stripped.append(cleaned);
            if (lines.hasNext() && !cleaned.isEmpty()) {
                stripped.append("\n");
            }
        }
        return stripped.toString();
    }

    /**
     * Strips "--" comments from a single line while tracking quote state
     * across calls.
     *
     * <p>{@code startQuote[0]} holds the quote character ({@code '} or
     * {@code "}) currently open from a previous line, or -1 when none is open;
     * it is updated in place so the caller can feed consecutive lines of the
     * same statement.
     *
     * @param line       one line of the statement
     * @param startQuote single-element array carrying the open-quote state
     * @return the line with comments removed, trimmed
     * @see org.apache.hive.common.util.HiveStringUtils
     */
    private static String removeComments(String line, int[] startQuote) {
        if (line == null || line.isEmpty()) {
            return line;
        }
        // A whole-line "#" or "--" comment is only honored outside a string.
        if (startQuote[0] == -1 && isComment(line)) {
            return ""; // assume # can only be used at the beginning of line.
        }
        StringBuilder builder = new StringBuilder();
        for (int index = 0; index < line.length(); ) {
            // "--" outside any quote starts a comment: skip to end of line.
            if (startQuote[0] == -1
                && index < line.length() - 1
                && line.charAt(index) == '-'
                && line.charAt(index + 1) == '-') {
                // Jump to the end of current line. When a multiple line query is executed with -e
                // parameter,
                // it is passed in as one line string separated with '\n'
                for (; index < line.length() && line.charAt(index) != '\n'; ++index) {
                    ;
                }
                continue;
            }

            char letter = line.charAt(index);
            // An unescaped quote either closes the currently open quote or,
            // when none is open, opens a new one.
            if (startQuote[0] == letter && (index == 0 || line.charAt(index - 1) != '\\')) {
                startQuote[0] = -1; // Turn escape off.
            } else if (startQuote[0] == -1
                && (letter == '\'' || letter == '"')
                && (index == 0 || line.charAt(index - 1) != '\\')) {
                startQuote[0] = letter; // Turn escape on.
            }

            builder.append(letter);
            index++;
        }

        return builder.toString().trim();
    }

    /**
     * Returns true when the line is a whole-line comment: the SQL92 "--"
     * prefix or the beeline-supported shell-style "#" prefix.
     *
     * @param line a single statement line
     * @return whether the trimmed line starts a comment
     * @see org.apache.hive.common.util.HiveStringUtils
     */
    private static boolean isComment(String line) {
        String trimmed = line.trim();
        return trimmed.startsWith("--") || trimmed.startsWith("#");
    }

    /**
     * Decides whether the given statement is a Hive client command (SET, ADD,
     * DFS, ...) rather than SQL. Blank statements are neither.
     *
     * @param statement the raw statement text
     * @return true for Hive commands; false for SQL or blank input
     */
    public static boolean isHiveCommand(String statement) {
        return StringUtils.isNotBlank(statement) && !isHiveSQL(statement);
    }

    /**
     * Decides whether the given statement is Hive SQL, i.e. not one of the
     * recognized Hive client commands. Blank statements are not SQL.
     *
     * @param statement the raw statement text
     * @return true for SQL; false for blank input or Hive commands
     */
    public static boolean isHiveSQL(String statement) {
        if (StringUtils.isBlank(statement)) {
            return false;
        }
        String[] tokens = removeComments(statement).trim().split("\\s+");
        // Test-only commands are excluded from matching.
        HiveCommand command = HiveCommand.find(tokens, false);
        return command == null || StringUtils.isBlank(tokens[0]);
    }

    /**
     * @author david
     * @see org.apache.hadoop.hive.ql.processors.HiveCommand
     */
    private static enum HiveCommand {
        SET(),
        RESET(),
        DFS(),
        CRYPTO(true),
        ERASURE(true),
        ADD(),
        LIST(),
        RELOAD(),
        DELETE(),
        COMPILE();

        @SuppressWarnings("unused")
        public static final boolean ONLY_FOR_TESTING = true;

        private boolean usedOnlyForTesting;

        HiveCommand() {
            this(false);
        }

        HiveCommand(boolean onlyForTesting) {
            this.usedOnlyForTesting = onlyForTesting;
        }

        public boolean isOnlyForTesting() {
            return this.usedOnlyForTesting;
        }

        private static final Set<String> COMMANDS = new HashSet<String>();

        static {
            for (HiveCommand command : HiveCommand.values()) {
                COMMANDS.add(command.name());
            }
        }

        @SuppressWarnings("unused")
        public static HiveCommand find(String[] command) {
            return find(command, false);
        }

        public static HiveCommand find(String[] command, boolean findOnlyForTesting) {
            if (null == command) {
                return null;
            }
            String cmd = command[0];
            if (cmd != null) {
                cmd = cmd.trim().toUpperCase();
                if (command.length > 1 && "role".equalsIgnoreCase(command[1])) {
                    // special handling for set role r1 statement
                    return null;
                } else if (command.length > 1 && "from".equalsIgnoreCase(command[1])) {
                    // special handling for SQL "delete from <table> where..."
                    return null;
                } else if (command.length > 1
                    && "reload".equalsIgnoreCase(command[0])
                    && "function".equalsIgnoreCase(command[1])) {
                    // special handling for SQL "reload function"
                    return null;
                } else if (command.length > 1
                    && "set".equalsIgnoreCase(command[0])
                    && "autocommit".equalsIgnoreCase(command[1])) {
                    return null; // don't want set autocommit true|false to get mixed with set hive.foo.bar...
                } else if (COMMANDS.contains(cmd)) {
                    HiveCommand hiveCommand = HiveCommand.valueOf(cmd);

                    if (findOnlyForTesting == hiveCommand.isOnlyForTesting()) {
                        return hiveCommand;
                    }

                    return null;
                }
            }
            return null;
        }
    }

    /**
     * Manual smoke test: verifies connectivity and prints a table's DDL plus
     * the elapsed time.
     *
     * <p>Usage: {@code HiveUtil [jdbcUrl] [username] [password] [table]} — the
     * previous hard-coded values remain as defaults. Fixes: the connection is
     * now closed in a finally block, and the start timestamp is a primitive
     * long (no accidental boxing). NOTE(review): credentials should not live
     * in source; prefer passing them as arguments or environment config.
     */
    public static void main(String... args) throws SQLException {
        String jdbcUrl =
            args.length > 0 ? args[0] : "jdbc:hive2://10.88.36.232:10000/471a6ed04922a0d7_default";
        String username = args.length > 1 ? args[1] : "5760f8f587711c2db711";
        String password = args.length > 2 ? args[2] : "123456";
        String table = args.length > 3 ? args[3] : "ods_ddddddddd";

        testConnection(jdbcUrl, username, password, 1);
        long start = System.currentTimeMillis();
        Connection conn = createConnection(jdbcUrl, username, password);
        try {
            System.out.println(getCreateTableSql(conn, table));
            System.out.println(">>>>>>>>>> " + (System.currentTimeMillis() - start));
        } finally {
            closeConnection(conn);
        }
    }
}
