package com.gitee.melin.bee.core.jdbc.dialect.impl;

import com.gitee.melin.bee.core.jdbc.ConnectionFactory;
import com.gitee.melin.bee.core.jdbc.QueryResult;
import com.gitee.melin.bee.core.jdbc.dialect.AbstractJdbcDialect;
import com.gitee.melin.bee.core.jdbc.enums.DataSourceType;
import com.gitee.melin.bee.core.jdbc.enums.FileFormat;
import com.gitee.melin.bee.core.jdbc.relational.*;
import com.gitee.melin.bee.util.JsonUtils;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import java.io.IOException;
import java.lang.reflect.Method;
import java.security.PrivilegedExceptionAction;
import java.sql.*;
import java.util.*;
import java.util.Date;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.security.SaslRpcServer;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.util.KerberosName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * JDBC dialect for Apache Hive.
 *
 * <p>Every JDBC operation is funnelled through {@link #runSecured(Callable)} so that it executes
 * inside a Hadoop {@link UserGroupInformation#doAs} context, supporting both SIMPLE and Kerberos
 * authenticated clusters. Metadata queries rely on Hive's JSON DDL output mode
 * ({@code set hive.ddl.output.format=json}), whose JSON documents are parsed into the generic
 * {@code MetaTable}/{@code MetaColumn} model.
 */
public class HiveJdbcDialect extends AbstractJdbcDialect {
    private static final Logger LOG = LoggerFactory.getLogger(HiveJdbcDialect.class);

    // org.apache.hive.jdbc.JdbcColumn#hiveTypeToSqlType(String), resolved reflectively because
    // JdbcColumn is package-private in the hive-jdbc driver. May remain null when the driver is
    // not on the classpath; callers treat that as "no JDBC type mapping available".
    private Method hiveTypeToSqlTypeMethod;

    @Override
    public DataSourceType getDataSourceType() {
        return DataSourceType.HIVE;
    }

    public HiveJdbcDialect(ConnectionInfo connectionInfo) {
        super(connectionInfo);
        init();
    }

    public HiveJdbcDialect(Connection connection) {
        super(connection);
        init();
    }

    /** Resolves the hive-jdbc type-mapping helper reflectively; failure is non-fatal. */
    private void init() {
        try {
            Class<?> clazz = Class.forName("org.apache.hive.jdbc.JdbcColumn");
            hiveTypeToSqlTypeMethod = clazz.getMethod("hiveTypeToSqlType", String.class);
            hiveTypeToSqlTypeMethod.setAccessible(true);
        } catch (Exception e) {
            // FIX: log the full throwable; previously only e.getMessage() was logged, which
            // dropped the stack trace (and logs "null" for exceptions without a message).
            LOG.error("Failed to resolve org.apache.hive.jdbc.JdbcColumn#hiveTypeToSqlType", e);
        }
    }

    /**
     * Executes {@code securedCallable} under a Hadoop UGI. Kerberos login is used when any of
     * principal / krb5 file / keytab file is configured; otherwise a SIMPLE remote user is
     * created from the configured username.
     *
     * @param securedCallable the action to run inside {@code doAs}
     * @return the callable's result
     * @throws RuntimeException wrapping the original failure, tagged with the auth mode used
     */
    protected <T> T runSecured(final Callable<T> securedCallable) {
        String authentication = "simple";
        try {
            UserGroupInformation userGroupInformation;
            if (StringUtils.isNotBlank(this.connectionInfo.getPrincipal())
                    || StringUtils.isNotBlank(this.connectionInfo.getKrb5File())
                    || StringUtils.isNotBlank(this.connectionInfo.getKeytabFile())) { // Kerberos authentication

                authentication = "kerberos";
                userGroupInformation = loginToKerberos();
            } else {
                String hadoopPrincipal = this.connectionInfo.getUsername();
                userGroupInformation =
                        UserGroupInformation.createRemoteUser(hadoopPrincipal, SaslRpcServer.AuthMethod.SIMPLE);
                UserGroupInformation.setLoginUser(userGroupInformation);
            }

            return userGroupInformation.doAs((PrivilegedExceptionAction<T>) securedCallable::call);
        } catch (Exception e) {
            String msg = "Authentication: " + authentication + " 登录失败: " + e.getMessage();
            throw new RuntimeException(msg, e);
        }
    }

    /**
     * Logs in to Kerberos from the configured keytab, reusing the current UGI when it already
     * originates from a keytab, and renews the TGT if necessary.
     */
    private UserGroupInformation loginToKerberos() throws IOException {
        System.setProperty("java.security.krb5.conf", this.connectionInfo.getKrb5File());
        // The default realm is derived from krb5.conf, which we may just have changed.
        KerberosName.resetDefaultRealm();
        // https://stackoverflow.com/questions/34616676/should-i-call-ugi-checktgtandreloginfromkeytab-before-every-action-on-hadoop
        UserGroupInformation connectUgi = UserGroupInformation.getCurrentUser();
        if (!connectUgi.isFromKeytab()) {
            UserGroupInformation.setConfiguration(connectionInfo.getConfiguration());

            String hadoopPrincipal = connectionInfo.getPrincipal();
            connectUgi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
                    hadoopPrincipal, this.connectionInfo.getKeytabFile());
        }

        connectUgi.checkTGTAndReloginFromKeytab();

        return connectUgi;
    }

    @Override
    protected Driver getDriver() {
        return loadDriver(DataSourceType.HIVE.getDriverClass());
    }

    /**
     * Returns the JDBC URL, appending {@code ;principal=<hive principal>} when Kerberos is
     * configured and the URL does not already carry a principal.
     */
    @Override
    protected String getJdbcUrl() {
        String jdbcUrl = this.connectionInfo.getUrl();
        if (StringUtils.isNotBlank(this.connectionInfo.getHivePrincipal())
                || StringUtils.isNotBlank(this.connectionInfo.getKrb5File())
                || StringUtils.isNotBlank(this.connectionInfo.getKeytabFile())) { // Kerberos authentication

            if (!StringUtils.contains(jdbcUrl, ";principal=")) {
                jdbcUrl = jdbcUrl + ";principal=" + this.connectionInfo.getHivePrincipal();
            }
        }

        return jdbcUrl;
    }

    /** Column label under which Hive returns DDL text (see {@code SHOW CREATE TABLE}). */
    @Override
    protected String getDDLColumnName(BeeTableType type) {
        return "createtab_stmt";
    }

    @Override
    public final List<String> getSchemas() {
        return runSecured(super::getSchemas);
    }

    @Override
    public ConnectionFactory createConnectionFactory(Map<String, String> properties) {
        return runSecured(() -> super.createConnectionFactory(properties));
    }

    @Override
    public Connection getConnection() throws SQLException {
        return runSecured(super::getConnection);
    }

    @Override
    public MetaDataSource testConnection(ConnectionInfo connectionInfo) {
        return runSecured(() -> super.testConnection(connectionInfo));
    }

    @Override
    protected List<String> listSchemas(Connection connection) {
        return runSecured(() -> super.listSchemas(connection));
    }

    @Override
    public List<String> getTableNames(String schemaName) {
        return runSecured(() -> super.getTableNames(schemaName));
    }

    @Override
    public List<MetaTable> getSchemaTables(String schemaName) {
        return runSecured(() -> this.getTables(schemaName));
    }

    /**
     * Returns the columns of {@code schemaName.tableName}, with partition columns flagged.
     * Parses the JSON output of {@code describe extended}; JDBC types are filled in from
     * {@link DatabaseMetaData#getColumns} when available.
     */
    @Override
    public List<MetaColumn> getSchemaColumns(String schemaName, String tableName) {
        return runSecured(() -> {
            try (Connection connection = this.getConnection()) {
                List<MetaColumn> columns = new ArrayList<>();

                // FIX: close the statement; it was previously created and leaked.
                try (PreparedStatement setStmt = connection.prepareStatement("set hive.ddl.output.format=json")) {
                    setStmt.execute();
                }

                Map<String, JDBCType> jdbcTypeMap = Maps.newHashMap();
                try {
                    DatabaseMetaData metaData = connection.getMetaData();
                    // FIX: close the ResultSet; it was previously leaked.
                    try (ResultSet results = metaData.getColumns(null, schemaName, tableName, null)) {
                        while (results.next()) {
                            String columnName = results.getString("COLUMN_NAME");
                            int jdbcType = results.getInt("DATA_TYPE");
                            jdbcTypeMap.put(columnName, JDBCType.valueOf(jdbcType));
                        }
                    }
                } catch (SQLException e) {
                    throw new RuntimeException("Get Scheme Error: " + e.getMessage(), e);
                }

                String sql = String.format(" describe extended `%s`.`%s`", schemaName, tableName);
                try (PreparedStatement preparedStatement = connection.prepareStatement(sql);
                        ResultSet resultSet = preparedStatement.executeQuery()) {

                    // Only the first row carries the JSON document, hence the break at the end.
                    while (resultSet.next()) {
                        String json = resultSet.getString("col_name");
                        Map<String, Object> map = JsonUtils.toJavaMap(json);
                        ArrayList<LinkedHashMap<String, String>> cols =
                                (ArrayList<LinkedHashMap<String, String>>) map.get("columns");

                        for (LinkedHashMap<String, String> colMap : cols) {
                            MetaColumn field = new MetaColumn();
                            field.setColumnName(colMap.get("name"));
                            field.setDataType(colMap.get("type"));
                            field.setComment(colMap.get("comment"));
                            // FIX: jdbcTypeMap was built but never applied to the result.
                            field.setJdbcType(jdbcTypeMap.get(colMap.get("name")));
                            columns.add(field);
                        }

                        LinkedHashMap<String, Object> tableInfo = (LinkedHashMap<String, Object>) map.get("tableInfo");
                        ArrayList<LinkedHashMap<String, String>> partitionKeys =
                                (ArrayList<LinkedHashMap<String, String>>) tableInfo.get("partitionKeys");

                        for (LinkedHashMap<String, String> keyMap : partitionKeys) {
                            for (MetaColumn columnInfo : columns) {
                                if (StringUtils.equals(keyMap.get("name"), columnInfo.getColumnName())) {
                                    columnInfo.setPartitionField(true);
                                    break;
                                }
                            }
                        }
                        break;
                    }
                }

                return columns;
            } catch (SQLException e) {
                throw new RuntimeException("Get Scheme Error: " + e.getMessage(), e);
            }
        });
    }

    @Override
    public String getCreateTableScript(BeeTableType type, String schemaName, String tableName) {
        return runSecured(() -> this.getCreateHiveTableScript(schemaName, tableName));
    }

    /**
     * Fetches the table DDL via {@code SHOW CREATE TABLE}.
     *
     * @return the DDL text, or {@code "DDL is not available"} when Hive returns nothing
     */
    public String getCreateHiveTableScript(String databaseName, String tableName) {
        // FIX: backtick-quote identifiers (consistent with the "describe extended" statements
        // above) so reserved words / special characters in names do not break the statement.
        String sql = String.format("SHOW CREATE TABLE `%s`.`%s`", databaseName, tableName);
        try (Connection connection = connectionFactory.openConnection();
                PreparedStatement dbStat = connection.prepareStatement(sql)) {
            try (ResultSet dbResult = dbStat.executeQuery()) {
                // StringBuilder: no synchronization needed for this local accumulator.
                StringBuilder createTableScript = new StringBuilder();
                while (dbResult.next()) {
                    createTableScript.append(dbResult.getString(1)).append("\n");
                }
                if (createTableScript.length() > 0) {
                    return createTableScript.toString();
                } else {
                    return "DDL is not available";
                }
            }
        } catch (SQLException e) {
            throw new RuntimeException("Get Scheme Error", e);
        }
    }

    @Override
    public QueryResult query(String sql) {
        return runSecured(() -> super.query(sql));
    }

    @Override
    public QueryResult query(String sql, int maxRecords) {
        return runSecured(() -> super.query(sql, maxRecords));
    }

    @Override
    public Boolean execute(String sql) {
        return runSecured(() -> super.execute(sql));
    }

    @Override
    public Boolean execute(String schema, String sql) {
        return runSecured(() -> super.execute(schema, sql));
    }

    @Override
    public Integer executeUpdate(String sql, Object... params) {
        return runSecured(() -> super.executeUpdate(sql, params));
    }

    @Override
    public Integer executeUpdate(SchemaIdentifier schema, String sql, Object... params) {
        return runSecured(() -> super.executeUpdate(schema, sql, params));
    }

    @Override
    public CompletableFuture<QueryResult> asyncQuery(String sql, int maxRecords) {
        return runSecured(() -> super.asyncQuery(sql, maxRecords));
    }

    @Override
    public CompletableFuture<QueryResult> asyncQuery(String schema, String sql, int maxRecords) {
        return runSecured(() -> super.asyncQuery(schema, sql, maxRecords));
    }

    @Override
    public MetaTable getSchemaTable(String schemaName, String tableName, boolean containColumn) {
        if (StringUtils.isBlank(schemaName)) {
            throw new IllegalStateException("schemaName can not blank");
        }

        if (StringUtils.isBlank(tableName)) {
            throw new IllegalStateException("tableName can not blank");
        }
        return runSecured(() -> this.getTable(schemaName, tableName));
    }

    /** Lists all tables of {@code schemaName} via {@code show tables} in JSON DDL output mode. */
    protected List<MetaTable> getTables(String schemaName) {
        try (Connection connection = this.getConnection()) {
            // FIX: close the statement; it was previously created and leaked.
            try (PreparedStatement setStmt = connection.prepareStatement("set hive.ddl.output.format=json")) {
                setStmt.execute();
            }

            if (StringUtils.isNotBlank(schemaName)) {
                connection.setSchema(schemaName);
            }

            String sql = "show tables";
            try (PreparedStatement preparedStatement = connection.prepareStatement(sql);
                    ResultSet resultSet = preparedStatement.executeQuery()) {

                List<MetaTable> tableList = new ArrayList<>();
                while (resultSet.next()) {
                    String json = resultSet.getString("tab_name");
                    Map<String, Object> map = JsonUtils.toJavaMap(json);
                    ArrayList<String> tables = (ArrayList<String>) map.get("tables");
                    for (String table : tables) {
                        MetaTable tableInfo = new MetaTable();
                        tableInfo.setTableName(table);
                        tableList.add(tableInfo);
                    }
                }
                return tableList;
            }
        } catch (SQLException e) {
            throw new RuntimeException("Get Scheme Error: " + e.getMessage(), e);
        }
    }

    /**
     * Loads full metadata of a single table (location, type, file format, timestamps, columns,
     * partition/primary keys) from the JSON output of {@code describe extended}.
     *
     * @return the table metadata, or {@code null} when the describe result is empty
     */
    protected MetaTable getTable(String schemaName, String tableName) {
        try (Connection connection = this.getConnection();
                Statement statement = connection.createStatement()) {

            statement.executeUpdate("set hive.ddl.output.format=json");
            // Describe a single table; the first row carries the whole JSON document.
            String descTableSql = String.format(" describe extended `%s`.`%s`", schemaName, tableName);
            try (ResultSet rs = statement.executeQuery(descTableSql)) {
                while (rs.next()) {
                    String json1 = rs.getString("col_name");
                    Map<String, Object> map1 = JsonUtils.toJavaMap(json1);
                    LinkedHashMap<String, Object> tableInfoMap = (LinkedHashMap<String, Object>) map1.get("tableInfo");
                    LinkedHashMap<String, String> parameters =
                            (LinkedHashMap<String, String>) tableInfoMap.get("parameters");
                    LinkedHashMap<String, Object> sd = (LinkedHashMap<String, Object>) tableInfoMap.get("sd");

                    String location = (String) sd.get("location");
                    String tableType = (String) tableInfoMap.get("tableType");
                    Integer createTime = (Integer) tableInfoMap.get("createTime");

                    MetaTable tableInfo = new MetaTable();
                    tableInfo.setTableName(tableName);
                    tableInfo.setLocation(location);

                    // Only the size is inspected here, so the element type of the raw list
                    // does not matter (erasure); non-empty means the table is partitioned.
                    ArrayList<String> partitionKeys = (ArrayList<String>) tableInfoMap.get("partitionKeys");
                    if (partitionKeys != null && partitionKeys.size() > 0) {
                        tableInfo.setPartition(true);
                    }
                    tableInfo.setTableType(this.getTableType(tableType));
                    tableInfo.setComment(parameters.getOrDefault("comment", null));
                    // FIX: guard against a missing createTime; unboxing null threw an NPE.
                    if (createTime != null) {
                        tableInfo.setCreateTime(new Date(createTime * 1000L));
                    }
                    String lastDdlTime = parameters.getOrDefault("transient_lastDdlTime", null);
                    if (lastDdlTime != null) {
                        tableInfo.setLastDdlTime(new Date(Long.parseLong(lastDdlTime) * 1000L));
                    }

                    // Keep only plain informational entries as table properties.
                    Map<String, Object> filterTableInfoMap = tableInfoMap.entrySet().stream()
                            .filter(entry -> !StringUtils.startsWith(entry.getKey(), "set")
                                    && entry.getValue() != null
                                    && !(entry.getValue() instanceof Boolean))
                            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

                    FileFormat fileFormat = parseFileFormat(parameters, sd);
                    tableInfo.setFileFormat(fileFormat);
                    String hudiType = null;
                    String[] primaryKeys = null;
                    if (fileFormat == FileFormat.HUDI) {
                        hudiType = parameters.get("type");
                        tableInfo.setEngine(hudiType);
                        primaryKeys = StringUtils.split(parameters.get("primaryKey"), ",");
                    } else if (fileFormat == FileFormat.ICEBERG) {
                        parseIcebergTableProperties(tableInfo, parameters);
                    } else if (fileFormat == FileFormat.PAIMON) {
                        primaryKeys = parsePaimonTableProperties(tableInfo, parameters);
                    } else {
                        parseHiveTablePartitionName(tableInfo, filterTableInfoMap);
                    }

                    Map<String, Object> filterSd = sd.entrySet().stream()
                            .filter(entry -> !StringUtils.startsWith(entry.getKey(), "set")
                                    && entry.getValue() != null
                                    && !(entry.getValue() instanceof Boolean))
                            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

                    ArrayList<LinkedHashMap<String, Object>> cols =
                            (ArrayList<LinkedHashMap<String, Object>>) filterSd.get("cols");
                    List<MetaColumn> columns = new ArrayList<>();
                    for (LinkedHashMap<String, Object> colMap : cols) {
                        MetaColumn field = new MetaColumn();
                        String columnName = (String) colMap.get("name");
                        // Skip hudi metadata columns.
                        if (StringUtils.startsWith(columnName, "_hoodie_")) {
                            continue;
                        }
                        field.setColumnName(columnName);
                        String hiveType = (String) colMap.get("type");
                        field.setDataType(hiveType);
                        field.setComment((String) colMap.get("comment"));

                        try {
                            int jdbcTypeInt = (Integer) hiveTypeToSqlTypeMethod.invoke(null, hiveType);
                            field.setJdbcType(JDBCType.valueOf(jdbcTypeInt));
                        } catch (Exception ignored) {
                            // FIX: don't swallow silently. Best effort is intentional (complex
                            // types such as array/map/struct have no JDBC mapping, and the
                            // reflective method may be unavailable), but leave a trace.
                            LOG.debug("No JDBC type mapping for hive type: {}", hiveType);
                        }

                        if (primaryKeys != null) {
                            if (ArrayUtils.contains(primaryKeys, columnName)) {
                                field.setPrimaryKey(true);
                            }
                        }
                        columns.add(field);
                    }
                    tableInfo.setColumns(columns);

                    filterSd.remove("cols");
                    filterTableInfoMap.put("sd", filterSd);
                    tableInfo.setProperties(filterTableInfoMap);
                    return tableInfo;
                }
            }
            return null;
        } catch (SQLException e) {
            throw new RuntimeException("Get Scheme Error: " + e.getMessage(), e);
        }
    }

    /**
     * Determines the table's file format. Lake-house formats are detected first via the
     * {@code table_type} parameter, then via the storage descriptor's {@code inputFormat},
     * and finally via the {@code provider} parameter (Spark-created tables).
     */
    private FileFormat parseFileFormat(LinkedHashMap<String, String> parameters, LinkedHashMap<String, Object> sd) {
        String tableType = parameters.get("table_type");
        if (StringUtils.equals(tableType, "ICEBERG")) {
            return FileFormat.ICEBERG;
        } else if (StringUtils.equals(tableType, "PAIMON")) {
            return FileFormat.PAIMON;
        }

        String provider = parameters.getOrDefault("provider", null);
        if (provider == null) {
            String inputFormat = (String) sd.get("inputFormat");
            if ("org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat".equals(inputFormat)) {
                return FileFormat.PARQUET;
            } else if ("org.apache.orc.mapred.OrcInputFormat".equals(inputFormat)) {
                return FileFormat.ORC;
            } else if ("org.apache.avro.mapred.AvroInputFormat".equals(inputFormat)) {
                return FileFormat.AVRO;
            } else if ("org.apache.hadoop.mapred.TextInputFormat".equals(inputFormat)) {
                // FIX: TEXTFILE tables report org.apache.hadoop.mapred.TextInputFormat;
                // the previous "org.apache.avro.mapred.TextInputFormat" class does not
                // exist, so the TEXT branch could never match.
                return FileFormat.TEXT;
            } else if (StringUtils.startsWith(inputFormat, "org.apache.paimon")) {
                return FileFormat.PAIMON;
            } else if (StringUtils.startsWith(inputFormat, "org.apache.hudi.hadoop")) {
                return FileFormat.HUDI;
            } else if (StringUtils.startsWith(inputFormat, "iceberg")) {
                return FileFormat.ICEBERG;
            } else if (StringUtils.startsWith(inputFormat, "delta")) {
                return FileFormat.DELTA;
            }

            return FileFormat.UNKNOW;
        } else {
            try {
                // Locale.ROOT: avoid locale-dependent upper-casing (e.g. Turkish dotless i).
                return FileFormat.valueOf(provider.toUpperCase(Locale.ROOT));
            } catch (IllegalArgumentException e) {
                // FIX: an unrecognized provider previously aborted the whole table lookup.
                LOG.warn("Unknown table provider: {}", provider);
                return FileFormat.UNKNOW;
            }
        }
    }

    /** Extracts Iceberg-specific statistics and the default partition spec. */
    private void parseIcebergTableProperties(MetaTable metaTable, LinkedHashMap<String, String> parameters) {
        // NOTE(review): both branches call setDataSize, so "numRows" silently overwrites the
        // "numFiles" value — this looks like a copy-paste bug, but the intended MetaTable
        // setter for each parameter is not visible here; confirm before changing.
        if (parameters.containsKey("numFiles")) {
            metaTable.setDataSize(Long.valueOf(parameters.get("numFiles")));
        }
        if (parameters.containsKey("numRows")) {
            metaTable.setDataSize(Long.valueOf(parameters.get("numRows")));
        }

        if (parameters.containsKey("default-partition-spec")) {
            String json = parameters.get("default-partition-spec");
            LinkedHashMap<String, Object> partitionSpec = JsonUtils.toJavaMap(json);

            ArrayList<LinkedHashMap<String, Object>> fields =
                    (ArrayList<LinkedHashMap<String, Object>>) partitionSpec.get("fields");

            List<String> partitionNames =
                    fields.stream().map(map -> (String) map.get("name")).collect(Collectors.toList());
            metaTable.setPartitionKeys(partitionNames);
        }
    }

    /**
     * Extracts Paimon partition keys and primary keys from the table parameters.
     *
     * @return the primary key column names, or {@code null} when none are declared
     */
    private String[] parsePaimonTableProperties(MetaTable metaTable, LinkedHashMap<String, String> parameters) {
        String[] primaryKeys = null;
        if (parameters.containsKey("partition")) {
            String partition = parameters.get("partition");
            String[] partitions = StringUtils.split(partition, ",");
            metaTable.setPartitionKeys(Lists.newArrayList(partitions));
        }

        if (parameters.containsKey("primary-key")) {
            String primaryKey = parameters.get("primary-key");
            primaryKeys = StringUtils.split(primaryKey, ",");
        }

        return primaryKeys;
    }

    /** Copies plain Hive partition column names from the filtered table-info map. */
    private void parseHiveTablePartitionName(MetaTable metaTable, Map<String, Object> filterTableInfoMap) {
        ArrayList<LinkedHashMap<String, Object>> partitionKeyList =
                (ArrayList<LinkedHashMap<String, Object>>) filterTableInfoMap.get("partitionKeys");
        List<String> partitions = Lists.newArrayList();
        // FIX: the upstream filter drops null values, so "partitionKeys" may be absent for
        // non-partitioned tables; iterating a null list threw an NPE here.
        if (partitionKeyList != null) {
            for (LinkedHashMap<String, Object> keyMap : partitionKeyList) {
                partitions.add((String) keyMap.get("name"));
            }
        }

        metaTable.setPartitionKeys(partitions);
    }

    /**
     * Maps Hive's metastore table-type string to the generic {@code BeeTableType}.
     *
     * @throws IllegalArgumentException for unrecognized type strings
     */
    protected BeeTableType getTableType(String type) {
        if (StringUtils.equalsIgnoreCase(type, "VIEW")) {
            return BeeTableType.VIEW;
        } else if (StringUtils.equalsIgnoreCase(type, "MANAGED_TABLE")) {
            return BeeTableType.TABLE;
        } else if (StringUtils.equalsIgnoreCase(type, "EXTERNAL_TABLE")) {
            return BeeTableType.EXTERNAL_TABLE;
        } else {
            throw new IllegalArgumentException("table type is invalid: " + type);
        }
    }
}
