package com.gitee.melin.bee.core.jdbc.dialect.hive;

import com.gitee.melin.bee.core.jdbc.ConnectionFactory;
import com.gitee.melin.bee.core.jdbc.dialect.DefaultTypeConvertor;
import com.gitee.melin.bee.core.jdbc.relational.ConnectionInfo;
import com.gitee.melin.bee.core.jdbc.relational.MetaDataSource;
import com.gitee.melin.bee.core.jdbc.enums.DataSourceType;
import com.gitee.melin.bee.core.jdbc.QueryResult;
import com.gitee.melin.bee.core.jdbc.relational.MetaColumn;
import com.gitee.melin.bee.core.jdbc.relational.MetaTable;
import com.gitee.melin.bee.core.jdbc.dialect.DataTypeConvertor;
import com.gitee.melin.bee.core.jdbc.dialect.AbstractJdbcDialect;
import com.gitee.melin.bee.core.jdbc.dialect.IDBQuery;
import com.gitee.melin.bee.util.JsonUtils;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.SystemUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.SaslRpcServer;
import org.apache.hadoop.security.UserGroupInformation;

import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.sql.*;
import java.util.*;
import java.util.Date;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;

/**
 * JDBC dialect for Apache Hive.
 *
 * <p>Every public metadata operation is wrapped in {@link #runSecured(Callable)} so it runs
 * inside a Hadoop {@link UserGroupInformation} login context, supporting both Kerberos
 * (when a principal, krb5 file or keytab file is configured) and SIMPLE authentication.
 *
 * <p>Table and column metadata are read by switching the Hive session to JSON DDL output
 * ({@code set hive.ddl.output.format=json}) and parsing the returned JSON documents.
 */
public class HiveJdbcDialect extends AbstractJdbcDialect {

    /** Shared Hadoop configuration forcing Kerberos authentication for UGI logins. */
    private static final Configuration CONFIGURATION = new Configuration();

    static {
        CONFIGURATION.set("hadoop.security.authentication", "kerberos");
        CONFIGURATION.set("hadoop.security.authorization", "true");
    }

    public HiveJdbcDialect(ConnectionInfo connectionInfo) {
        super(connectionInfo);
    }

    public HiveJdbcDialect(Connection connection) {
        super(connection);
    }

    @Override
    public DataSourceType getDataSourceType() {
        return DataSourceType.HIVE;
    }

    @Override
    protected IDBQuery getDBQuery() {
        return new HiveQuery();
    }

    @Override
    protected DataTypeConvertor getTypeConvert() {
        return new DefaultTypeConvertor();
    }

    /** True when any Kerberos-related connection property (principal/krb5/keytab) is set. */
    private boolean kerberosEnabled() {
        return StringUtils.isNotBlank(this.connectionInfo.getPrincipal())
                || StringUtils.isNotBlank(this.connectionInfo.getKrb5File())
                || StringUtils.isNotBlank(this.connectionInfo.getKeytabFile());
    }

    /**
     * Executes {@code securedCallable} inside the appropriate Hadoop login context:
     * Kerberos when any Kerberos property is configured, otherwise a SIMPLE remote
     * user derived from the current OS user name.
     *
     * @param securedCallable the action to run as the authenticated user
     * @return the callable's result
     * @throws RuntimeException wrapping any login or execution failure, with the
     *         authentication mode ("simple"/"kerberos") in the message
     */
    protected <T> T runSecured(final Callable<T> securedCallable) {
        String authentication = "simple";
        try {
            UserGroupInformation userGroupInformation;
            if (kerberosEnabled()) { // Kerberos authentication
                authentication = "kerberos";
                userGroupInformation = loginToKerberos();
            } else {
                // The principal is necessarily blank on this branch, so the SIMPLE
                // remote user is always the current OS user.
                userGroupInformation = UserGroupInformation.createRemoteUser(
                        SystemUtils.getUserName(), SaslRpcServer.AuthMethod.SIMPLE);
                UserGroupInformation.setLoginUser(userGroupInformation);
            }

            return userGroupInformation.doAs((PrivilegedExceptionAction<T>) securedCallable::call);
        } catch (Exception e) {
            String msg = "Authentication: " + authentication + " 登录失败: " + e.getMessage();
            throw new RuntimeException(msg, e);
        }
    }

    /**
     * Logs in via keytab and returns the UGI, reusing the current login when it already
     * comes from a keytab. Renews the TGT if necessary before returning.
     */
    private UserGroupInformation loginToKerberos() throws IOException {
        System.setProperty("java.security.krb5.conf", this.connectionInfo.getKrb5File());
        // https://stackoverflow.com/questions/34616676/should-i-call-ugi-checktgtandreloginfromkeytab-before-every-action-on-hadoop
        UserGroupInformation connectUgi = UserGroupInformation.getCurrentUser();
        if (!connectUgi.isFromKeytab()) {
            UserGroupInformation.setConfiguration(CONFIGURATION);

            // Huawei MRS Hive compatibility: when a username is provided,
            // UserGroupInformation must log in with the username instead of the principal.
            String principal = StringUtils.isNotBlank(this.connectionInfo.getUsername())
                    ? this.connectionInfo.getUsername() : connectionInfo.getPrincipal();
            connectUgi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
                    principal, this.connectionInfo.getKeytabFile());
        }

        connectUgi.checkTGTAndReloginFromKeytab();

        return connectUgi;
    }

    @Override
    protected Driver getDriver() {
        return loadDriver(DataSourceType.HIVE.getDriverClass());
    }

    /**
     * Returns the JDBC URL, appending {@code ;principal=...} for Kerberos connections.
     * The principal is only appended when it is actually configured and not already
     * present in the URL, so a keytab-only configuration never produces a blank principal.
     */
    @Override
    protected String getJdbcUrl() {
        String jdbcUrl = this.connectionInfo.getJdbcUrl();
        if (kerberosEnabled()
                && StringUtils.isNotBlank(this.connectionInfo.getPrincipal())
                && !StringUtils.contains(jdbcUrl, ";principal=")) {
            jdbcUrl = jdbcUrl + ";principal=" + this.connectionInfo.getPrincipal();
        }

        return jdbcUrl;
    }

    @Override
    protected String getDDLColumnName() {
        return "createtab_stmt";
    }

    @Override
    public final List<String> getSchemas() {
        return runSecured(super::getSchemas);
    }

    @Override
    public ConnectionFactory createConnectionFactory(Map<String, String> properties) {
        return runSecured(() -> super.createConnectionFactory(properties));
    }

    @Override
    public MetaDataSource testConnection() {
        return runSecured(super::testConnection);
    }

    @Override
    protected List<String> listSchemas(Connection connection) {
        return runSecured(() -> super.listSchemas(connection));
    }

    @Override
    public List<MetaTable> getSchemaTables(String schemaName) {
        return runSecured(() -> this.getTables(schemaName));
    }

    /** Switches the session to JSON DDL output so DESC/SHOW results can be parsed as JSON. */
    private static void enableJsonDdlOutput(Connection connection) throws SQLException {
        try (PreparedStatement statement =
                     connection.prepareStatement("set hive.ddl.output.format=json")) {
            statement.execute();
        }
    }

    /**
     * Filters Hive metadata maps down to displayable entries: drops keys starting with
     * "set", null values, and boolean flags.
     */
    private static Map<String, Object> filterMetaEntries(Map<String, Object> source) {
        return source.entrySet().stream()
                .filter(entry -> !StringUtils.startsWith(entry.getKey(), "set")
                        && entry.getValue() != null && !(entry.getValue() instanceof Boolean))
                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    }

    /**
     * Lists the columns of {@code schemaName.tableName} by parsing the JSON output of the
     * dialect's DESC query; partition columns are flagged via {@code setPartitionField(true)}.
     *
     * @throws RuntimeException wrapping any {@link SQLException}
     */
    @Override
    public List<MetaColumn> getSchemaColumns(String schemaName, String tableName) {
        return runSecured(() -> {
            try (Connection connection = this.getConnection()) {
                List<MetaColumn> columns = new ArrayList<>();

                enableJsonDdlOutput(connection);

                String sql = getDBQuery().columnsSql(schemaName, tableName);
                try (PreparedStatement preparedStatement = connection.prepareStatement(sql);
                     ResultSet resultSet = preparedStatement.executeQuery()) {

                    // DESC in JSON mode yields a single row containing the whole document.
                    if (resultSet.next()) {
                        String json = resultSet.getString(getDBQuery().columnName());
                        Map<String, Object> map = JsonUtils.toJavaMap(json);
                        ArrayList<LinkedHashMap<String, String>> cols =
                                (ArrayList<LinkedHashMap<String, String>>) map.get("columns");

                        for (LinkedHashMap<String, String> colMap : cols) {
                            MetaColumn field = new MetaColumn();
                            field.setColumnName(colMap.get("name"));
                            field.setColumnType(colMap.get("type"));
                            field.setComment(colMap.get("comment"));
                            columns.add(field);
                        }

                        LinkedHashMap<String, Object> tableInfo =
                                (LinkedHashMap<String, Object>) map.get("tableInfo");
                        ArrayList<LinkedHashMap<String, String>> partitionKeys =
                                (ArrayList<LinkedHashMap<String, String>>) tableInfo.get("partitionKeys");

                        // partitionKeys may be absent for unpartitioned tables.
                        if (partitionKeys != null) {
                            for (LinkedHashMap<String, String> keyMap : partitionKeys) {
                                for (MetaColumn columnInfo : columns) {
                                    if (StringUtils.equals(keyMap.get("name"), columnInfo.getColumnName())) {
                                        columnInfo.setPartitionField(true);
                                        break;
                                    }
                                }
                            }
                        }
                    }
                }

                return columns;
            } catch (SQLException e) {
                throw new RuntimeException("Get Scheme Error: " + e.getMessage(), e);
            }
        });
    }

    @Override
    public String getCreateTableScript(String schemaName, String tableName) {
        return runSecured(() -> this.getCreateHiveTableScript(schemaName, tableName));
    }

    /**
     * Returns the {@code SHOW CREATE TABLE} script for {@code databaseName.tableName},
     * or {@code "DDL is not available"} when the server returns nothing.
     *
     * @throws RuntimeException wrapping any {@link SQLException}
     */
    public String getCreateHiveTableScript(String databaseName, String tableName) {
        // NOTE(review): identifiers cannot be bound as JDBC parameters, so the table name
        // is concatenated; callers must only pass trusted schema/table names.
        try (Connection connection = connectionFactory.openConnection();
             PreparedStatement dbStat = connection.prepareStatement(
                     "SHOW CREATE table " + databaseName + "." + tableName)) {
            try (ResultSet dbResult = dbStat.executeQuery()) {
                StringBuilder createTableScript = new StringBuilder();
                while (dbResult.next()) {
                    createTableScript.append(dbResult.getString(1)).append("\n");
                }
                if (createTableScript.length() > 0) {
                    return createTableScript.toString();
                } else {
                    return "DDL is not available";
                }
            }
        } catch (SQLException e) {
            throw new RuntimeException("Get Scheme Error", e);
        }
    }

    @Override
    public CompletableFuture<QueryResult> asyncQuery(String sql, int maxRecords) {
        return runSecured(() -> super.asyncQuery(sql, maxRecords));
    }

    @Override
    public MetaTable getSchemaTable(String schemaName, String tableName) {
        if (StringUtils.isBlank(schemaName)) {
            throw new IllegalStateException("schemaName can not blank");
        }

        if (StringUtils.isBlank(tableName)) {
            throw new IllegalStateException("tableName can not blank");
        }
        return runSecured(() -> this.getTable(schemaName, tableName));
    }

    /**
     * Lists the tables of {@code schemaName} by parsing the JSON output of the dialect's
     * SHOW TABLES query. Only the table names are populated on the returned objects.
     *
     * @throws RuntimeException wrapping any {@link SQLException}
     */
    protected List<MetaTable> getTables(String schemaName) {
        try (Connection connection = this.getConnection()) {
            enableJsonDdlOutput(connection);

            if (StringUtils.isNotBlank(schemaName)) {
                connection.setSchema(schemaName);
            }

            String sql = getDBQuery().tablesSql(schemaName);
            try (PreparedStatement preparedStatement = connection.prepareStatement(sql);
                 ResultSet resultSet = preparedStatement.executeQuery()) {

                List<MetaTable> tableList = new ArrayList<>();
                while (resultSet.next()) {
                    String json = resultSet.getString(getDBQuery().tableName());
                    Map<String, Object> map = JsonUtils.toJavaMap(json);
                    ArrayList<String> tables = (ArrayList<String>) map.get("tables");
                    for (String table : tables) {
                        MetaTable tableInfo = new MetaTable();
                        tableInfo.setTableName(table);
                        tableList.add(tableInfo);
                    }
                }
                return tableList;
            }
        } catch (SQLException e) {
            throw new RuntimeException("Get Scheme Error: " + e.getMessage(), e);
        }
    }

    /**
     * Fetches one table's metadata (location, type, comment, creation time, columns,
     * partition keys and filtered properties) by parsing the JSON DESC output, or
     * returns {@code null} when the query yields no rows.
     *
     * @throws RuntimeException wrapping any {@link SQLException}
     */
    protected MetaTable getTable(String schemaName, String tableName) {
        try (Connection connection = this.getConnection();
             Statement statement = connection.createStatement()) {
            statement.executeUpdate("set hive.ddl.output.format=json");

            // Describe the single table in JSON form.
            String descTableSql = getDBQuery().columnsSql(schemaName, tableName);
            try (ResultSet rs = statement.executeQuery(descTableSql)) {
                if (rs.next()) {
                    String json = rs.getString(getDBQuery().columnName());
                    Map<String, Object> map = JsonUtils.toJavaMap(json);
                    LinkedHashMap<String, Object> tableInfoMap =
                            (LinkedHashMap<String, Object>) map.get("tableInfo");
                    LinkedHashMap<String, String> parameters =
                            (LinkedHashMap<String, String>) tableInfoMap.get("parameters");

                    LinkedHashMap<String, Object> sd =
                            (LinkedHashMap<String, Object>) tableInfoMap.get("sd");
                    String location = (String) sd.get("location");
                    String tableType = (String) tableInfoMap.get("tableType");
                    Integer createTime = (Integer) tableInfoMap.get("createTime");

                    MetaTable tableInfo = new MetaTable();
                    tableInfo.setTableName(tableName);
                    tableInfo.setLocation(location);
                    tableInfo.setTableType(tableType);
                    tableInfo.setComment(parameters.getOrDefault("comment", null));
                    if (createTime != null) {
                        // Hive reports epoch seconds; java.util.Date expects milliseconds.
                        tableInfo.setCreateTime(new Date(createTime * 1000L));
                    }

                    Map<String, Object> filterTableInfoMap = filterMetaEntries(tableInfoMap);
                    Map<String, Object> filterSd = filterMetaEntries(sd);

                    ArrayList<LinkedHashMap<String, Object>> cols =
                            (ArrayList<LinkedHashMap<String, Object>>) filterSd.get("cols");
                    List<MetaColumn> columns = new ArrayList<>();
                    for (LinkedHashMap<String, Object> colMap : cols) {
                        MetaColumn field = new MetaColumn();
                        field.setColumnName((String) colMap.get("name"));
                        field.setColumnType((String) colMap.get("type"));
                        field.setComment((String) colMap.get("comment"));
                        columns.add(field);
                    }
                    tableInfo.setColumns(columns);

                    // partitionKeys may be absent for unpartitioned tables.
                    ArrayList<LinkedHashMap<String, Object>> partitionKeys =
                            (ArrayList<LinkedHashMap<String, Object>>) filterTableInfoMap.get("partitionKeys");
                    List<String> partitions = Lists.newArrayList();
                    if (partitionKeys != null) {
                        for (LinkedHashMap<String, Object> keyMap : partitionKeys) {
                            partitions.add((String) keyMap.get("name"));
                        }
                    }

                    filterSd.remove("cols");
                    filterTableInfoMap.put("sd", filterSd);
                    tableInfo.setProperties(filterTableInfoMap);
                    tableInfo.setPartitionKeys(partitions);
                    return tableInfo;
                }
            }
            return null;
        } catch (SQLException e) {
            throw new RuntimeException("Get Scheme Error: " + e.getMessage(), e);
        }
    }
}
