package com.central.datax.plugin.writer.hivewriter;

import com.alibaba.datax.common.exception.DataXException;
import com.alibaba.datax.common.util.Configuration;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.ast.statement.SQLColumnDefinition;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.ast.statement.SQLTableElement;
import com.alibaba.druid.sql.parser.SQLParserUtils;
import com.alibaba.druid.sql.parser.SQLStatementParser;
import com.alibaba.druid.util.JdbcConstants;
import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.*;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Helper that encapsulates every Hive JDBC interaction used by the writer:
 * lazily opening a (possibly Kerberized) connection, creating the temporary
 * staging table, discovering table columns/partitions by parsing
 * {@code SHOW CREATE TABLE} output with Druid, and moving staged data into
 * the target table.
 *
 * <p>Not thread-safe: each writer slice should own its own instance.
 *
 * @author Tindy
 * @date 2022/3/8
 */
public class HiveHelper {
    private static final Logger LOG = LoggerFactory.getLogger(HiveHelper.class);
    public static final String ORG_APACHE_HIVE_JDBC_HIVE_DRIVER = "org.apache.hive.jdbc.HiveDriver";
    private Configuration writerSliceConfig;
    private Connection conn;

    public HiveHelper(Configuration writerSliceConfig) {
        this.writerSliceConfig = writerSliceConfig;
    }

    /**
     * Returns a cached open connection, or opens a new one from the slice
     * configuration. When Kerberos is enabled, the login is performed under a
     * global lock because {@code java.security.krb5.conf} and the UGI login
     * user are process-wide state.
     *
     * @return an open Hive JDBC connection, never {@code null}
     * @throws SQLException  if the cached connection's state cannot be inspected
     * @throws DataXException if the connection cannot be established
     */
    public Connection getConn() throws SQLException {
        if (this.conn != null && !this.conn.isClosed()) {
            return this.conn;
        }
        try {
            Class.forName(ORG_APACHE_HIVE_JDBC_HIVE_DRIVER);
            Boolean haveKerberos = this.writerSliceConfig.getBool(Key.HAVE_KERBEROS, false);
            if (haveKerberos) {
                // krb5.conf property and the UGI login user are JVM-global,
                // so serialize concurrent Kerberos logins on a shared lock.
                synchronized (Constant.KRB5CONF_LOCK) {
                    UserGroupInformation.setLoginUser(null);
                    System.clearProperty("java.security.krb5.conf");
                    String krb5ConfPath = this.writerSliceConfig.getString(Key.KERBEROS_CONF_FILE_PATH);
                    System.setProperty("java.security.krb5.conf", krb5ConfPath);
                    org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
                    conf.set("hadoop.security.authentication", "Kerberos");
                    UserGroupInformation.setConfiguration(conf);
                    String keytabPath = this.writerSliceConfig.getString(Key.KERBEROS_KEYTAB_FILE_PATH);
                    String kerberosPrincipal = this.writerSliceConfig.getString(Key.KERBEROS_PRINCIPAL);
                    UserGroupInformation.loginUserFromKeytab(kerberosPrincipal, keytabPath);
                    this.conn = DriverManager.getConnection(this.writerSliceConfig.getString(Key.JDBC_URL));
                }
            } else {
                this.conn = DriverManager.getConnection(
                        this.writerSliceConfig.getString(Key.JDBC_URL),
                        this.writerSliceConfig.getString(Key.USERNAME),
                        this.writerSliceConfig.getString(Key.PASSWORD));
            }
        } catch (Exception e) {
            // Fail fast: the original logged and returned null here, which
            // deferred the failure to an NPE at the first caller use.
            LOG.error(e.getMessage(), e);
            throw DataXException.asDataXException(HiveWriterErrorCode.CONN_DB_ERROR, e);
        }
        return this.conn;
    }

    /**
     * Closes the cached connection if it is open.
     *
     * @throws DataXException if closing fails
     */
    public void closeConn() {
        try {
            if (this.conn != null && !this.conn.isClosed()) {
                conn.close();
            }
        } catch (SQLException e) {
            throw DataXException.asDataXException(HiveWriterErrorCode.CONN_CLOSE_ERROR, e);
        }
    }

    /**
     * Creates an ORC-backed temporary table with the configured columns and
     * returns its HDFS location (as reported by {@code SHOW CREATE TABLE},
     * with the surrounding quotes stripped).
     *
     * @param tmpTable fully-qualified temporary table name
     * @param columns  column configurations; each needs {@code name} and {@code type}
     * @return the table's storage location
     * @throws DataXException if the DDL fails or no LOCATION line is found
     */
    public String createTmpTable(String tmpTable, List<Configuration> columns) {
        String location = null;
        StringBuilder createTableDdl = new StringBuilder("create table ");
        createTableDdl.append(tmpTable).append(" (");
        for (Configuration eachColumnConf : columns) {
            String columnName = eachColumnConf.getNecessaryValue(Key.NAME, HiveWriterErrorCode.COLUMN_REQUIRED_VALUE);
            String columnType = eachColumnConf.getNecessaryValue(Key.TYPE, HiveWriterErrorCode.COLUMN_REQUIRED_VALUE);
            createTableDdl.append(columnName).append(" ").append(columnType).append(",");
        }
        // Replace the trailing comma with the closing parenthesis.
        createTableDdl.deleteCharAt(createTableDdl.length() - 1).append(")");
        createTableDdl.append(" stored as orc");
        // try-with-resources so the Statement/ResultSet are always released.
        try (Statement statement = getConn().createStatement()) {
            statement.execute(createTableDdl.toString());
            try (ResultSet resultSet = statement.executeQuery("show create table " + tmpTable)) {
                // SHOW CREATE TABLE yields one DDL line per row; the row after
                // the literal "LOCATION" row carries the quoted path.
                while (resultSet.next()) {
                    String s = resultSet.getString(1);
                    if ("LOCATION".equals(s) && resultSet.next()) {
                        location = resultSet.getString(1).trim();
                        break;
                    }
                }
            }
        } catch (SQLException e) {
            LOG.error(e.getMessage(), e);
            throw DataXException.asDataXException(HiveWriterErrorCode.CONN_DB_ERROR, "临时表创建失败", e);
        }
        if (location == null || location.length() < 2) {
            // Guard against the NPE the original hit when LOCATION was absent.
            throw DataXException.asDataXException(HiveWriterErrorCode.CONN_DB_ERROR, "临时表创建失败");
        }
        // Strip the surrounding quote characters around the path.
        return location.substring(1, location.length() - 1);
    }

    /**
     * Reads the table's schema by parsing its {@code SHOW CREATE TABLE} output.
     *
     * @param table table name
     * @return ordered map: key is column name, value is {@code true} when the
     *         column is a partition column
     * @throws DataXException if the schema cannot be read
     */
    public Map<String, Boolean> getColumns(String table) {
        Map<String, Boolean> columns = new LinkedHashMap<>();
        StringBuilder createTableDdl = new StringBuilder();
        try (Statement statement = getConn().createStatement();
             ResultSet resultSet = statement.executeQuery("SHOW CREATE TABLE " + table)) {
            // Collect DDL lines up to (excluding) the SERDE clause; Druid only
            // needs the column/partition definitions.
            while (resultSet.next()) {
                String s = resultSet.getString(1);
                if ("ROW FORMAT SERDE ".equals(s)) {
                    break;
                }
                createTableDdl.append(s);
            }
        } catch (SQLException e) {
            LOG.error(e.getMessage(), e);
            throw DataXException.asDataXException(HiveWriterErrorCode.CONN_DB_ERROR, "表字段获取失败", e);
        }
        // Normalize "CREATE EXTERNAL TABLE" to "CREATE TABLE" for the parser.
        // (Length guard avoids StringIndexOutOfBoundsException on short DDL.)
        if (createTableDdl.length() >= 15 && createTableDdl.substring(7, 15).equals("EXTERNAL")) {
            createTableDdl.delete(7, 15);
        }
        SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(createTableDdl.toString(), JdbcConstants.HIVE);
        List<SQLStatement> stmtList = parser.parseStatementList();
        for (SQLStatement stmt : stmtList) {
            SQLCreateTableStatement createTable = ((SQLCreateTableStatement) stmt);
            for (SQLTableElement sqlTableElement : createTable.getTableElementList()) {
                SQLColumnDefinition element = (SQLColumnDefinition) sqlTableElement;
                columns.put(stripBackticks(element.getName().toString()), false);
            }
            for (SQLColumnDefinition element : createTable.getPartitionColumns()) {
                columns.put(stripBackticks(element.getName().toString()), true);
            }
        }
        return columns;
    }

    /** Removes the surrounding backticks from a quoted Hive identifier, if any. */
    private static String stripBackticks(String columnName) {
        if (columnName.matches("`.*`")) {
            return columnName.substring(1, columnName.length() - 1);
        }
        return columnName;
    }

    /**
     * Moves the data from the temporary table into the target table and drops
     * the temporary table afterwards. Target columns missing from the temporary
     * table are filled with {@code null}; partition columns are written via
     * dynamic partitioning.
     *
     * @param tmpTable  source (staging) table
     * @param table     target table
     * @param writeMode Hive insert mode, e.g. {@code into} or {@code overwrite}
     * @throws DataXException if the move fails
     */
    public void moveData(String tmpTable, String table, String writeMode) {
        // Fetch the column sets of the temporary and target tables.
        Map<String, Boolean> tmpTableColumns = getColumns(tmpTable);
        Map<String, Boolean> tableColumns = getColumns(table);
        // Collect the target table's partition columns.
        List<String> partitions = tableColumns.entrySet().stream()
                .filter(Map.Entry::getValue)
                .map(Map.Entry::getKey)
                .collect(Collectors.toList());
        // Build the insert-select statement.
        StringBuilder moveSql = new StringBuilder("insert ").append(writeMode).append(" table ").append(table);
        if (partitions.size() > 0) {
            // partition(col, ...) with no values -> dynamic partitioning.
            moveSql.append(" partition(").append(String.join(",", partitions)).append(")");
        }
        moveSql.append(" select ");
        // Target columns absent from the temporary table are selected as null.
        for (String column : tableColumns.keySet()) {
            if (tmpTableColumns.containsKey(column)) {
                moveSql.append(column).append(",");
            } else {
                moveSql.append("null as ").append(column).append(",");
            }
        }
        moveSql.deleteCharAt(moveSql.length() - 1);
        moveSql.append(" from ").append(tmpTable);
        try (Statement statement = getConn().createStatement()) {
            // Enable dynamic partitioning for this session.
            statement.execute("set hive.exec.dynamic.partition =true");
            statement.execute("set hive.exec.dynamic.partition.mode = nonstrict");
            statement.execute("set hive.exec.max.dynamic.partitions=10000");
            statement.execute("set hive.exec.max.dynamic.partitions.pernode=1000");

            LOG.info("begin move data from tmp table " + tmpTable + " to " + table);
            LOG.info("execute move data sql:" + moveSql.toString());
            statement.execute(moveSql.toString());
            // Drop the temporary table once the move succeeds.
            LOG.info("drop tmp table " + tmpTable);
            statement.execute("drop table " + tmpTable);
        } catch (SQLException e) {
            LOG.error(e.getMessage(), e);
            throw DataXException.asDataXException(HiveWriterErrorCode.CONN_DB_ERROR, "数据从临时表迁移失败", e);
        }
    }

    /**
     * Drops the given table if it exists.
     *
     * @param tmpTable table to drop
     * @throws DataXException if the drop fails
     */
    public void dropTable(String tmpTable) {
        try (Statement statement = getConn().createStatement()) {
            LOG.info("drop tmp table " + tmpTable);
            statement.execute("drop table if exists " + tmpTable);
        } catch (SQLException e) {
            LOG.error(e.getMessage(), e);
            throw DataXException.asDataXException(HiveWriterErrorCode.CONN_DB_ERROR, "临时表删除失败", e);
        }
    }
}
