package com.central.scheduler.plugin.task.sql;

import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.parser.SQLParserUtils;
import com.alibaba.druid.sql.parser.SQLStatementParser;
import com.alibaba.druid.util.JdbcConstants;
import com.central.common.entity.SqlTaskContent;
import com.central.common.enums.DbType;
import com.central.common.enums.EngineType;
import com.central.common.enums.PrivilegeType;
import com.central.common.properties.Property;
import com.central.common.utils.JsonUtil;
import com.central.scheduler.plugin.task.api.AbstractTaskExecutor;
import com.central.scheduler.plugin.task.api.TaskException;
import com.central.scheduler.plugin.task.datasource.BaseConnectionParam;
import com.central.scheduler.plugin.task.datasource.DatasourceUtil;
import com.central.scheduler.plugin.task.datasource.hive.HiveConnectionParam;
import com.central.scheduler.plugin.task.util.CommonUtils;
import com.central.scheduler.plugin.task.util.UserHadoopUtils;
import com.central.scheduler.spi.enums.TaskTimeoutStrategy;
import com.central.scheduler.spi.task.*;
import com.central.scheduler.spi.task.paramparser.ParamUtils;
import com.central.scheduler.spi.task.paramparser.ParameterUtils;
import com.central.scheduler.spi.task.request.SQLTaskExecutionContext;
import com.central.scheduler.spi.task.request.TaskRequest;
import com.central.scheduler.spi.task.request.UdfFuncRequest;
import com.central.scheduler.spi.utils.JSONUtils;
import com.central.scheduler.spi.utils.PropertyUtils;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.sql.*;
import java.text.MessageFormat;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * @author Tindy
 * @date 2021/12/23
 * @describe
 */
public class SqlTask extends AbstractTaskExecutor {

    /**
     * task execution context handed over by the scheduler worker
     */
    private TaskRequest taskExecutionContext;

    /**
     * sql parameters
     */
    private SqlParameters sqlParameters;
    /**
     * base datasource
     */
    private BaseConnectionParam baseConnectionParam;

    /**
     * create function format
     */
    private static final String CREATE_FUNCTION_FORMAT = "create temporary function %s as '%s' using jar '%s'";
    // prefix used to build the per-query result-data logger name (see resultProcess)
    private static final String SQL_RESULT_DATA_NAME = "SqlResData:";
    /**
     * default query sql limit
     */
    private static final int LIMIT = 100000;
    // max number of result rows kept in the JSON array passed to the var pool
    private static final int VAR_POOL_LIST_LIMIT=10;

    /**
     * Builds the task: parses the task content JSON and assembles the
     * {@link SqlParameters} used during execution.
     *
     * @param taskRequest taskRequest
     */
    public SqlTask(TaskRequest taskRequest) {
        super(taskRequest);
        this.taskExecutionContext = taskRequest;
        SqlTaskContent content = JSONUtils.parseObject(taskRequest.getTaskContent(), SqlTaskContent.class);
        SqlParameters params = new SqlParameters();
        params.setSql(content.getSql());
        params.setDatasource(taskRequest.getSqlTaskExecutionContext().getDatasource());
        params.setLocalParams(taskRequest.getLocalParams());
        params.setGlobalParams(taskRequest.getGlobalParams());
        this.sqlParameters = params;
        if (!this.sqlParameters.checkParameters()) {
            throw new RuntimeException("sql task params is not valid");
        }
    }

    /** @return the sql parameters parsed from the task content */
    @Override
    public AbstractParameters getParameters() {
        return sqlParameters;
    }

    /**
     * Entry point: resolves the datasource, parses the main SQL statements,
     * prepares Kerberos files / UDFs / set-params for HIVE, then executes.
     *
     * @throws Exception when parsing or execution fails; the exit code is set
     *         to failure before rethrowing
     */
    @Override
    public void handle() throws Exception {
        // set the name of the current thread
        String threadLoggerInfoName = String.format(TaskConstants.TASK_LOG_INFO_FORMAT, taskExecutionContext.getTaskAppId());
        Thread.currentThread().setName(threadLoggerInfoName);

        // placeholder count must match the argument count (was 4 placeholders / 3 args,
        // which printed a literal "{}" for varPool)
        logger.info("sql : {} , localParams : {},varPool : {}",
                sqlParameters.getSql(),
                sqlParameters.getLocalParams(),
                sqlParameters.getVarPool());
        // get datasource
        baseConnectionParam = (BaseConnectionParam)DatasourceUtil.buildConnectionParams(
                sqlParameters.getDatasource().getDatasourceType(),
                sqlParameters.getDatasource());
        try {
            SQLTaskExecutionContext sqlTaskExecutionContext = taskExecutionContext.getSqlTaskExecutionContext();

            // parse the main sql; only select / insert / update / delete / truncate are accepted
            ArrayList<SqlBinds> sqlBindsList = parserMainSql(sqlParameters.getSql(), sqlParameters.getDatasource().getDatasourceType());

            //set keytab krb5conf LoginUserKeytabUsername while task is HIVE
            if(sqlParameters.getDatasource().getDatasourceType()==DbType.HIVE){
                //download keytab and conf file
                String keytabUrl = sqlParameters.getDatasource().getKeytabUrl();
                if(StringUtils.isNotEmpty(keytabUrl)){
                    logger.info("downloading keytab file ......");
                    downloadResource(keytabUrl);
                    String[] keytabPaths= keytabUrl.split("/");
                    String keytabPath=this.taskExecutionContext.getExecutePath()+"/"+keytabPaths[keytabPaths.length-1];
                    ((HiveConnectionParam)baseConnectionParam).setLoginUserKeytabPath(keytabPath);
                }
                String krb5ConfUrl = sqlParameters.getDatasource().getKrb5ConfUrl();
                if(StringUtils.isNotEmpty(krb5ConfUrl)){
                    logger.info("downloading krb5Conf file ......");
                    downloadResource(krb5ConfUrl);
                    String[] krb5ConfPaths= krb5ConfUrl.split("/");
                    String krb5ConfPath=this.taskExecutionContext.getExecutePath()+"/"+krb5ConfPaths[krb5ConfPaths.length-1];
                    ((HiveConnectionParam)baseConnectionParam).setJavaSecurityKrb5Conf(krb5ConfPath);
                }
                ((HiveConnectionParam)baseConnectionParam).setLoginUserKeytabUsername(sqlParameters.getDatasource().getUsername());
            }

            List<String> createFuncs =new ArrayList<>();
            List<String> setParams =new ArrayList<>();
            if(sqlParameters.getDatasource().getIsSys()){
                createFuncs=createFuncs(sqlTaskExecutionContext.getUdfs(),sqlTaskExecutionContext.getDefaultFS(), logger);
                setParams=setEngineQueue(sqlTaskExecutionContext.getEngine(),sqlTaskExecutionContext.getQueue());
                setSetParams(setParams,sqlTaskExecutionContext.getSetParams());
            }

            // execute sql task
            executeFuncAndSql(sqlBindsList,setParams,createFuncs,sqlTaskExecutionContext);
            setExitStatusCode(TaskConstants.EXIT_CODE_SUCCESS);

        } catch (Exception e) {
            setExitStatusCode(TaskConstants.EXIT_CODE_FAILURE);
            // pass the throwable as the last argument so the stack trace is logged
            logger.error("sql task error: {}", e.toString(), e);
            throw e;
        }
    }



    private ArrayList<SqlBinds> parserMainSql(String sql, DbType datasourceType) {
        com.alibaba.druid.DbType dbType=null;
        switch (datasourceType){
            case HIVE:
                dbType=JdbcConstants.HIVE;
                break;
            case MYSQL:
                dbType=JdbcConstants.MYSQL;
                break;
            default:
                break;
        }
        ArrayList<SqlBinds> sqlBindsList = new ArrayList<>();
        SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(sql, dbType);
        List<SQLStatement> sqlStatements = parser.parseStatementList();
        for (SQLStatement sqlStatement : sqlStatements) {
            if(sqlStatement instanceof SQLSelectStatement || sqlStatement instanceof SQLUpdateStatement || sqlStatement instanceof SQLInsertStatement || sqlStatement instanceof SQLDeleteStatement || sqlStatement instanceof SQLTruncateStatement){
                // ready to execute SQL and parameter entity Map
                String sqlS = sqlStatement.toString();
                Pattern pattern = Pattern.compile(".*;", Pattern.CASE_INSENSITIVE | Pattern.DOTALL);
                Matcher matcher = pattern.matcher(sqlS);
                if(matcher.matches()){
                    sqlS=sqlS.substring(0,sqlS.length()-1);
                }
                SqlBinds mainSqlBinds = getSqlAndSqlParamsMap(sqlS);
                if ((sqlStatement instanceof SQLSelectStatement)) {
                    mainSqlBinds.setSqlType(SqlType.QUERY);
                } else {
                    mainSqlBinds.setSqlType(SqlType.NON_QUERY);
                }
                sqlBindsList.add(mainSqlBinds);
            }else {
                logger.error("'"+sqlStatement.toString()+"' 类型错误，sql语句仅支持 select，insert，update，delete,truncate 5种");
                throw new SqlException();
            }
        }
        return sqlBindsList;
    }

    /**
     * execute function and sql
     *
     * <p>Opens one connection, applies the engine/queue "set" params and temp
     * UDFs, then runs every statement in order. Query results are streamed to
     * the result-data logger and the (truncated) JSON fed to the var pool.
     *
     * @param mainSqlBinds main sql binds
     * @param setParams engine/queue "set" statements to run first
     * @param createFuncs create functions
     * @param sqlTaskExecutionContext execution context carrying privilege info
     * @throws Exception on any privilege or sql failure
     */
    public void executeFuncAndSql(List<SqlBinds> mainSqlBinds, List<String> setParams, List<String> createFuncs, SQLTaskExecutionContext sqlTaskExecutionContext) throws Exception {
        Connection connection = null;
        PreparedStatement stmt = null;
        ResultSet resultSet = null;
        try {

            // create connection
            connection = DatasourceUtil.getConnection(sqlParameters.getDatasource().getDatasourceType(), baseConnectionParam);
            // set engine queue
            if (CollectionUtils.isNotEmpty(setParams)) {
                setParams(connection,setParams);
            }
            // create temp function
            if (CollectionUtils.isNotEmpty(createFuncs)) {
                createTempFunction(connection, createFuncs);
            }
            int resultFileNo=1;
            String sqlResDataNamePrefix = SQL_RESULT_DATA_NAME+
                    taskExecutionContext.getTaskLogName().substring(0,taskExecutionContext.getTaskLogName().length()-1)+"-sqlResData-";
            // whether table-level privilege checking is enabled
            boolean dataPrivilegeEnable = PropertyUtils.getBoolean(TaskConstants.DATA_PRIVILEGE_ENABLE, false);
            List<Map<String, Set<PrivilegeType>>> needPrivileges = sqlTaskExecutionContext.getNeedPrivileges();
            Map<String, Set<PrivilegeType>> tablePrivileges = sqlTaskExecutionContext.getTablePrivileges();
            for (SqlBinds mainSqlBind : mainSqlBinds) {
                stmt = prepareStatementAndBind(connection, mainSqlBind);
                // table privilege check; the system hive datasource is already
                // authorised through its keytab, so it is skipped here
                if(!sqlParameters.getDatasource().getIsSys() && dataPrivilegeEnable){
                    Map<String, Set<PrivilegeType>> needPrivilege = needPrivileges.get(resultFileNo - 1);
                    //key is datasourceId.dbName.tableName
                    for (String key : needPrivilege.keySet()) {
                        Set<PrivilegeType> privileges = tablePrivileges.getOrDefault(key,new HashSet<>());
                        if(!privileges.containsAll(needPrivilege.get(key))){
                            logger.error("{} privilege denied,need privileges is {},user have privileges is {}",
                                    key, JSONUtils.toJsonString(needPrivilege.get(key)), JSONUtils.toJsonString(privileges));
                            throw new IllegalArgumentException(key + " privilege denied");
                        }
                    }
                }
                // decide whether to executeQuery or executeUpdate based on sqlType
                if(mainSqlBind.getSqlType()==SqlType.QUERY){
                    // query statements need to be convert to JsonArray and inserted into Alert to send
                    resultSet = stmt.executeQuery();
                    String result = resultProcess(resultSet,sqlResDataNamePrefix,resultFileNo);
                    //deal out params
                    sqlParameters.dealOutParam(result);
                    // release this query's result set before the next statement
                    resultSet.close();
                    resultSet = null;
                }else {
                    stmt.executeUpdate();
                }
                // close the statement of this iteration; previously only the
                // last statement was closed in the finally block (leak when the
                // task contains several statements)
                stmt.close();
                stmt = null;
                resultFileNo++;
            }
        } catch (Exception e) {
            // pass the throwable so the stack trace survives in the log
            logger.error("execute sql error: {}", e.getMessage(), e);
            throw e;
        } finally {
            close(resultSet, stmt, connection);
        }
    }


    /**
     * result process
     *
     * <p>Streams every row (up to {@link #LIMIT}) to a dedicated data logger
     * and collects the first {@link #VAR_POOL_LIST_LIMIT} rows as a JSON array.
     *
     * @param resultSet resultSet
     * @param sqlResDataNamePrefix prefix for the per-query data logger name
     * @param resultFileNo 1-based index of the query within the task
     * @return the collected rows serialised as a JSON array string
     * @throws Exception Exception
     */
    private String resultProcess(ResultSet resultSet, String sqlResDataNamePrefix, int resultFileNo) throws Exception {

        Logger dataLogger = LoggerFactory.getLogger(sqlResDataNamePrefix + resultFileNo);
        ArrayNode rows = JSONUtils.createArrayNode();
        if (resultSet != null) {
            ResultSetMetaData meta = resultSet.getMetaData();
            int columnCount = meta.getColumnCount();
            // emit the header line: column labels, tab separated
            StringBuilder line = new StringBuilder();
            for (int col = 1; col <= columnCount; col++) {
                line.append(meta.getColumnLabel(col)).append("\t");
            }
            dataLogger.info(line.toString());
            int emitted = 0;
            while (emitted < LIMIT && resultSet.next()) {
                // only the first few rows are retained for the var pool
                if (rows.size() < VAR_POOL_LIST_LIMIT) {
                    ObjectNode row = JSONUtils.createObjectNode();
                    for (int col = 1; col <= columnCount; col++) {
                        row.set(meta.getColumnLabel(col), JSONUtils.toJsonNode(resultSet.getObject(col)));
                    }
                    rows.add(row);
                }
                line.setLength(0);
                for (int col = 1; col <= columnCount; col++) {
                    line.append(resultSet.getObject(col)).append("\t");
                }
                dataLogger.info(line.toString());
                emitted++;
            }
        }
        String result = JSONUtils.toJsonString(rows);
        logger.debug("execute sql result : {}", result);
        return result;
    }

    /**
     *  load udf jar
     *
     * <p>Best effort: a failure to load a jar is logged but does not abort the
     * task (preserves the original swallow-and-continue behaviour, except the
     * error is now recorded instead of silently discarded).
     *
     * @param connection live jdbc connection
     * @param loadUdfJars "add jar ..." style statements to run
     */
    private void loadUdfJar(Connection connection, List<String> loadUdfJars) {
        try (Statement funcStmt = connection.createStatement()) {
            for (String loadUdfJar : loadUdfJars) {
                logger.info("hive load udf jar sql: {}", loadUdfJar);
                funcStmt.execute(loadUdfJar);
            }
        } catch (Exception e) {
            // previously an empty catch; best-effort semantics kept, failure logged
            logger.error("load udf jar error: {}", e.getMessage(), e);
        }
    }

    /**
     * run engine/queue "set" statements on the connection
     *
     * @param connection connection
     * @param engineQueue "set key=value" statements to execute in order
     * @throws Exception when any statement fails
     */
    private void setParams(Connection connection,
                                    List<String> engineQueue) throws Exception {
        try (Statement setStmt = connection.createStatement()) {
            for (int i = 0; i < engineQueue.size(); i++) {
                String sql = engineQueue.get(i);
                logger.info(sql);
                setStmt.execute(sql);
            }
        }
    }
    /**
     * create temp function
     *
     * @param connection connection
     * @param createFuncs createFuncs
     * @throws Exception when any create-function statement fails
     */
    private void createTempFunction(Connection connection,
                                    List<String> createFuncs) throws Exception {
        try (Statement stmt = connection.createStatement()) {
            for (int i = 0; i < createFuncs.size(); i++) {
                String createFunc = createFuncs.get(i);
                logger.info("hive create function sql: {}", createFunc);
                stmt.execute(createFunc);
            }
        }
    }

    /**
     * close jdbc resource
     *
     * @param resultSet resultSet
     * @param pstmt pstmt
     * @param connection connection
     */
    private void close(ResultSet resultSet,
                       PreparedStatement pstmt,
                       Connection connection) {
        closeQuietly(resultSet, "close result set error : {}");
        closeQuietly(pstmt, "close prepared statement error : {}");
        closeQuietly(connection, "close connection error : {}");
    }

    /**
     * close one jdbc resource, logging (never propagating) any failure
     *
     * @param resource resource to close, may be null
     * @param errorMessage slf4j message template used when closing fails
     */
    private void closeQuietly(AutoCloseable resource, String errorMessage) {
        if (resource != null) {
            try {
                resource.close();
            } catch (Exception e) {
                logger.error(errorMessage, e.getMessage(), e);
            }
        }
    }

    /**
     * preparedStatement bind
     *
     * @param connection connection
     * @param sqlBinds sqlBinds
     * @return PreparedStatement
     * @throws TaskException when preparing or binding fails
     */
    private PreparedStatement prepareStatementAndBind(Connection connection, SqlBinds sqlBinds) {
        // apply the task timeout only for strategies that can fail the task
        TaskTimeoutStrategy strategy = taskExecutionContext.getTaskTimeoutStrategy();
        boolean applyTimeout = strategy == TaskTimeoutStrategy.FAILED || strategy == TaskTimeoutStrategy.WARNFAILED;
        try {
            PreparedStatement stmt = connection.prepareStatement(sqlBinds.getSql());
            if (applyTimeout) {
                stmt.setQueryTimeout(taskExecutionContext.getTaskTimeout());
            }
            Map<Integer, Property> params = sqlBinds.getParamsMap();
            if (params != null) {
                for (Map.Entry<Integer, Property> binding : params.entrySet()) {
                    Property property = binding.getValue();
                    ParameterUtils.setInParameter(binding.getKey(), stmt, property.getType(), property.getValue());
                }
            }
            logger.info("prepare statement replace sql : {} ", stmt);
            return stmt;
        } catch (Exception exception) {
            throw new TaskException("SQL task prepareStatementAndBind error", exception);
        }
    }

    /**
     * regular expressions match the contents between two specified strings
     *
     * <p>Scans {@code content} with {@code rgex} and records, in encounter
     * order, the Property for each matched parameter name into
     * {@code sqlParamsMap}, keyed by its 1-based placeholder position.
     *
     * @param content content
     * @param rgex rgex
     * @param sqlParamsMap sql params map (output)
     * @param paramsPropsMap params props map
     */
    public void setSqlParamsMap(String content, String rgex, Map<Integer, Property> sqlParamsMap, Map<String, Property> paramsPropsMap) {
        Matcher matcher = Pattern.compile(rgex).matcher(content);
        int position = 1;
        while (matcher.find()) {
            String paramName = matcher.group(1);
            Property prop = paramsPropsMap.get(paramName);
            if (prop == null) {
                logger.error("setSqlParamsMap: No Property with paramName: {} is found in paramsPropsMap of task instance"
                        + " with id: {}. So couldn't put Property in sqlParamsMap.", paramName, taskExecutionContext.getTaskInstanceId());
                continue;
            }
            sqlParamsMap.put(position++, prop);
            logger.info("setSqlParamsMap: Property with paramName: {} put in sqlParamsMap of content {} successfully.", paramName, content);
        }
    }

    /**
     * print replace sql
     *
     * @param content content (unused; kept for interface stability)
     * @param formatSql format sql
     * @param rgex rgex (unused; kept for interface stability)
     * @param sqlParamsMap sql params map, keyed 1..size by placeholder position
     */
    private void printReplacedSql(String content, String formatSql, String rgex, Map<Integer, Property> sqlParamsMap) {
        //parameter print style
        logger.info("after replace sql , preparing : {}", formatSql);
        StringBuilder logPrint = new StringBuilder("replaced sql , parameters:");
        if (sqlParamsMap == null) {
            logger.info("printReplacedSql: sqlParamsMap is null.");
        } else {
            int total = sqlParamsMap.size();
            // positions are assigned 1..size by setSqlParamsMap, so iterate in order
            for (int position = 1; position <= total; position++) {
                Property prop = sqlParamsMap.get(position);
                logPrint.append(prop.getValue()).append("(").append(prop.getType()).append(")");
            }
        }
        logger.info("Sql Params are {}", logPrint);
    }

    /**
     * ready to execute SQL and parameter entity Map
     *
     * <p>Resolves schedule-time variables, records ${...} placeholders as bind
     * parameters, inlines !{...} values verbatim, then replaces every ${...}
     * with a jdbc '?' placeholder.
     *
     * @param sql one parsed sql statement
     * @return SqlBinds
     */
    private SqlBinds getSqlAndSqlParamsMap(String sql) {
        Map<Integer, Property> sqlParamsMap = new HashMap<>();

        // combining local and global parameters
        Map<String, Property> paramsMap = ParamUtils.convert(taskExecutionContext, getParameters());

        // no variables to resolve: the sql runs as-is, without binds
        if (paramsMap == null) {
            return new SqlBinds(sql, sqlParamsMap);
        }

        //replace variable TIME with $[YYYYmmddd...] in sql when history run job and batch complement job
        sql = ParameterUtils.replaceScheduleTime(sql, taskExecutionContext.getScheduleTime());
        // ${...} placeholders become precompiled bind parameters
        String rgex = "['\"]*\\$\\{(.*?)\\}['\"]*";
        setSqlParamsMap(sql, rgex, sqlParamsMap, paramsMap);
        // !{...} values are substituted directly and do NOT take part in precompilation
        String rgexo = "['\"]*\\!\\{(.*?)\\}['\"]*";
        sql = replaceOriginalValue(sql, rgexo, paramsMap);
        // replace the ${} of the SQL statement with the '?' placeholder
        String formatSql = sql.replaceAll(rgex, "?");

        // print replaced sql
        printReplacedSql(sql, formatSql, rgex, sqlParamsMap);
        return new SqlBinds(formatSql, sqlParamsMap);
    }

    /**
     * Inlines every !{...} parameter value directly into the sql text.
     *
     * @param content sql text possibly containing !{name} markers
     * @param rgex regex whose group(1) captures the parameter name
     * @param sqlParamsMap name -> Property lookup for the values
     * @return content with every match replaced by its parameter value
     * @throws NullPointerException with a descriptive message when a matched
     *         parameter has no entry in {@code sqlParamsMap}
     */
    private String replaceOriginalValue(String content, String rgex, Map<String, Property> sqlParamsMap) {
        Pattern pattern = Pattern.compile(rgex);
        while (true) {
            Matcher m = pattern.matcher(content);
            if (!m.find()) {
                break;
            }
            String paramName = m.group(1);
            Property prop = Objects.requireNonNull(sqlParamsMap.get(paramName),
                    "replaceOriginalValue: no property found for param " + paramName);
            // quoteReplacement: a value containing '$' or '\' would otherwise be
            // misread as a group reference by Matcher.replaceFirst
            content = m.replaceFirst(Matcher.quoteReplacement(prop.getValue()));
        }
        return content;
    }

    /**
     * create function list
     *
     * @param udfs udf requests for this task, may be null or empty
     * @param defaultFS hdfs defaultFS used to locate the udf jars
     * @param logger logger (kept for interface stability)
     * @return create function list; empty when there are no udfs (never null)
     * @throws IOException when uploading a udf jar to hdfs fails
     */
    public List<String> createFuncs(List<UdfFuncRequest> udfs, String defaultFS, Logger logger) throws IOException {

        if (CollectionUtils.isEmpty(udfs)) {
            // empty list instead of null so callers never need a null check
            return Collections.emptyList();
        }
        List<String> funcList = new ArrayList<>();
        // build temp function sql
        buildTempFuncSql(funcList, udfs, defaultFS);

        return funcList;
    }

    /**
     * Appends hive "set" statements for the user-supplied session parameters.
     *
     * @param list output list the "set key=value" statements are appended to
     * @param map user-supplied hive session parameters, may be null
     */
    private void setSetParams(List<String> list, Map<String, String> map) {
        if (map != null) {
            // the engine and queue parameters are configured through dedicated
            // task settings, so they must not be overridden here
            map.remove("hive.execution.engine");
            map.remove("mapreduce.job.queuename");
            // entrySet avoids the extra map.get lookup per key
            for (Map.Entry<String, String> entry : map.entrySet()) {
                list.add("set " + entry.getKey() + "=" + entry.getValue());
            }
        }
    }

    /**
     * Builds the hive "set" statements that select the execution engine and,
     * when applicable, the yarn queue.
     *
     * @param engine execution engine, may be null
     * @param queue yarn queue name, may be blank
     * @return statements to run before the task sql (possibly empty)
     */
    private List<String> setEngineQueue(EngineType engine, String queue) {
        ArrayList<String> list = new ArrayList<>();
        if (engine == null) {
            return list;
        }
        list.add("set hive.execution.engine=" + engine.name().toLowerCase());
        if (EngineType.SPARK == engine) {
            list.add("set spark.master=yarn-cluster;");
        }
        if (StringUtils.isNotBlank(queue)) {
            switch (engine) {
                case MR:
                case SPARK:
                    // MR and SPARK share the same queue property
                    list.add("set mapreduce.job.queuename=" + queue);
                    break;
                default:
                    break;
            }
        }
        return list;
    }
    /**
     * build temp function sql
     *
     * <p>Makes sure every udf jar exists on hdfs (downloading and uploading it
     * when missing) and appends the matching "create temporary function"
     * statement.
     *
     * @param sqls sql list the create-function statements are appended to
     * @param udfs udf requests to register
     * @param defaultFS hdfs defaultFS prefix for the jar location
     * @throws IOException when hdfs access or the upload fails
     */
    private void buildTempFuncSql(List<String> sqls, List<UdfFuncRequest> udfs, String defaultFS) throws IOException {
        String uploadPath = CommonUtils.getHdfsUdfDir(taskExecutionContext.getProName());
        String keytabPath = ((HiveConnectionParam) baseConnectionParam).getLoginUserKeytabPath();
        String krb5ConfPath = ((HiveConnectionParam) baseConnectionParam).getJavaSecurityKrb5Conf();
        // the hadoop client is loop-invariant: create it once instead of per udf
        UserHadoopUtils userHadoopUtils = new UserHadoopUtils(((HiveConnectionParam) baseConnectionParam).getUser(), keytabPath, krb5ConfPath, defaultFS);
        for (UdfFuncRequest udf : udfs) {
            String[] resourcePaths = udf.getResourceUrl().split("/");
            String resourceName = resourcePaths[resourcePaths.length - 1];
            String udfJarHdfsPath = uploadPath + "/" + resourceName;
            // if the udf jar is not on hdfs yet, download it locally first and upload it
            if (!userHadoopUtils.exists(udfJarHdfsPath)) {
                logger.info("downloading udf {} jar file ......", udf.getFuncName());
                downloadResource(udf.getResourceUrl());
                userHadoopUtils.copyLocalToHdfs(this.taskExecutionContext.getExecutePath() + "/" + resourceName, udfJarHdfsPath, false, false);
            } else {
                logger.info("udf {} jar file is exist", udf.getFuncName());
            }
            sqls.add(String.format(CREATE_FUNCTION_FORMAT, udf.getFuncName(), udf.getClassName(), defaultFS + udfJarHdfsPath));
        }
    }

    /**
     * download resource file
     *
     * <p>Fetches the file at {@code url} into the task execute path via wget.
     *
     * @param url url
     * @throws RuntimeException wrapping the original failure (cause preserved)
     */
    private void downloadResource(String url) {
        try {
            // download the file with the wget command
            wgetCommand(url, this.taskExecutionContext.getExecutePath());
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            // keep the cause so the full stack trace survives the rethrow
            throw new RuntimeException(e.getMessage(), e);
        }
    }


}

