package avicit.bdp.dds.server.worker.task.datax;

import avicit.bdp.common.datasource.BaseDataSource;
import avicit.bdp.common.datasource.BaseKerberosDataSource;
import avicit.bdp.common.datasource.DataSourceFactory;
import avicit.bdp.common.service.dto.DataSourceDTO;
import avicit.bdp.common.utils.ConfigUtils;
import avicit.bdp.common.utils.DESUtils;
import avicit.bdp.common.utils.DateUtils;
import avicit.bdp.common.utils.SpringApplicationContext;
import avicit.bdp.common.utils.database.DBUtils;
import avicit.bdp.common.utils.redis.RedisCacheHelper;
import avicit.bdp.core.constant.ProcessConstants;
import avicit.bdp.core.datasource.enums.DataSourceType;
import avicit.bdp.core.util.json.JSONUtils;
import avicit.bdp.dds.common.Constants;
import avicit.bdp.dds.common.utils.OSUtils;
import avicit.bdp.dds.common.utils.ParameterUtils;
import avicit.bdp.dds.dispatch.enums.Flag;
import avicit.bdp.dds.dispatch.enums.IncrementModeEnum;
import avicit.bdp.dds.dispatch.process.Property;
import avicit.bdp.dds.dispatch.task.AbstractParameters;
import avicit.bdp.dds.dispatch.task.datax.DataxParameters;
import avicit.bdp.dds.server.entity.DataxTaskExecutionContext;
import avicit.bdp.dds.server.entity.TaskExecutionContext;
import avicit.bdp.dds.server.utils.DataxUtils;
import avicit.bdp.dds.server.utils.ParamUtils;
import avicit.bdp.dds.server.worker.task.AbstractTask;
import avicit.bdp.dds.server.worker.task.CommandExecuteResult;
import avicit.bdp.dds.server.worker.task.ShellCommandExecutor;
import avicit.bdp.dds.service.process.ProcessService;
import avicit.platform6.core.exception.BusinessException;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr;
import com.alibaba.druid.sql.ast.expr.SQLPropertyExpr;
import com.alibaba.druid.sql.ast.statement.SQLSelect;
import com.alibaba.druid.sql.ast.statement.SQLSelectItem;
import com.alibaba.druid.sql.ast.statement.SQLSelectQueryBlock;
import com.alibaba.druid.sql.ast.statement.SQLSelectStatement;
import com.alibaba.druid.sql.ast.statement.SQLUnionQuery;
import com.alibaba.druid.sql.parser.SQLStatementParser;
import com.alibaba.fastjson2.JSONObject;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;

import java.io.File;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * DataX task
 */
public class DataxTask extends AbstractTask {
    /** Interpreter used when DataX is launched through its python entry script. */
    protected static final String DATAX_PYTHON = "python3";

    /** Shell binary used when DataX is launched through a wrapper script. */
    protected static final String DATAX_SHELL = "sh";

    /**
     * Binary used when DataX is launched directly as a Java process.
     */
    protected static final String DATAX_JAVA = "java";

    /**
     * datax home path placeholder, expanded when the launcher script runs
     */
    private static final String DATAX_HOME_EVN = "${DATAX_HOME}";

    /**
     * datax channel count written into the generated job json
     */
    protected static int DATAX_CHANNEL_COUNT = 1;

    /**
     * datax parameters, parsed from the task params json in init()
     */
    protected DataxParameters dataXParameters;

    /**
     * shell command executor that actually spawns the DataX process
     */
    protected ShellCommandExecutor shellCommandExecutor;

    /**
     * taskExecutionContext
     */
    protected TaskExecutionContext taskExecutionContext;

    /**
     * password pattern; matches encrypted passwords embedded in custom job json
     */
    private final Pattern pwdPattern = Pattern.compile(Constants.DATASOURCE_PASSWORD_REGEX);
    /**
     * whether extraction runs from a SQL statement (false when built from column mappings)
     */
    protected boolean isSql = true;

    // Resolved from the Spring context in the constructor (instances are worker-created).
    private final ProcessService processService;
    private final RedisCacheHelper redisCacheHelper;

    /**
     * when true the task is a no-op and handle() exits immediately;
     * set by handleTimeFiledIncrementType() when there is nothing to sync
     */
    private boolean stop = false;
    /**
     * end of the current incremental sync window; persisted to Redis by handle()
     * after a successful run
     */
    private String taskLastSyncTime = null;

    /**
     * constructor
     *
     * @param taskExecutionContext taskExecutionContext
     * @param logger               logger
     */
    public DataxTask(TaskExecutionContext taskExecutionContext, Logger logger) {
        super(taskExecutionContext, logger);
        this.taskExecutionContext = taskExecutionContext;
        this.shellCommandExecutor =
                new ShellCommandExecutor(this::logHandle, taskExecutionContext, logger);
        this.processService = SpringApplicationContext.getBean(ProcessService.class);
        this.redisCacheHelper = SpringApplicationContext.getBean(RedisCacheHelper.class);
    }

    /**
     * Initializes the DataX task: parses task parameters and, when no SQL was supplied,
     * generates a SELECT statement from the configured source/target column mappings.
     * Timestamp-incremental tasks also compute their sync window here.
     *
     * @throws RuntimeException when task parameters are missing/invalid or the
     *                          source/target column lists differ in length
     */
    @Override
    public void init() {
        logger.info("datax task params {}", taskExecutionContext.getTaskParams());
        dataXParameters =
                JSONUtils.parseObject(taskExecutionContext.getTaskParams(), DataxParameters.class);
        if (dataXParameters == null || !dataXParameters.checkParameters()) {
            throw new RuntimeException("datax task params is not valid");
        }
        // No usable SQL means the query will be generated from column mappings.
        if (StringUtils.isBlank(dataXParameters.getSql())) {
            isSql = false;
        }
        // Generate the extraction SQL. isBlank (not the previous isEmpty) keeps this
        // condition consistent with the isSql flag above, so a whitespace-only SQL is
        // regenerated instead of being emitted verbatim into the job json.
        if (StringUtils.isBlank(dataXParameters.getSql())
                && StringUtils.isNotBlank(dataXParameters.getSourceTable())
                && StringUtils.isNotBlank(dataXParameters.getColumnSource())
                && StringUtils.isNotBlank(dataXParameters.getColumnTarget())) {
            // comma separated column lists, e.g. "col1,col2"
            String[] columnSourceArray = dataXParameters.getColumnSource().split(Constants.COMMA);
            String[] columnTargetArray = dataXParameters.getColumnTarget().split(Constants.COMMA);
            if (columnSourceArray.length != columnTargetArray.length) {
                // Source and target columns must pair up one-to-one.
                throw new RuntimeException("datax task params is not valid");
            }
            StringBuilder buf = new StringBuilder("select ");
            for (int i = 0; i < columnSourceArray.length; i++) {
                if (i > 0) {
                    buf.append(Constants.COMMA);
                }
                buf.append(columnSourceArray[i]).append(" as ").append(columnTargetArray[i]);
            }
            buf.append(" from ").append(dataXParameters.getSourceTable());
            dataXParameters.setSql(buf.toString());
            logger.info("datax sql = {}", dataXParameters.getSql());

            // Timestamp-based incremental tasks compute the [start, end] window now.
            // NOTE(review): this only runs when the SQL was generated above, so
            // increment handling is skipped for user-supplied SQL — confirm intended.
            if (IncrementModeEnum.INCREMENT_TIME_FIELD.equals(dataXParameters.getIncrementMode())) {
                handleTimeFiledIncrementType();
            }
        }
    }

    /**
     * Runs the DataX process: builds the launch command, executes it via the shell
     * command executor, and records exit status / app ids / process id. After a
     * successful timestamp-incremental run, persists the new "last sync time"
     * watermark to Redis so the next run starts from it.
     *
     * @throws Exception if command construction or execution fails
     */
    @Override
    public void handle() throws Exception {

        // handleTimeFiledIncrementType() may have decided there is nothing to sync.
        if (stop) {
            // typo fix: original message said "推出" instead of "退出"
            logger.info("任务无需运行，即将退出任务节点");
            setExitStatusCode(Constants.EXIT_CODE_SUCCESS);
            return;
        }

        try {
            List<String> execCommand = initCommand();
            // run datax process
            CommandExecuteResult commandExecuteResult = shellCommandExecutor.runCommand(execCommand);
            setExitStatusCode(commandExecuteResult.getExitStatusCode());
            setAppIds(commandExecuteResult.getAppIds());
            setProcessId(commandExecuteResult.getProcessId());

            // Only advance the incremental watermark after a fully successful run.
            if (StringUtils.isNotBlank(taskLastSyncTime)
                    && commandExecuteResult.getExitStatusCode() == Constants.EXIT_CODE_SUCCESS) {
                String cacheKey =
                        avicit.bdp.core.constant.Constants.REDIS_KEY_INCREMENT_LAST_SYNC_TIME
                                + taskExecutionContext.getTaskName();
                redisCacheHelper.set(cacheKey, taskLastSyncTime);
            }
        } catch (Exception e) {
            logger.error("datax task failure", e);
            setExitStatusCode(Constants.EXIT_CODE_FAILURE);
            throw e;
        }
    }

    /**
     * Builds the command line used to launch DataX, honoring the configured exec
     * mode: "shell" (wrapper script), "java" (plain JVM launch), or the default
     * python entry script.
     *
     * @return command tokens to execute
     * @throws Exception if building the job configuration file fails
     */
    protected List<String> initCommand() throws Exception {
        // Tag the current thread so task logs can be correlated with this task.
        Thread.currentThread()
                .setName(String.format("TaskLogInfo-%s", taskExecutionContext.getTaskAppId()));

        List<String> command = new ArrayList<>();
        String execMode = ConfigUtils.getInstance().getString("datax.exec.mode", "python");

        if ("java".equals(execMode)) {
            // Java mode assembles everything (including the job json path) itself.
            initDataxJavaCommand(command);
            return command;
        }

        if ("shell".equals(execMode)) {
            // Launch DataX through a shell wrapper script.
            command.add(DATAX_SHELL);
            String shellScript = ConfigUtils.getInstance().getString("datax.shell.home");
            command.add(
                    StringUtils.isEmpty(shellScript) ? "/opt/soft/datax/bin/start.sh" : shellScript);
        } else {
            // Default: launch DataX through the python entry script.
            String python = ConfigUtils.getInstance().getString("python.home");
            command.add(StringUtils.isEmpty(python) ? DATAX_PYTHON : python);
            String dataxScript = ConfigUtils.getInstance().getStringFromDb("datax.home");
            logger.info(">>>>dataxHome = {}", dataxScript);
            command.add(
                    StringUtils.isEmpty(dataxScript)
                            ? "/joinata/plugins/datax/bin/datax.py"
                            : dataxScript);
        }

        // Merge user-defined, global and local parameters for placeholder substitution.
        Map<String, Property> paramsMap =
                ParamUtils.convert(
                        ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()),
                        taskExecutionContext.getDefinedParams(),
                        dataXParameters.getLocalParametersMap(),
                        taskExecutionContext.getCmdTypeIfComplement(),
                        taskExecutionContext.getScheduleTime());
        command.add(buildDataxJsonFile(paramsMap));

        return command;
    }

    /**
     * cancel DataX process
     *
     * @param cancelApplication cancelApplication (unused here; the underlying
     *                          process is always cancelled)
     * @throws Exception if error throws Exception
     */
    @Override
    public void cancelApplication(boolean cancelApplication) throws Exception {
        // cancel process by delegating to the shell command executor
        shellCommandExecutor.cancelApplication();
    }

    /**
     * Builds the DataX job configuration file for this task run.
     *
     * <p>Custom-config tasks use the user-supplied JSON (with encrypted passwords
     * decrypted in place); otherwise the job JSON is assembled from the
     * reader/writer/setting/core builders. Placeholders are substituted before the
     * file is written. An existing file is reused (retries of the same instance).
     *
     * @param paramsMap parameters for placeholder substitution
     * @return datax json file name
     * @throws Exception if error throws Exception
     */
    protected String buildDataxJsonFile(Map<String, Property> paramsMap) throws Exception {
        // generate json
        String fileName =
                String.format(
                        "%s/%s_job.json",
                        taskExecutionContext.getExecutePath(), taskExecutionContext.getTaskAppId());
        Path path = new File(fileName).toPath();
        if (Files.exists(path)) {
            return fileName;
        }

        String json;
        if (dataXParameters.getCustomConfig() == Flag.YES.ordinal()) {
            String customJson = dataXParameters.getJson();
            if (StringUtils.isBlank(customJson)) {
                // Fail fast instead of NPE-ing on a null json / writing an empty file.
                throw new RuntimeException("datax custom config json is empty");
            }
            // Literal replace is equivalent to the old regex replaceAll("\\r\\n", "\n").
            json = passwordHandler(pwdPattern, customJson.replace("\r\n", "\n"));
        } else {
            JSONObject job = new JSONObject();
            job.put("content", buildDataxJobContentJson());
            job.put("setting", buildDataxJobSettingJson());

            JSONObject root = new JSONObject();
            root.put("job", job);
            root.put("core", buildDataxCoreJson());
            json = root.toString();
        }

        // replace placeholder
        json = ParameterUtils.convertParameterPlaceholders(json, ParamUtils.convert(paramsMap));
        logger.info("datax job json : {}", json);

        // create datax json file
        FileUtils.writeStringToFile(new File(fileName), json, StandardCharsets.UTF_8);
        return fileName;
    }

    /**
     * Decrypts every encrypted password matched by {@code pwdPattern} inside the
     * custom job JSON, replacing the ciphertext with the plaintext value.
     *
     * @param pwdPattern pattern matching encrypted password tokens
     * @param json       job JSON possibly containing encrypted passwords
     * @return JSON with matched passwords replaced by their decrypted values
     */
    private String passwordHandler(Pattern pwdPattern, String json) {
        Matcher matcher = pwdPattern.matcher(json);
        // StringBuffer kept deliberately: appendReplacement(StringBuilder, ...) needs Java 9+.
        StringBuffer sb = new StringBuffer(json.length());

        while (matcher.find()) {
            String decryptPassword = DESUtils.decrypt(matcher.group());

            // JSON-escape backslashes so the decrypted value remains a valid JSON
            // string, then let quoteReplacement neutralize Matcher's own '\' and '$'
            // metacharacters. The previous manual escaping emitted THREE backslashes
            // per backslash, which appendReplacement misparses (the stray '\' escapes
            // and swallows the following password character).
            // NOTE(review): assumes the match sits inside a JSON string value.
            String jsonSafe = decryptPassword.replace("\\", "\\\\");
            matcher.appendReplacement(sb, Matcher.quoteReplacement(jsonSafe));
        }
        matcher.appendTail(sb);
        return sb.toString();
    }

    /**
     * Rewrites the JDBC url when required. The greenplum DataX plugin cannot handle
     * urls of the form {@code jdbc:pivotal:greenplum://host:port;databaseName=db},
     * so Greenplum 6 datasources are rewritten to the plain postgresql form; every
     * other type keeps its configured url.
     *
     * @param datasourceType database type name
     * @param baseDataSource datasource configuration
     * @return the jdbc url to place into the DataX job json
     */
    private String handleDatasourceUrl(String datasourceType, BaseDataSource baseDataSource) {
        if (!DataSourceType.GREENPLUM6.getName().equalsIgnoreCase(datasourceType)) {
            return baseDataSource.getJdbcUrl();
        }
        return "jdbc:postgresql://"
                + baseDataSource.getIp()
                + ":"
                + baseDataSource.getPort()
                + "/"
                + baseDataSource.getDatabase()
                + "?"
                + baseDataSource.getOther();
    }

    /**
     * build datax job "content" section: a single reader/writer pair.
     *
     * <p>The reader queries the source datasource with the (possibly generated) SQL;
     * the writer loads the configured target table on the target datasource.
     *
     * @return collection of datax job config JSONObject
     * @throws RuntimeException when either datasource cannot be resolved
     */
    private List<JSONObject> buildDataxJobContentJson() {
        DataxTaskExecutionContext dataxTaskExecutionContext =
                taskExecutionContext.getDataxTaskExecutionContext();
        BaseDataSource dataSourceCfg =
                DataSourceFactory.getDatasource(
                        dataxTaskExecutionContext.getSourcetype(),
                        dataxTaskExecutionContext.getSourceConnectionParams());
        if (dataSourceCfg == null) {
            throw new RuntimeException("数据源不能为空!");
        }

        BaseDataSource dataTargetCfg =
                DataSourceFactory.getDatasource(
                        dataxTaskExecutionContext.getTargetType(),
                        dataxTaskExecutionContext.getTargetConnectionParams());
        if (dataTargetCfg == null) {
            throw new RuntimeException("数据源不能为空!");
        }

        // reader connection: query sql + source jdbc url
        List<JSONObject> readerConnArr = new ArrayList<>();
        JSONObject readerConn = new JSONObject();
        readerConn.put("querySql", new String[]{dataXParameters.getSql()});
        readerConn.put(
                "jdbcUrl",
                new String[]{
                        handleDatasourceUrl(dataxTaskExecutionContext.getSourcetype(), dataSourceCfg)
                });
        readerConnArr.add(readerConn);

        JSONObject readerParam = new JSONObject();
        handleConnectionInfo(readerParam, dataSourceCfg);
        readerParam.put("connection", readerConnArr);

        JSONObject reader = new JSONObject();
        reader.put("name", DataxUtils.getReaderPluginName(dataxTaskExecutionContext.getSourcetype()));
        reader.put("parameter", readerParam);
        // NOTE(review): DataX reader plugins normally expect fetchSize inside
        // "parameter"; confirm this top-level placement is actually honored.
        reader.put("fetchSize", 1000);

        // writer connection: target table + target jdbc url
        List<JSONObject> writerConnArr = new ArrayList<>();
        JSONObject writerConn = new JSONObject();
        writerConn.put("table", new String[]{dataXParameters.getTargetTable()});
        writerConn.put(
                "jdbcUrl", handleDatasourceUrl(dataxTaskExecutionContext.getTargetType(), dataTargetCfg));
        writerConnArr.add(writerConn);

        JSONObject writerParam = new JSONObject();
        handleConnectionInfo(writerParam, dataTargetCfg);

        writerParam.put("connection", writerConnArr);
        writerParam.put(
                "column",
                parsingSqlColumnNames(
                        dataxTaskExecutionContext.getSourcetype(),
                        dataxTaskExecutionContext.getTargetType(),
                        dataSourceCfg,
                        dataXParameters.getSql()));

        // Default to full extraction (truncate target first) ONLY when the user has
        // not configured pre-statements and the task is not timestamp-incremental.
        // The previous code applied the TRUNCATE unconditionally, which made the
        // emptiness check below dead code and wiped the target table even for
        // incremental syncs — confirm with task owners.
        if (CollectionUtils.isEmpty(dataXParameters.getPreStatements())
                && !IncrementModeEnum.INCREMENT_TIME_FIELD.equals(
                        dataXParameters.getIncrementMode())) {
            List<String> preSqlList = new ArrayList<>();
            preSqlList.add("TRUNCATE  table " + dataXParameters.getTargetTable());
            dataXParameters.setPreStatements(preSqlList);
        }

        if (CollectionUtils.isNotEmpty(dataXParameters.getPreStatements())) {
            writerParam.put("preSql", dataXParameters.getPreStatements());
        }

        if (CollectionUtils.isNotEmpty(dataXParameters.getPostStatements())) {
            writerParam.put("postSql", dataXParameters.getPostStatements());
        }

        JSONObject writer = new JSONObject();
        writer.put("name", DataxUtils.getWriterPluginName(dataxTaskExecutionContext.getTargetType()));
        writer.put("parameter", writerParam);
        writer.put("batchSize", 500);

        List<JSONObject> contentList = new ArrayList<>();
        JSONObject content = new JSONObject();
        content.put("reader", reader);
        content.put("writer", writer);
        contentList.add(content);

        return contentList;
    }

    /**
     * Fills datasource connection info (credentials plus kerberos settings) into a
     * DataX reader/writer parameter object.
     *
     * @param params     the plugin parameter object to populate
     * @param dataSource the datasource whose credentials are written
     */
    private void handleConnectionInfo(JSONObject params, BaseDataSource dataSource) {
        params.put("username", dataSource.getUser());
        params.put("password", DESUtils.decrypt(dataSource.getPassword()));

        // Kerberos fields only exist on kerberos-capable datasources; all others get
        // explicit empty values so the generated json is always fully populated.
        boolean haveKerberos = false;
        String krb5Path = "";
        String keytabPath = "";
        String principal = "";
        if (dataSource instanceof BaseKerberosDataSource) {
            BaseKerberosDataSource kerberosDs = (BaseKerberosDataSource) dataSource;
            haveKerberos = kerberosDs.getKerberos() != null && kerberosDs.getKerberos() == 1;
            krb5Path = kerberosDs.getKrb5Path();
            keytabPath = kerberosDs.getKeytabPath();
            principal = kerberosDs.getPrincipal();
        }
        params.put("haveKerberos", haveKerberos);
        params.put("krb5Path", krb5Path);
        params.put("keytabPath", keytabPath);
        params.put("principal", principal);
    }

    /**
     * build datax job "setting" section: channel count, optional byte/record
     * throttles, and a zero-tolerance dirty-record error limit.
     *
     * @return datax setting config JSONObject
     */
    private JSONObject buildDataxJobSettingJson() {
        JSONObject speed = new JSONObject();
        speed.put("channel", DATAX_CHANNEL_COUNT);
        // Throttles are only emitted when explicitly configured (> 0).
        int speedByte = dataXParameters.getJobSpeedByte();
        if (speedByte > 0) {
            speed.put("byte", speedByte);
        }
        int speedRecord = dataXParameters.getJobSpeedRecord();
        if (speedRecord > 0) {
            speed.put("record", speedRecord);
        }

        // Fail the job on any dirty record.
        JSONObject errorLimit = new JSONObject();
        errorLimit.put("record", 0);
        errorLimit.put("percentage", 0);

        JSONObject setting = new JSONObject();
        setting.put("speed", speed);
        setting.put("errorLimit", errorLimit);
        return setting;
    }

    /**
     * build datax "core" section: per-channel transport speed limits mirroring the
     * job-level setting section (core.transport.channel.speed).
     *
     * @return datax core config JSONObject
     */
    private JSONObject buildDataxCoreJson() {
        JSONObject speed = new JSONObject();
        speed.put("channel", DATAX_CHANNEL_COUNT);
        // Same optional throttles as the job setting section.
        int speedByte = dataXParameters.getJobSpeedByte();
        if (speedByte > 0) {
            speed.put("byte", speedByte);
        }
        int speedRecord = dataXParameters.getJobSpeedRecord();
        if (speedRecord > 0) {
            speed.put("record", speedRecord);
        }

        JSONObject channel = new JSONObject();
        channel.put("speed", speed);

        JSONObject transport = new JSONObject();
        transport.put("channel", channel);

        JSONObject core = new JSONObject();
        core.put("transport", transport);
        return core;
    }

    /**
     * create command
     *
     * <p>Writes a one-line launcher script that invokes the DataX python entry with
     * the given job config file. Currently unreferenced (see the commented-out call
     * in {@code handle()}); kept for the script-based launch path.
     *
     * @param jobConfigFilePath path of the generated datax job json
     * @param paramsMap         parameters for placeholder substitution
     * @return shell command file name
     * @throws Exception if error throws Exception
     */
    private String buildShellCommandFile(String jobConfigFilePath, Map<String, Property> paramsMap)
            throws Exception {
        // generate scripts
        String fileName =
                String.format(
                        "%s/%s_node.%s",
                        taskExecutionContext.getExecutePath(),
                        taskExecutionContext.getTaskAppId(),
                        OSUtils.isWindows() ? "bat" : "sh");

        Path path = new File(fileName).toPath();

        // Reuse the script across retries of the same task instance.
        if (Files.exists(path)) {
            return fileName;
        }

        // datax python command
        StringBuilder sbr = new StringBuilder();
        if (OSUtils.isWindows()) {
            // On Windows, DATAX_HOME must point directly at datax.py.
            String dataxHome = System.getenv().getOrDefault("DATAX_HOME", "datax.py");
            sbr.append("python");
            sbr.append(" ");
            sbr.append(dataxHome);
        } else {
            sbr.append(DATAX_PYTHON);
            sbr.append(" ");
            sbr.append(DATAX_HOME_EVN);
        }
        sbr.append(" ");
        sbr.append(jobConfigFilePath);

        if (OSUtils.isWindows()) {
            Files.createFile(path);
        } else {
            // create shell command file with rwxr-xr-x permissions
            Set<PosixFilePermission> perms = PosixFilePermissions.fromString(Constants.RWXR_XR_X);
            FileAttribute<Set<PosixFilePermission>> attr = PosixFilePermissions.asFileAttribute(perms);
            Files.createFile(path, attr);
        }

        // replace placeholder
        String dataxCommand =
                ParameterUtils.convertParameterPlaceholders(sbr.toString(), ParamUtils.convert(paramsMap));
        logger.info("raw script : {}", dataxCommand);
        // Explicit UTF-8: the previous no-arg getBytes() used the platform default
        // charset, corrupting non-ASCII paths/parameters on mis-configured hosts.
        Files.write(path, dataxCommand.getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);

        return fileName;
    }

    /**
     * Resolves the column names to synchronize.
     *
     * <p>Non-SQL tasks take the configured target columns directly. SQL tasks first
     * try grammatical analysis of the statement and, when that yields nothing, fall
     * back to executing the SQL and reading the result-set metadata.
     *
     * @param dsType        the database type of the data source
     * @param dtType        the database type of the data target
     * @param dataSourceCfg the database connection parameters of the data source
     * @param sql           sql for data synchronization
     * @return Keyword converted column names
     */
    private String[] parsingSqlColumnNames(
            String dsType, String dtType, BaseDataSource dataSourceCfg, String sql) {
        String[] columnNames =
                isSql
                        ? tryGrammaticalAnalysisSqlColumnNames(dsType, sql)
                        : dataXParameters.getColumnTarget().split(Constants.COMMA);

        // Fallback only applies to the SQL path; the mapping path is authoritative.
        if (isSql && (columnNames == null || columnNames.length == 0)) {
            logger.info("try to execute sql analysis query column name");
            columnNames = tryExecuteSqlResolveColumnNames(dataSourceCfg, sql);
        }

        notNull(columnNames, String.format("parsing sql columns failed : %s", sql));

        return DataxUtils.convertKeywordsColumns(dtType, columnNames);
    }

    /**
     * try grammatical parsing column
     *
     * <p>Parses the SELECT statement with Druid and reads the select-list items:
     * the alias when present, otherwise the plain or qualified column name. Any
     * failure (unsupported driver, non-SELECT statement, unexpressible item) is
     * logged and reported as {@code null} so the caller can fall back to executing
     * the SQL.
     *
     * @param dbType database type
     * @param sql    sql for data synchronization
     * @return column name array, or {@code null} when parsing fails
     * @throws RuntimeException if error throws RuntimeException
     */
    private String[] tryGrammaticalAnalysisSqlColumnNames(String dbType, String sql) {
        String[] columnNames;
        try {
            SQLStatementParser parser = DataxUtils.getSqlStatementParser(dbType, sql);
            notNull(parser, String.format("database driver [%s] is not support", dbType));

            // Assumes a SELECT statement; anything else fails this cast and lands in
            // the catch below (returning null triggers the execute-SQL fallback).
            SQLStatement sqlStatement = parser.parseStatement();
            SQLSelectStatement sqlSelectStatement = (SQLSelectStatement) sqlStatement;
            SQLSelect sqlSelect = sqlSelectStatement.getSelect();

            List<SQLSelectItem> selectItemList = null;
            if (sqlSelect.getQuery() instanceof SQLSelectQueryBlock) {
                SQLSelectQueryBlock block = (SQLSelectQueryBlock) sqlSelect.getQuery();
                selectItemList = block.getSelectList();
            } else if (sqlSelect.getQuery() instanceof SQLUnionQuery) {
                // NOTE(review): only the RIGHT branch of a UNION is inspected — this
                // assumes both branches share the same column list; verify.
                SQLUnionQuery unionQuery = (SQLUnionQuery) sqlSelect.getQuery();
                SQLSelectQueryBlock block = (SQLSelectQueryBlock) unionQuery.getRight();
                selectItemList = block.getSelectList();
            }

            notNull(
                    selectItemList,
                    String.format("select query type [%s] is not support", sqlSelect.getQuery().toString()));

            // One output column per select item; alias wins over the expression name.
            columnNames = new String[selectItemList.size()];
            for (int i = 0; i < selectItemList.size(); i++) {
                SQLSelectItem item = selectItemList.get(i);

                String columnName = null;

                if (item.getAlias() != null) {
                    columnName = item.getAlias();
                } else if (item.getExpr() != null) {
                    if (item.getExpr() instanceof SQLPropertyExpr) {
                        // qualified name, e.g. t.col -> col
                        SQLPropertyExpr expr = (SQLPropertyExpr) item.getExpr();
                        columnName = expr.getName();
                    } else if (item.getExpr() instanceof SQLIdentifierExpr) {
                        SQLIdentifierExpr expr = (SQLIdentifierExpr) item.getExpr();
                        columnName = expr.getName();
                    }
                } else {
                    throw new RuntimeException(
                            String.format("grammatical analysis sql column [ %s ] failed", item.toString()));
                }

                // Covers expressions that are neither property nor identifier (e.g.
                // function calls without an alias).
                if (columnName == null) {
                    throw new RuntimeException(
                            String.format("grammatical analysis sql column [ %s ] failed", item.toString()));
                }

                columnNames[i] = columnName;
            }
        } catch (Exception e) {
            logger.warn(e.getMessage(), e);
            return null;
        }

        return columnNames;
    }

    /**
     * try to execute sql to resolve column names
     *
     * <p>The statement is wrapped in {@code SELECT t.* FROM ( sql ) t WHERE 0 = 1}
     * so only result-set metadata is fetched, never rows.
     *
     * @param baseDataSource the database connection parameters
     * @param sql            sql for data synchronization
     * @return column name array, or {@code null} when resolution fails
     */
    public String[] tryExecuteSqlResolveColumnNames(BaseDataSource baseDataSource, String sql) {
        String[] columnNames;
        sql = String.format("SELECT t.* FROM ( %s ) t WHERE 0 = 1", sql);
        // Strip statement terminators; a ';' inside the subquery breaks the wrapper.
        sql = sql.replace(";", "");

        try (Connection connection =
                     DriverManager.getConnection(
                             baseDataSource.getJdbcUrl(),
                             baseDataSource.getUser(),
                             DESUtils.decrypt(baseDataSource.getPassword()));
             PreparedStatement stmt = connection.prepareStatement(sql);
             ResultSet resultSet = stmt.executeQuery()) {

            ResultSetMetaData md = resultSet.getMetaData();
            int num = md.getColumnCount();
            columnNames = new String[num];
            for (int i = 1; i <= num; i++) {
                // getColumnLabel (not getColumnName) honors "AS" aliases — the
                // generated "col as target" SQL relies on the alias being returned.
                columnNames[i - 1] = md.getColumnLabel(i).toLowerCase();
            }
        } catch (Exception e) {
            // Best-effort resolution; the caller fails via notNull() if needed.
            logger.warn(e.getMessage(), e);
            return null;
        }

        return columnNames;
    }

    /**
     * Returns the parsed DataX task parameters (populated in {@link #init()}).
     */
    @Override
    public AbstractParameters getParameters() {
        return dataXParameters;
    }

    /**
     * Throws a {@link RuntimeException} carrying {@code message} when {@code obj}
     * is {@code null}; otherwise does nothing.
     */
    private void notNull(Object obj, String message) {
        if (obj != null) {
            return;
        }
        throw new RuntimeException(message);
    }

    /**
     * Builds the command to launch DataX as a plain Java process.
     *
     * <p>Caveats (translated from the original notes):
     * <ol>
     *   <li>"bash -c" cannot be used — ProcessBuilder splits the trailing string on spaces;</li>
     *   <li>CLASSPATH only takes effect when set in ProcessBuilder's environment;</li>
     *   <li>-Djava.ext.dirs can replace -classpath;</li>
     *   <li>no single command token may contain spaces — split into multiple tokens.</li>
     * </ol>
     *
     * @param execCommand command token list to append to
     * @throws BusinessException when building the job json or the command fails
     */
    private void initDataxJavaCommand(List<String> execCommand) {
        try {
            // Resolve the datax installation directory from the entry-script path.
            String dataxHome = ConfigUtils.getInstance().getString("datax.home");
            if (StringUtils.isEmpty(dataxHome)) {
                dataxHome = "/joinata/plugins/datax/bin/datax.py";
            }
            String dataxInstallPath = getDataxInstallPath(dataxHome);

            // Build the datax job json configuration file.
            Map<String, Property> paramsMap =
                    ParamUtils.convert(
                            ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()),
                            taskExecutionContext.getDefinedParams(),
                            dataXParameters.getLocalParametersMap(),
                            taskExecutionContext.getCmdTypeIfComplement(),
                            taskExecutionContext.getScheduleTime());
            String jobJson = buildDataxJsonFile(paramsMap);

            // Assemble the JVM launch command, one token per argument.
            execCommand.add(DATAX_JAVA);
            execCommand.add("-server");
            execCommand.add("-Xms1024m");
            execCommand.add("-Xmx1024m");
            execCommand.add("-XX:+HeapDumpOnOutOfMemoryError");
            execCommand.add(String.format("-XX:HeapDumpPath=%s/log", dataxInstallPath));
            execCommand.add("-Dfile.encoding=UTF-8");
            execCommand.add("-Dlogback.statusListenerClass=ch.qos.logback.core.status.NopStatusListener");
            execCommand.add("-Djava.security.egd=file:///dev/urandom");
            execCommand.add("-Ddatax.home=" + dataxInstallPath);
            execCommand.add("-Dlogback.configurationFile=" + dataxInstallPath + "/conf/logback.xml");
            execCommand.add("-Djava.ext.dirs=" + dataxInstallPath + "/lib/");
            execCommand.add("-Dlog.file.name=" + getDatxLogFileName(jobJson));
            execCommand.add("com.alibaba.datax.core.Engine");
            execCommand.add("-mode");
            execCommand.add("standalone");
            execCommand.add("-jobid");
            execCommand.add("-1");
            execCommand.add("-job");
            execCommand.add(jobJson);
        } catch (Exception e) {
            // Log the full stack trace first: BusinessException(String) drops the
            // original cause, which previously made these failures undiagnosable.
            logger.error("init datax java command failed", e);
            throw new BusinessException(e.getMessage());
        }
    }

    /**
     * Derives the datax log file name from the job json path: the last 20 characters
     * (or the whole path when shorter), with '/' and '.' replaced by '_'.
     *
     * <p>Example: ".../402880a47f54962c017f6da5ed360365_job.json" becomes
     * "da5ed360365_job_json".
     *
     * @param jobJson absolute path of the job json file
     * @return sanitized log file name
     */
    private String getDatxLogFileName(String jobJson) {
        int len = jobJson.length();
        String tail = len < 20 ? jobJson : jobJson.substring(len - 20);
        return tail.replace('/', '_').replace('.', '_');
    }

    /**
     * Resolves the DataX installation directory from the entry-script path,
     * e.g. "/joinata/plugins/datax/bin/datax.py" yields "/joinata/plugins/datax".
     *
     * @param pythonHome path of the datax entry script (datax.home)
     * @return the installation directory (two levels above the script)
     * @throws BusinessException when the path is empty or has no grandparent directory
     */
    private String getDataxInstallPath(String pythonHome) {
        if (StringUtils.isEmpty(pythonHome)) {
            throw new BusinessException("未配置datax.home");
        }

        // new File(...) never returns null — the old null checks on it were dead.
        // What CAN be null are the parent paths (e.g. for a bare file name), which
        // previously caused an NPE / a silent null return instead of a clear error.
        String binDir = new File(pythonHome).getParent();
        if (binDir == null) {
            throw new BusinessException("未配置datax.home");
        }

        String installDir = new File(binDir).getParent();
        if (installDir == null) {
            throw new BusinessException("未配置datax.home");
        }
        return installDir;
    }

    /**
     * Prepares a timestamp-based incremental sync.
     *
     * <p>Reads the last sync watermark from Redis and the current MAX(timestamp) of
     * the source table, then either publishes DATA_START_TIME / DATA_END_TIME local
     * parameters describing the sync window, or flags the task as a no-op
     * ({@code stop = true}) when there is nothing new to sync. The method name typo
     * ("Filed") is kept for compatibility with existing callers.
     *
     * @throws RuntimeException when querying the source table fails
     */
    private void handleTimeFiledIncrementType() {
        Map<String, Property> localParamsMap = dataXParameters.getLocalParametersMap();
        if (localParamsMap == null) {
            localParamsMap = new HashMap<>();
        }

        String cacheKey =
                avicit.bdp.core.constant.Constants.REDIS_KEY_INCREMENT_LAST_SYNC_TIME
                        + taskExecutionContext.getTaskName();

        // Last sync watermark; defaults to the start time configured in the UI.
        String lastSyncTimeStr = redisCacheHelper.get(cacheKey);
        logger.info("从缓存【{}】获取上次同步时间【{}】", cacheKey, lastSyncTimeStr);

        // Query the current maximum value of the source table's timestamp column.
        // %s (not the original %S): the alias must match the lowercase lookup key
        // below, otherwise case-preserving drivers return null and the incremental
        // window silently never advances.
        String MAX_TIME_VALUE = "max_time_value";
        String sql =
                String.format(
                        "SELECT MAX(%s) AS %s FROM %s",
                        dataXParameters.getSourceIncrementColumn(),
                        MAX_TIME_VALUE,
                        dataXParameters.getSourceTable());

        String maxTimeFromTableStr = null;
        try {
            DataSourceDTO sourceDatasource =
                    processService.findDataSourceById(dataXParameters.getDataSource());
            // try-with-resources: the connection previously leaked on every invocation.
            try (Connection connection = DBUtils.buildConnection(sourceDatasource)) {
                List<Map<String, Object>> resultList = DBUtils.getMapsBySql(sql, connection);
                if (CollectionUtils.isNotEmpty(resultList)) {
                    Map<String, Object> row = resultList.get(0);
                    // Some databases fold unquoted aliases to upper case; try both keys.
                    Object maxValue = row.get(MAX_TIME_VALUE);
                    if (maxValue == null) {
                        maxValue = row.get("MAX_TIME_VALUE");
                    }
                    // toString() instead of a String cast: MAX() over a time column may
                    // come back as java.sql.Timestamp depending on the driver.
                    // TODO(review): confirm DateUtils.stringToDate accepts that format.
                    maxTimeFromTableStr = maxValue == null ? null : maxValue.toString();
                    logger.info("从源表【{}】查询到最大时间值为【{}】", dataXParameters.getSourceTable(), maxTimeFromTableStr);
                } else {
                    // No row at all: nothing to sync, end the task.
                    stop = true;
                    return;
                }
            }
        } catch (Exception e) {
            logger.error("获取表中最大时间值异常", e);
            throw new RuntimeException(e);
        }

        if (StringUtils.isEmpty(maxTimeFromTableStr)) {
            return;
        }

        if (StringUtils.isEmpty(lastSyncTimeStr)) {
            // First run: start from the table's current maximum.
            lastSyncTimeStr = maxTimeFromTableStr;
        }

        // Compute the sync window [startTime, endTime].
        Date lastSyncTime = DateUtils.stringToDate(lastSyncTimeStr);
        Date maxTimeFromTable = DateUtils.stringToDate(maxTimeFromTableStr);
        if (lastSyncTime.getTime() < maxTimeFromTable.getTime()) {
            String startTime = DateUtils.getDateByString(lastSyncTime);
            String endTime = DateUtils.getDateByString(maxTimeFromTable);
            // Persisted to Redis by handle() after a successful run.
            taskLastSyncTime = endTime;
            Property startProperty =
                    new Property(ProcessConstants.DATA_START_TIME, null, null, startTime);
            Property endProperty = new Property(ProcessConstants.DATA_END_TIME, null, null, endTime);
            localParamsMap.put(ProcessConstants.DATA_START_TIME, startProperty);
            localParamsMap.put(ProcessConstants.DATA_END_TIME, endProperty);
            dataXParameters.setLocalParams(new ArrayList<>(localParamsMap.values()));
        } else {
            // Nothing newer than the last watermark: skip this run.
            stop = true;
        }
    }
}
