package com.sh.data.engine.domain.integration.offline.service;

import cn.hutool.core.util.XmlUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.sh.data.engine.common.constants.Constants;
import com.sh.data.engine.common.enumDefinition.DSType;
import com.sh.data.engine.common.enumDefinition.DataXJobTypeEnum;
import com.sh.data.engine.common.enumDefinition.OfflineSyncTaskStatus;
import com.sh.data.engine.common.exception.BusinessException;
import com.sh.data.engine.common.util.ScheduleCycleUtil;
import com.sh.data.engine.domain.base.model.datax.*;
import com.sh.data.engine.domain.base.model.datax.DataX.Channel;
import com.sh.data.engine.domain.base.model.datax.DataX.Core;
import com.sh.data.engine.domain.base.model.datax.DataX.Speed;
import com.sh.data.engine.domain.base.model.datax.DataX.Transport;
import com.sh.data.engine.domain.base.model.entity.TbTaskFieldMapEntity;
import com.sh.data.engine.domain.base.model.enums.Database;
import com.sh.data.engine.domain.common.service.CommonService;
import com.sh.data.engine.domain.integration.api.http.model.domain.TaskDomain;
import com.sh.data.engine.domain.integration.api.http.service.ApiHttpService;
import com.sh.data.engine.domain.integration.api.record.model.domain.ApiRunningRecordDomain;
import com.sh.data.engine.domain.integration.api.record.service.ApiRunningRecordService;
import com.sh.data.engine.domain.integration.api.taskconfig.model.domain.TaskConfigDomain;
import com.sh.data.engine.domain.integration.api.webservice.service.ApiWsService;
import com.sh.data.engine.domain.integration.datasource.model.domain.DataSourceDetailDomain;
import com.sh.data.engine.domain.integration.datasource.model.domain.DataSourceManagerDomain;
import com.sh.data.engine.domain.integration.datasource.service.DataSourceService;
import com.sh.data.engine.domain.integration.offline.model.domain.OfflineSyncDomain;
import com.sh.data.engine.domain.integration.offline.model.domain.OfflineSyncRunningRecordDomain;
import com.sh.data.engine.domain.integration.offline.model.enums.OfflineSyncExecuTypeEnum;
import com.sh.data.engine.domain.integration.offline.model.mapper.TbTaskFieldMapEntityMapper;
import com.sh.data.engine.domain.integration.offline.runner.DataXRunner;
import com.sh.data.engine.domain.shims.DbManagerFactory;
import com.sh.data.engine.domain.shims.db.BaseDbManager;
import com.sh.data.engine.domain.shims.db.DbOptions;
import com.sh.data.engine.domain.shims.db.model.FieldInfoDomain;
import com.sh.data.engine.domain.util.DBUtil;
import com.sh.data.engine.domain.util.VarParserUtil;
import com.sh.data.engine.infrastructure.config.DataXConfig;
import com.sh.data.engine.infrastructure.config.FileStorageConfiguration;
import com.sh.data.engine.job.core.context.XxlJobContext;
import com.sh.data.engine.job.core.context.XxlJobHelper;
import com.sh.data.engine.job.core.glue.GlueTypeEnum;
import com.sh.data.engine.job.core.util.ScriptUtil;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

import java.io.File;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

/**
 * @author binghe.yxh
 */
@Slf4j
@Service
public class DataXService {

    // Matches DM (Dameng) JDBC URLs, e.g. jdbc:dm://127.0.0.1:5236/SCHEMA, capturing the trailing schema part.
    private static final Pattern dmJdbcPattern = Pattern.compile("jdbc:dm://[\\d.]+:\\d+/(\\S+)");

    // Several collaborators are injected @Lazy — presumably to break circular bean
    // dependencies at startup (e.g. OfflineSyncService also depends on this service); confirm.
    @Lazy
    @Autowired
    private DataSourceService dataSourceService;

    @Lazy
    @Autowired
    private OfflineSyncService offlineSyncService;

    @Autowired
    private OfflineSyncRunningRecordService offlineSyncRunningRecordService;

    // Supplies the base directory under which generated DataX job JSON files are written.
    @Lazy
    @Autowired
    private FileStorageConfiguration fileStorageConfiguration;

    @Lazy
    @Autowired
    private ApiHttpService apiHttpService;

    @Lazy
    @Autowired
    private ApiWsService apiWsService;

    @Autowired
    private ApiRunningRecordService apiRunningRecordService;

    @Autowired
    private CommonService commonService;

    // DataX tuning defaults: channel count, batch/fetch sizes, speed limits.
    @Autowired
    private DataXConfig dataXConfig;

    // Optional override for the IP recorded on running records; when blank, the IP
    // resolved by getIpInfoByLogId() is used instead.
    @Value("${data.engine.localHostIp:}")
    private String localHostIp;

    // Live runners keyed by running-record id; consulted by the stop* methods to
    // cancel in-flight jobs. Concurrent because runs start/stop from different threads.
    private static final Map<Long, DataXRunner> taskCache = Maps.newConcurrentMap();

    // presumably marks GP (Greenplum) file-input mode — not referenced in this chunk; TODO confirm usage
    private static final int GP_FILE_INPUT = 1;
    @Autowired
    private TbTaskFieldMapEntityMapper tbTaskFieldMapEntityMapper;

    /**
     * Executes a DataX offline synchronization task (glue-groovy generates the JSON script plus a
     * shell script; execution happens via shell).
     *
     * <p>Flow: load and validate the task, persist a running record, render the DataX job JSON to
     * disk, check server memory, then launch the job on a background thread.
     *
     * @param taskId      id of the offline sync task to run
     * @param executeType execution type (0 = scheduled cycle, 1 = manual)
     * @return the running-record id (job_log id) as a String
     * @throws BusinessException if the task or its field mappings are missing
     */
    public String executeOfflineSync(Long taskId, Integer executeType) {
        OfflineSyncDomain offlineSyncDomain = offlineSyncService.getOfflineSyncById(taskId);
        if (Objects.isNull(offlineSyncDomain)) {
            throw new BusinessException("离线同步任务不存在，请检查后重试！");
        }

        List<OfflineSyncDomain.FieldMapping> fieldMappings = offlineSyncDomain.getFieldMappings();
        if (CollectionUtils.isEmpty(fieldMappings)) {
            throw new BusinessException("离线同步任务-数据表字段映射关系不存在，请检查任务配置后重试！");
        }

        // Record the job_log id into the offline-sync running record.
        // 1. Persist the running record.
        // executeType: 0 = scheduled cycle, 1 = manual

        // The configured localHostIp property, when present, overrides the resolved IP.
        String ipInfo = getIpInfoByLogId();
        String localHostIp = this.localHostIp;
        if (StringUtils.isNotBlank(localHostIp)) {
            ipInfo = localHostIp;
        }
        Date planTime = offlineSyncDomain.getPlanTime();
        log.info("当前执行数据集成的ip是:{}", ipInfo);
        OfflineSyncRunningRecordDomain runningRecord =
            OfflineSyncRunningRecordDomain.builder()
                .offlineSyncId(taskId)
                .startTime(new Date())
                .endTime(null)
                .sourceTable(offlineSyncDomain.getSourceTable())
                .targetTable(offlineSyncDomain.getTargetTable())
                .executeType(executeType)
                .ipAddress(ipInfo)
                .planTime(new Date())
                .creatorId(offlineSyncDomain.getCreatorId())
                .updaterId(offlineSyncDomain.getUpdaterId())
                .taskStatus(OfflineSyncTaskStatus.running.getTaskStatus())
                .taskName(offlineSyncDomain.getTaskName())
                .projectId(offlineSyncDomain.getProjectId())
                .build();

        // For scheduled (automatic) runs, compute the next planned execution time from the
        // task's schedule and persist it back onto the task.
        if (executeType.equals(OfflineSyncExecuTypeEnum.AUTOMIC.getStatus())) {
            OfflineSyncDomain.OfflineSyncSchedule schedule = offlineSyncDomain.getSchedule();
            Date nextPlanTime = offlineSyncDomain.getPlanTime();
            if (Objects.isNull(planTime)) {
                // First run (no plan time recorded yet): base the schedule on "now".
                nextPlanTime = new Date();
            }
            String startTimeHour = schedule.getStartTimeHour();
            String startTimeMin = schedule.getStartTimeMin();
            String endTimeHour = schedule.getEndTimeHour();
            String endTimeMin = schedule.getEndTimeMin();

            // Execution-window bounds are optional; stay null when the schedule leaves them blank.
            Integer startTimeH = null;
            Integer startTimeM = null;
            Integer endTimeH = null;
            Integer endTimeM = null;
            if (StringUtils.isNotBlank(startTimeHour)) {
                startTimeH = Integer.valueOf(schedule.getStartTimeHour());
            }
            if (StringUtils.isNotBlank(startTimeMin)) {
                startTimeM = Integer.valueOf(schedule.getStartTimeMin());
            }
            if (StringUtils.isNotBlank(endTimeHour)) {
                endTimeH = Integer.valueOf(schedule.getEndTimeHour());
            }
            if (StringUtils.isNotBlank(endTimeMin)) {
                endTimeM = Integer.valueOf(schedule.getEndTimeMin());
            }
            runningRecord.setPlanTime(nextPlanTime);
            Date nextExecutionTime =
                ScheduleCycleUtil.getNextExecutionTime(
                    schedule.getCycle(),
                    schedule.getExecPoint(),
                    schedule.getEffectiveDateStart(),
                    schedule.getEffectiveDateEnd(),
                    nextPlanTime,
                    startTimeH,
                    startTimeM,
                    endTimeH,
                    endTimeM);
            // Mark the task itself as running and advance its plan time to the next cycle.
            offlineSyncDomain.setTaskRunStatus(OfflineSyncTaskStatus.running.getTaskStatus());
            offlineSyncDomain.setStartTime(new Date());
            offlineSyncDomain.setEndTime(null);
            offlineSyncDomain.setPlanTime(nextExecutionTime);
            offlineSyncService.updateOfflineSyncTask(offlineSyncDomain);
        }

        Long recordId = offlineSyncRunningRecordService.saveOfflineRunningRecord(runningRecord);
        runningRecord.setId(recordId);
        // Render the DataX job JSON to <dir>/json/<offline-sync-log-dir>/<recordId>.json,
        // removing any stale file left over from a previous run with the same record id.
        final String dir = fileStorageConfiguration.getDir();
        String jsonPath =
            String.format("%s/json/%s/%s.json", dir, Constants.LogFileExp.OFFLINE_SYNC_LOG, recordId);
        File file = new File(jsonPath);
        if (!file.getParentFile().exists()) {
            file.getParentFile().mkdirs();
        }
        if (file.exists()) {
            file.delete();
        }
        DataX dataX = this.buildDataXJob(offlineSyncDomain);
        String jobContent = this.setVars(dataX);
        try {
            FileUtils.writeStringToFile(file, jobContent, "UTF-8");
        } catch (IOException e) {
            // NOTE(review): a failed write is only logged; the runner would then start against
            // a missing/empty JSON file — confirm this is intended.
            log.error(e.getMessage());
        }
        DataXRunner dataXRunner =
            new DataXRunner(jsonPath, recordId, executeType, taskId, DataXJobTypeEnum.SYNC, taskCache);

        String msgOfMemory = "正在资源检查";
        boolean memoryUsageBelowThreshold = true;
        dataXRunner.writeLog(Lists.newArrayList(msgOfMemory), recordId);
        try {
            memoryUsageBelowThreshold = commonService.isMemoryUsageBelowThreshold();
        } catch (BusinessException e) {
            // Best-effort check: if it cannot be performed, assume memory is sufficient and proceed.
            memoryUsageBelowThreshold = true;
        }

        if (!memoryUsageBelowThreshold) {
            // Insufficient memory: log it and mark the run as stopped without starting the job.
            String noHavaMemory = "当前服务器资源,资源不足,任务停止。";
            dataXRunner.writeLog(Lists.newArrayList(noHavaMemory), recordId);
            dataXRunner.updateInfoAndLogStatus(OfflineSyncTaskStatus.stopped.getTaskStatus());
        } else {
            String noHavaMemory = "检测完毕，资源充足";
            dataXRunner.writeLog(Lists.newArrayList(noHavaMemory), recordId);
            // Cache the runner (so it can be stopped later) and execute asynchronously.
            Thread thread = new Thread(dataXRunner);
            taskCache.put(recordId, dataXRunner);
            thread.start();
        }
        return String.valueOf(recordId);
    }

    /**
     * Runs an API (HTTP) collection task through DataX: builds the job JSON, persists a running
     * record, writes the JSON to disk, checks server memory, then starts the job asynchronously.
     *
     * @param taskId      id of the API collection task
     * @param executeType execution type (0 = scheduled cycle, 1 = manual)
     * @param param       runtime parameters substituted into the generated DataX JSON
     * @return id of the created running record
     * @throws BusinessException if the task, its generated script, or its config is missing
     */
    public Long executeApiHttp(Long taskId, Integer executeType, Map<String, String> param) {
        log.info("taskId:{}开始执行", taskId);

        TaskDomain task = apiHttpService.getTaskById(taskId);
        if (Objects.isNull(task)) {
            throw new BusinessException("api采集任务不存在，请检查后重试！");
        }

        // TODO: enhance to support parameters / automatic generation
        // The job JSON is regenerated per run with the supplied runtime parameters.
        String jobContent = apiHttpService.buildApiDataxJson(taskId, param);
//    String jobContent = task.getDataxJobScript();
        if (StringUtils.isEmpty(jobContent)) {
            throw new BusinessException("api采集任务脚本不存在,请检查后重试！");
        }
        TaskConfigDomain taskConfig = task.getTaskConfigRequest();
        if (Objects.isNull(taskConfig)) {
            throw new BusinessException("api任务配置不存在,请检查后重试！");
        }

        // The configured localHostIp property, when present, overrides the resolved IP.
        String ipInfo = getIpInfoByLogId();
        String localHostIp = this.localHostIp;
        if (StringUtils.isNotBlank(localHostIp)) {
            ipInfo = localHostIp;
        }

        // Record the job_log id into the API running record.
        // 1. Persist the running record.
        // executeType: 0 = scheduled cycle, 1 = manual
        ApiRunningRecordDomain runningRecord =
            ApiRunningRecordDomain.builder()
                .apiId(taskId)
                .apiType("http")
                .startTime(new Date())
                .endTime(null)
                .executeType(executeType)
                .ipAddress(ipInfo)
                .taskStatus(OfflineSyncTaskStatus.running.getTaskStatus())
                .build();

        Long recordId = apiRunningRecordService.saveRecord(runningRecord);
        runningRecord.setId(recordId);
        if (executeType.equals(OfflineSyncExecuTypeEnum.AUTOMIC.getStatus())) {
            // NOTE(review): these mutations are never persisted — the update call below is
            // commented out — so this branch currently has no lasting effect; confirm.
            task.setTaskStatus(OfflineSyncTaskStatus.running.getTaskStatus());
            task.setStartTime(new Date());
            task.setEndTime(null);
//      apiHttpService.updateAfterStart(task.getId());
        }
        // Render the DataX job JSON to <dir>/json/<api-log-dir>/<recordId>.json,
        // removing any stale file from a previous run with the same record id.
        final String dir = fileStorageConfiguration.getDir();
        String jsonPath =
            String.format(
                "%s/json/%s/%s.json", dir, Constants.LogFileExp.OFFLINE_SYNC_API_LOG, recordId);
        File file = new File(jsonPath);
        if (!file.getParentFile().exists()) {
            file.getParentFile().mkdirs();
        }
        if (file.exists()) {
            file.delete();
        }

        try {
            FileUtils.writeStringToFile(file, jobContent, "UTF-8");
        } catch (IOException e) {
            // NOTE(review): a failed write is only logged; the runner would then start against
            // a missing/empty JSON file — confirm this is intended.
            log.error(e.getMessage());
        }
        DataXRunner dataXRunner =
            new DataXRunner(
                jsonPath, recordId, executeType, taskId, DataXJobTypeEnum.APIHTTP, taskCache);
        boolean memoryUsageBelowThreshold = true;
        try {
            memoryUsageBelowThreshold = commonService.isMemoryUsageBelowThreshold();
        } catch (BusinessException e) {
            // Best-effort check: if it cannot be performed, assume memory is sufficient and proceed.
            memoryUsageBelowThreshold = true;
        }

        String msgOfMemory = "正在资源检查";
        dataXRunner.writeLog(Lists.newArrayList(msgOfMemory), recordId);
        if (!memoryUsageBelowThreshold) {
            // Insufficient memory: log it and mark the run as stopped without starting the job.
            String noHavaMemory = "当前服务器资源,资源不足,任务停止。";
            dataXRunner.writeLog(Lists.newArrayList(noHavaMemory), recordId);
            dataXRunner.updateInfoAndLogStatus(OfflineSyncTaskStatus.stopped.getTaskStatus());
        } else {
            String noHavaMemory = "检测完毕，资源充足";
            dataXRunner.writeLog(Lists.newArrayList(noHavaMemory), recordId);
            // Cache the runner (so it can be stopped later) and execute asynchronously.
            Thread thread = new Thread(dataXRunner);
            taskCache.put(recordId, dataXRunner);
            thread.start();
        }
        return recordId;
    }

    /**
     * Runs an API (WebService) collection task through DataX: builds the job JSON, persists a
     * running record, writes the JSON to disk, checks server memory, then starts the job
     * asynchronously. Mirrors {@code executeApiHttp} but sources the task from ApiWsService.
     *
     * @param taskId      id of the API collection task
     * @param executeType execution type (0 = scheduled cycle, 1 = manual)
     * @param param       runtime parameters substituted into the generated DataX JSON
     * @return id of the created running record
     * @throws BusinessException if the task, its generated script, or its config is missing
     */
    public Long executeApiWs(Long taskId, Integer executeType, Map<String, String> param) {
        log.info("taskId:{}开始执行", taskId);
        com.sh.data.engine.domain.integration.api.webservice.model.domain.TaskDomain task =
            apiWsService.getTaskById(taskId);
        if (Objects.isNull(task)) {
            throw new BusinessException("api采集任务不存在，请检查后重试！");
        }

//    String jobContent = task.getDataxJobScript();
        // The job JSON is regenerated per run with the supplied runtime parameters.
        String jobContent = apiWsService.buildApiDataxJson(taskId, param);
        if (StringUtils.isEmpty(jobContent)) {
            throw new BusinessException("api采集任务脚本不存在,请检查后重试！");
        }
        TaskConfigDomain taskConfig = task.getTaskConfigRequest();
        if (Objects.isNull(taskConfig)) {
            throw new BusinessException("api任务配置不存在,请检查后重试！");
        }

        // Record the job_log id into the API running record.
        // 1. Persist the running record.
        // executeType: 0 = scheduled cycle, 1 = manual

        // The configured localHostIp property, when present, overrides the resolved IP.
        String ipInfo = getIpInfoByLogId();
        String localHostIp = this.localHostIp;
        if (StringUtils.isNotBlank(localHostIp)) {
            ipInfo = localHostIp;
        }
        ApiRunningRecordDomain runningRecord =
            ApiRunningRecordDomain.builder()
                .apiId(taskId)
                // NOTE(review): apiType is "http" even though this is the WebService path —
                // looks copy-pasted from executeApiHttp; confirm the intended value (e.g. "ws").
                .apiType("http")
                .startTime(new Date())
                .endTime(null)
                .executeType(executeType)
                .ipAddress(ipInfo)
                .taskStatus(OfflineSyncTaskStatus.running.getTaskStatus())
                .build();

        Long recordId = apiRunningRecordService.saveRecord(runningRecord);
        runningRecord.setId(recordId);
        if (executeType.equals(OfflineSyncExecuTypeEnum.AUTOMIC.getStatus())) {
            // NOTE(review): these mutations are never persisted — the update call below is
            // commented out — so this branch currently has no lasting effect; confirm.
            task.setTaskStatus(OfflineSyncTaskStatus.running.getTaskStatus());
            task.setStartTime(new Date());
            task.setEndTime(null);
//      apiWsService.updateAfterStart(task.getId());
        }
        // Render the DataX job JSON to <dir>/json/<api-log-dir>/<recordId>.json,
        // removing any stale file from a previous run with the same record id.
        final String dir = fileStorageConfiguration.getDir();
        String jsonPath =
            String.format(
                "%s/json/%s/%s.json", dir, Constants.LogFileExp.OFFLINE_SYNC_API_LOG, recordId);
        File file = new File(jsonPath);
        if (!file.getParentFile().exists()) {
            file.getParentFile().mkdirs();
        }
        if (file.exists()) {
            file.delete();
        }

        try {
            FileUtils.writeStringToFile(file, jobContent, "UTF-8");
        } catch (IOException e) {
            // NOTE(review): a failed write is only logged; the runner would then start against
            // a missing/empty JSON file — confirm this is intended.
            log.error(e.getMessage());
        }
        DataXRunner dataXRunner =
            new DataXRunner(
                jsonPath, recordId, executeType, taskId, DataXJobTypeEnum.APIHTTP, taskCache);
        boolean memoryUsageBelowThreshold = true;
        try {
            memoryUsageBelowThreshold = commonService.isMemoryUsageBelowThreshold();
        } catch (BusinessException e) {
            // Best-effort check: if it cannot be performed, assume memory is sufficient and proceed.
            memoryUsageBelowThreshold = true;
        }

        String msgOfMemory = "正在资源检查,检查内存是否足够";
        dataXRunner.writeLog(Lists.newArrayList(msgOfMemory), recordId);
        if (!memoryUsageBelowThreshold) {
            // Insufficient memory: log it and mark the run as stopped without starting the job.
            String noHavaMemory = "当前服务器资源,资源不足,任务停止。";
            dataXRunner.writeLog(Lists.newArrayList(noHavaMemory), recordId);
            dataXRunner.updateInfoAndLogStatus(OfflineSyncTaskStatus.stopped.getTaskStatus());
        } else {
            String noHavaMemory = "检测完毕，资源充足";
            dataXRunner.writeLog(Lists.newArrayList(noHavaMemory), recordId);
            // Cache the runner (so it can be stopped later) and execute asynchronously.
            Thread thread = new Thread(dataXRunner);
            taskCache.put(recordId, dataXRunner);
            thread.start();
        }
        return recordId;
    }

    /**
     * Stops a running API (HTTP) collection job and marks its running record as stopped.
     *
     * <p>If a live {@link DataXRunner} is cached for this log id it is signalled to stop and
     * evicted from the cache. In either case the persisted running record is updated to the
     * stopped status with an end time of "now".
     *
     * @param logId id of the running record (job_log id) to stop
     */
    public void stopApiHttp(Long logId) {
        DataXRunner dataXRunner = taskCache.get(logId);
        if (dataXRunner != null) {
            // Signal the in-flight job to stop and drop it from the runner cache.
            dataXRunner.stop();
            taskCache.remove(logId);
        }
        // Both the live-runner and no-runner paths persist the same stopped state
        // (the record may belong to a run that already finished or lives on another node).
        ApiRunningRecordDomain existingRecord = apiRunningRecordService.getApiRunningRecord(logId);
        ApiRunningRecordDomain update =
            ApiRunningRecordDomain.builder()
                .apiId(existingRecord.getApiId())
                .endTime(new Date())
                .taskStatus(OfflineSyncTaskStatus.stopped.getTaskStatus())
                .build();
        update.setId(logId);
        apiRunningRecordService.saveRecord(update);
    }

    /**
     * Stops a running API (WebService) collection job.
     *
     * <p>If a live {@link DataXRunner} is cached for this log id it is signalled to stop and
     * evicted from the cache; otherwise the persisted running record is marked as stopped.
     *
     * <p>NOTE(review): unlike {@code stopApiHttp}, the live-runner path here does not update the
     * running record — presumably the runner's own stop handling persists the status; confirm.
     *
     * @param logId id of the running record (job_log id) to stop
     */
    public void stopApiWs(Long logId) {
        DataXRunner dataXRunner = taskCache.get(logId);
        if (dataXRunner != null) {
            // Signal the in-flight job to stop and drop it from the runner cache.
            dataXRunner.stop();
            taskCache.remove(logId);
        } else {
            // No live runner: persist the stopped status directly on the record.
            ApiRunningRecordDomain update =
                ApiRunningRecordDomain.builder()
                    .endTime(new Date())
                    .taskStatus(OfflineSyncTaskStatus.stopped.getTaskStatus())
                    .build();
            update.setId(logId);
            apiRunningRecordService.saveRecord(update);
        }
    }

    /**
     * 创建datax
     *
     * @return
     */
    public DataX buildDataXJob(
        OfflineSyncDomain offlineSyncDomain /*, OfflineSyncDomain.TableMapping tableMapping*/) {
        // 1.初始化datax
        DataX dataX = new DataX();
        // 2.初始化 job
        DataX.Job job = new DataX.Job();
        // 2.1 初始化setting
        DataX.JobSetting setting = new DataX.JobSetting();
        DataX.JobSettingSpeed speed = new DataX.JobSettingSpeed();
        Integer channelNum =
            Optional.ofNullable(offlineSyncDomain.getChannel())
                .orElse(dataXConfig.getChannelNum());
        speed.setRecord(dataXConfig.getAllChannelSize());
        speed.setChannel(channelNum);
        setting.setSpeed(speed);
        if (offlineSyncDomain.getErrorNumLimit() != null && offlineSyncDomain.getErrorNumLimit() > 0) {
            DataX.JobSettingErrorLimit errorLimit = new DataX.JobSettingErrorLimit();
            errorLimit.setRecord(offlineSyncDomain.getErrorNumLimit());
            setting.setErrorLimit(errorLimit);
        }
        Core core =
            Core.builder()
                .transport(
                    Transport.builder()
                        .channel(
                            Channel.builder()
                                .speed(
                                    Speed.builder()
                                        .record(dataXConfig.getChannelSize())
                                        .build())
                                .build())
                        .build())
                .build();
        dataX.setCore(core);
        job.setSetting(setting);
        // 2.2 初始化 jobcontent
        List<DataX.JobContent> content = Lists.newArrayList();
        content.add(new DataX.JobContent());
        job.setContent(content);
        // 构建 3、reader
        // 关系型数据库
        String fromDsType = offlineSyncDomain.getSourceDsType();
        List<JobReaderConnection> readerConnections = Lists.newArrayList();
        DataSourceDetailDomain sourceDS =
            dataSourceService.getDataSourceDetailById(offlineSyncDomain.getSourceDsId(), false);
        if (DBUtil.checkRDBMS(fromDsType)) {
            DataSourceDetailDomain.RdbmsConfig rdbmsConfig = sourceDS.getRdbmsConfig();
            String datasourceVersion =
                Optional.ofNullable(rdbmsConfig).map(c -> c.getVersion()).orElse(null);
            JobReaderConnection jobConnection = new JobReaderConnection();
            jobConnection.setUsername(sourceDS.getUsername());
            jobConnection.setPassword(sourceDS.getPassword());
            List<String> jdbcs = Lists.newArrayList();
            jdbcs.add(sourceDS.getDsLink());
            jobConnection.setJdbcUrl(jdbcs);
            String table = offlineSyncDomain.getSourceTable();

            if (table.contains(".")
                && (fromDsType.equals(DSType.Oracle.name())
                || fromDsType.equals(DSType.Sap.name())
                || fromDsType.equals(DSType.DM.name())
                || fromDsType.equals(DSType.PostgreSQL.name()))
                || fromDsType.equals(DSType.KINGBASE8.name())) {
                String[] names = table.split("\\.", 2);
                table = "\"" + names[0] + "\"" + ".\"" + names[1] + "\"";
            } else if (table.contains(".") && fromDsType.equals(DSType.SQLServer.name())) {
                String[] names = table.split("\\.", 2);
                table = "\"" + names[0] + "\"" + ".[" + names[1] + "]";
            } else if (fromDsType.equals(DSType.MySQL.name()) && table.contains(".")) {
                table = "`" + table + "`";
            } else {
                if (table.contains("-")) {
                    table = "`" + table + "`";
                }
            }
            jobConnection.setTable(Lists.newArrayList(table));
            readerConnections.add(jobConnection);
            if (fromDsType.equals(DSType.MySQL.name())
                && StringUtils.isNotBlank(datasourceVersion)
                && datasourceVersion.startsWith("8.")) {
                fromDsType = fromDsType + "8";
            }
            DataX.JobContentReader reader = new DataX.JobContentReader();
            reader.setName(DataxReaderAndWriter.getReaderByName(fromDsType));

            JobContentReaderParameter parameter = new JobContentReaderParameter();

            parameter.setBatchSize(
                Optional.ofNullable(offlineSyncDomain.getBatchSize())
                    .orElse(dataXConfig.getBatchSize()));
            parameter.setFetchSize(
                Optional.ofNullable(offlineSyncDomain.getFetchSize())
                    .orElse(dataXConfig.getFetchSize()));
            parameter.setSplitPk(offlineSyncDomain.getSplitKey());
            parameter.setPassword(sourceDS.getPassword());
            parameter.setConnection(readerConnections);

            // 设置列
            List<String> columns = Lists.newArrayList();
            for (OfflineSyncDomain.FieldMapping field : offlineSyncDomain.getFieldMappings()) {
                if (fromDsType.equals(DSType.Oracle.name())
                    || fromDsType.equalsIgnoreCase(DSType.Sap.name())
                    || fromDsType.equalsIgnoreCase(DSType.SQLServer.name())
                    || fromDsType.equalsIgnoreCase(DSType.DM.name())
                    || fromDsType.equalsIgnoreCase(DSType.PostgreSQL.name())
                    || fromDsType.equalsIgnoreCase(DSType.KINGBASE8.name())) {
                    columns.add("\"" + field.getSourceField() + "\"");
                } else if (fromDsType.equals(DSType.MySQL.name())
                    || fromDsType.equals(DSType.MySQL.name() + "8")) {
                    columns.add("`" + field.getSourceField() + "`");
                } else {
                    columns.add(field.getSourceField());
                }
            }
            parameter.setColumn(columns);
            reader.setParameter(parameter);
            content.get(0).setReader(reader);
        } else if (DSType.Hive.name().equals(fromDsType)) { // hive
            JobReaderConnection jobConnection = new JobReaderConnection();
            List<String> jdbcs = Lists.newArrayList();
            String url = sourceDS.getDsLink();
            jdbcs.add(url);
            List<String> tables = Arrays.asList(offlineSyncDomain.getSourceTable());
            jobConnection.setTable(tables);
            jobConnection.setUsername(sourceDS.getUsername());
            jobConnection.setKeyTabPath("");
            jobConnection.setKrb5Conf("");
            jobConnection.setPassword(
                StringUtils.isBlank(sourceDS.getPassword())
                    ? ""
                    : sourceDS.getPassword());
            jobConnection.setJdbcUrl(jdbcs);
            readerConnections.add(jobConnection);

            DataX.JobContentReader reader = new DataX.JobContentReader();
            reader.setName(DataxReaderAndWriter.getReaderByName(fromDsType));
            JobContentReaderParameter parameter = new JobContentReaderParameter();
            // 设置列
            List<String> columns = Lists.newArrayList();
            for (OfflineSyncDomain.FieldMapping field : offlineSyncDomain.getFieldMappings()) {
                columns.add(field.getSourceField());
            }
            parameter.setColumn(columns);
            parameter.setBatchSize(
                Optional.ofNullable(offlineSyncDomain.getBatchSize())
                    .orElse(dataXConfig.getBatchSize()));
            parameter.setFetchSize(
                Optional.ofNullable(offlineSyncDomain.getFetchSize())
                    .orElse(dataXConfig.getFetchSize()));
            parameter.setSplitPk(offlineSyncDomain.getSplitKey());
            parameter.setConnection(readerConnections);
            reader.setParameter(parameter);
            content.get(0).setReader(reader);
        } else if (DSType.Mongodb.name().equals(fromDsType)) {
            DataX.JobContentReader reader = new DataX.JobContentReader();
            reader.setName(DataxReaderAndWriter.getReaderByName(fromDsType));

            JobContentReaderParameter parameter = new JobContentReaderParameter();
            parameter.setBatchSize(
                Optional.ofNullable(offlineSyncDomain.getBatchSize())
                    .orElse(dataXConfig.getBatchSize()));
            parameter.setFetchSize(
                Optional.ofNullable(offlineSyncDomain.getFetchSize())
                    .orElse(dataXConfig.getFetchSize()));
            parameter.setSplitPk(offlineSyncDomain.getSplitKey());
            // 设置列
            List<String> columns =
                offlineSyncDomain.getFieldMappings().stream()
                    .map(OfflineSyncDomain.FieldMapping::getSourceField)
                    .filter(StringUtils::isNotBlank)
                    .collect(Collectors.toList());
            parameter.setCollection(offlineSyncDomain.getSourceTable());
            parameter.setColumn(columns);
            parameter.setUrl(Lists.newArrayList(sourceDS.getDsLink()));
            parameter.setUsername(sourceDS.getUsername());
            parameter.setPassword(sourceDS.getPassword());
            parameter.setDatasource(sourceDS.getDbName());
            parameter.setAuthDb(sourceDS.getMongoConfig().getAuthDbName());
            reader.setParameter(parameter);
            content.get(0).setReader(reader);
        } else if (DSType.HBase.name().equals(fromDsType)) {
            DataX.JobContentReader reader = new DataX.JobContentReader();
            reader.setName(DataxReaderAndWriter.getReaderByName(fromDsType));
            JobContentReaderParameter parameter = new JobContentReaderParameter();
            parameter.setBatchSize(
                Optional.ofNullable(offlineSyncDomain.getBatchSize())
                    .orElse(dataXConfig.getBatchSize()));
            parameter.setFetchSize(
                Optional.ofNullable(offlineSyncDomain.getFetchSize())
                    .orElse(dataXConfig.getFetchSize()));
            parameter.setSplitPk(offlineSyncDomain.getSplitKey());
            String connectionProperties = sourceDS.getDsLink();
            JSONObject jsonObject = JSONObject.parseObject(connectionProperties);
            Set<String> keys = jsonObject.keySet();
            Map<String, String> configMap = new HashMap<>();
            for (String key : keys) {
                configMap.put(key, jsonObject.get(key).toString());
            }
            parameter.setHbaseConfig(configMap);
            List<ColumnInfo> typeList = Lists.newArrayList();
            for (OfflineSyncDomain.FieldMapping fieldMap : offlineSyncDomain.getFieldMappings()) {
                ColumnInfo hBaseColumnType = new ColumnInfo();
                hBaseColumnType.setName(fieldMap.getSourceField());
                hBaseColumnType.setType(fieldMap.getSourceFieldType());
                if (StringUtils.isNotBlank(fieldMap.getDateFormat())) {
                    hBaseColumnType.setFormat(fieldMap.getDateFormat());
                }
                typeList.add(hBaseColumnType);
            }
            parameter.setHbaseColumn(typeList);
            parameter.setTable(offlineSyncDomain.getSourceTable());
            // 先默认normal
            parameter.setMode("normal");
            reader.setParameter(parameter);
            content.get(0).setReader(reader);
        } else if (DSType.OpenTSDB.name().equals(fromDsType)) { // opentsdb
            DataX.JobContentReader reader = new DataX.JobContentReader();
            reader.setName(DataxReaderAndWriter.getReaderByName(fromDsType));
            JobContentReaderParameter parameter = new JobContentReaderParameter();
            parameter.setBatchSize(
                Optional.ofNullable(offlineSyncDomain.getBatchSize())
                    .orElse(dataXConfig.getBatchSize()));
            parameter.setFetchSize(
                Optional.ofNullable(offlineSyncDomain.getFetchSize())
                    .orElse(dataXConfig.getFetchSize()));
            parameter.setSplitPk(offlineSyncDomain.getSplitKey());
            reader.setParameter(parameter);
            content.get(0).setReader(reader);
            // TODO 设置时间序列开始结束时间
      /*parameter.setStart(opentsdb.getBeginTime().getTime() / 1000);
      parameter.setEnd(opentsdb.getEndTime().getTime() / 1000);*/
            // 设置连接
            List<JobReaderConnection> connections = Lists.newArrayList();
            JobReaderConnection jobConnection = new JobReaderConnection();
            List<String> jdbcs = Lists.newArrayList();
            jdbcs.add(sourceDS.getDsLink());
            jobConnection.setJdbcUrl(jdbcs);
            jobConnection.setTable(Arrays.asList(offlineSyncDomain.getSourceTable()));
            connections.add(jobConnection);
            parameter.setConnection(connections);
        } else if (DSType.MaxCompute.name().equals(fromDsType)) {
            DataX.JobContentReader reader = new DataX.JobContentReader();
            reader.setName(DataxReaderAndWriter.getReaderByName(fromDsType));
            //      JobContentReaderParameter parameter = new JobContentReaderParameter();
            MaxComputeReaderParameter parameter = new MaxComputeReaderParameter();
            reader.setMaxComputeReaderParameter(parameter);
            content.get(0).setReader(reader);
            // 设置列
            List<String> columns =
                offlineSyncDomain.getFieldMappings().stream()
                    .map(OfflineSyncDomain.FieldMapping::getSourceField)
                    .filter(StringUtils::isNotBlank)
                    .collect(Collectors.toList());
            parameter.setAccessId(sourceDS.getMaxComputeConfig().getAccessId());
            parameter.setAccessKey(sourceDS.getMaxComputeConfig().getAccessKey());
            parameter.setTables(Arrays.asList(offlineSyncDomain.getSourceTable()));
            parameter.setColumn(columns);
            parameter.setEndpoint(sourceDS.getMaxComputeConfig().getEndPoint());
            parameter.setTunnelEndPoint(sourceDS.getMaxComputeConfig().getTunnelEndPoint());
            parameter.setProject(sourceDS.getMaxComputeConfig().getProject());
        } else if (DSType.Text.name().equals(fromDsType)) {
            DataX.JobContentReader reader = new DataX.JobContentReader();
            reader.setName(DataxReaderAndWriter.getReaderByName(fromDsType));
            TextReaderParameter parameter = new TextReaderParameter();
            reader.setTextParameter(parameter);
            content.get(0).setReader(reader);
            // TODO filepath需要作为参数
            String filePath = offlineSyncDomain.getFilePath(); // 多个filepath之间使用,隔开
            List<String> filePaths = new ArrayList<>();
            if (org.apache.commons.lang.StringUtils.isNotBlank(filePath)) {
                String[] paths = filePath.split(",");
                for (String path : paths) {
                    File file = new File(path);
                    if (!file.exists()) {
                        continue;
                    }
                    if (file.isDirectory()) {
                        File[] files =
                            file.listFiles(
                                pathname -> {
                                    if (pathname.isFile()) {
                                        return true;
                                    }
                                    return false;
                                });
                        if (null != files && files.length > 0) {
                            for (File f : files) {
                                String absolutePath = f.getAbsolutePath();
                                absolutePath = absolutePath.replaceAll("\\\\", "/");
                                filePaths.add(absolutePath);
                            }
                        }
                    } else {
                        String absolutePath = file.getAbsolutePath();
                        absolutePath = absolutePath.replaceAll("\\\\", "/");
                        filePaths.add(absolutePath);
                    }
                }
            }
            parameter.setPath(filePaths);
            List<TbTaskFieldMapEntity> collect =
                offlineSyncDomain.getFieldMappings().stream()
                    .map(
                        fm -> {
                            TbTaskFieldMapEntity entity = new TbTaskFieldMapEntity();
                            entity.setSourceField(fm.getSourceField());
                            entity.setSourceFieldType(fm.getSourceFieldType());
                            entity.setTargetField(fm.getTargetField());
                            entity.setTargetFieldType(fm.getTargetFieldType());
                            entity.setIsSourceFieldPk(fm.getIsSourceFieldPk());
                            entity.setIsTargetFieldPk(fm.getIsTargetFieldPk());
                            return entity;
                        })
                    .collect(Collectors.toList());
            parameter.setColumnsNew(collect);
            // csv格式的文件要用GBK编码读取，否则会有乱码
            if (filePaths.get(0).endsWith(".csv")) {
                parameter.setEncoding("GBK");
            }
        }

        // 构建 4、writer
        String targetDsType = offlineSyncDomain.getTargetDsType();
        Integer ruleId = offlineSyncDomain.getDataReplaceRuleId();
        Long targetDsId = offlineSyncDomain.getTargetDsId();
        String targetTable = offlineSyncDomain.getTargetTable();
        Integer needTruncate = offlineSyncDomain.getIsTruncate();
        Integer pgText = offlineSyncDomain.getPgText();
        List<OfflineSyncDomain.FieldMapping> fieldMappings = offlineSyncDomain.getFieldMappings();
        List<OfflineSyncDomain.PartitionField> paritions = offlineSyncDomain.getParitions();

        List<DataX.JobContent> jobContents =
            buildWriter(
                content,
                targetDsType,
                ruleId,
                targetDsId,
                targetTable,
                needTruncate,
                fieldMappings,
                paritions,
                pgText,
                Optional.ofNullable(offlineSyncDomain.getBatchSize())
                    .orElse(dataXConfig.getBatchSize()));
        job.setContent(jobContents);
        // 5 where 条件
        String incFilter = offlineSyncDomain.getIncFilter();
        if (StringUtils.isNotEmpty(incFilter)) {
            job.getContent().get(0).getReader().getParameter().setWhere(incFilter);
        }
        dataX.setJob(job);
        return dataX;
    }

    /**
     * Builds the DataX writer half of an offline/API sync job and attaches it to
     * {@code content.get(0)}.
     *
     * <p>Three writer families are supported: the GP csv file-input writer (PostgreSQL with
     * {@code pgText == GP_FILE_INPUT}), generic RDBMS writers, and the Hive writer. Any other
     * target type leaves the content untouched.
     *
     * @param content job content list; the writer is set on the first element
     * @param targetDsType target datasource type name (see {@link DSType}); for MySQL 8.x
     *     this is internally rewritten to "MySQL8" to select the matching writer
     * @param ruleId data-replace rule id; RDBMS: 5 = merge/update (6 = insert); Hive:
     *     1 = overwrite, otherwise insert. May be null (treated as plain insert).
     * @param targetDsId target datasource id, resolved via {@code dataSourceService}
     * @param targetTable target table name, optionally schema-qualified with '.'
     * @param needTruncate 1 to truncate the target table before writing (RDBMS only)
     * @param fieldMappings source-to-target field mappings (target fields become columns)
     * @param paritions Hive partition fields/values (may be null or empty)
     * @param pgText flag selecting the GP csv file-input writer for PostgreSQL
     * @param batchSize writer batch size
     * @return the same content list with the writer populated
     */
    public List<DataX.JobContent> buildWriter(
        List<DataX.JobContent> content,
        String targetDsType,
        Integer ruleId,
        Long targetDsId,
        String targetTable,
        Integer needTruncate,
        List<OfflineSyncDomain.FieldMapping> fieldMappings,
        List<OfflineSyncDomain.PartitionField> paritions,
        Integer pgText,
        Integer batchSize) {
        DataSourceDetailDomain targetDS = dataSourceService.getDataSourceDetailById(targetDsId, false);
        // writer connections, shared by all branches below
        List<JobWriterConnection> writeConnections = Lists.newArrayList();
        String tableName = null;
        if (DSType.PostgreSQL.name().equals(targetDsType)
            && Objects.nonNull(pgText)
            && pgText.equals(GP_FILE_INPUT)) {
            // GP (Greenplum/PostgreSQL) csv file-input writer
            JobContentWriterParameter parameter = new JobContentWriterParameter();
            DataX.JobContentWriter writer = new DataX.JobContentWriter();
            writer.setName(DataxReaderAndWriter.GPONLYCSV.getWriter());
            JobWriterConnection jobConnection = new JobWriterConnection();
            jobConnection.setJdbcUrl(targetDS.getDsLink());
            if (targetTable.contains(".")) {
                // quote schema and table separately: schema.table -> "schema"."table"
                String[] names = targetTable.split("\\.", 2);
                targetTable = "\"" + names[0] + "\"" + ".\"" + names[1] + "\"";
            }
            jobConnection.setTable(Lists.newArrayList(targetTable));
            writeConnections.add(jobConnection);

            parameter.setConnection(writeConnections);
            parameter.setUsername(targetDS.getUsername());
            parameter.setPassword(targetDS.getPassword());
            // random name for the intermediate csv file
            String fileName = UUID.randomUUID().toString().replaceAll("-", "");
            parameter.setFileName(fileName);
            parameter.setWriteMode("truncate");
            String dir = fileStorageConfiguration.getDir();
            String gpPath = dataXConfig.getGpPath();
            dir = dir + gpPath;
            parameter.setPath(dir);
            parameter.setFileFormat("csv");
            writer.setParameter(parameter);
            content.get(0).setWriter(writer);
        } else if (DBUtil.checkRDBMS(targetDsType)) {
            boolean isMySql = false;
            if (targetDsType.equals(DSType.MySQL.name())) {
                isMySql = true;
                // MySQL 8.x uses a dedicated writer name ("MySQL8")
                String version = targetDS.getRdbmsConfig().getVersion();
                if (StringUtils.isNotBlank(version) && version.startsWith("8.")) {
                    targetDsType = targetDsType + "8";
                }
            }
            String schema = targetDS.getRdbmsConfig().getSchema();
            JobWriterConnection jobConnection = new JobWriterConnection();
            jobConnection.setJdbcUrl(targetDS.getDsLink());
            String table = targetTable;
            tableName = targetTable;
            // quote the (possibly schema-qualified) table name per target dialect
            if (table.contains(".")
                && (targetDsType.equals(DSType.Oracle.name())
                || targetDsType.equals(DSType.Sap.name())
                || targetDsType.equals(DSType.DM.name())
                || targetDsType.equals(DSType.PostgreSQL.name())
                || targetDsType.equals(DSType.KINGBASE8.name()))) {
                String[] names = table.split("\\.", 2);
                table = "\"" + names[0] + "\"" + ".\"" + names[1] + "\"";
                schema = names[0];
                tableName = names[1];
            } else if (table.contains(".") && targetDsType.equals(DSType.SQLServer.name())) {
                String[] names = table.split("\\.", 2);
                table = names[0] + ".[" + names[1] + "]";
                schema = names[0];
                tableName = names[1];
            } else if (targetDsType.equals(DSType.MySQL.name()) && table.contains(".")) {
                table = "`" + table + "`";
            } else {
                if (table.contains("-")) {
                    table = "`" + table + "`";
                }
            }
            jobConnection.setTable(Lists.newArrayList(table));
            writeConnections.add(jobConnection);
            DataX.JobContentWriter writer = new DataX.JobContentWriter();
            writer.setName(DataxReaderAndWriter.getWriterByName(targetDsType));
            JobContentWriterParameter parameter = new JobContentWriterParameter();
            // optionally truncate the target table before loading
            boolean isTruncate = needTruncate != null && needTruncate == 1;
            if (isTruncate) {
                String truncateTable = "truncate table " + table;
                if (targetDsType.equals(DSType.DB2.name())) {
                    // DB2 requires IMMEDIATE on truncate
                    truncateTable += " IMMEDIATE";
                }
                parameter.setPreSql(Arrays.asList(truncateTable));
            }
            // target columns, quoted per target dialect
            List<String> columns = Lists.newArrayList();
            for (OfflineSyncDomain.FieldMapping field : fieldMappings) {
                columns.add(quoteField(targetDsType, field.getTargetField()));
            }
            parameter.setColumn(columns);
            parameter.setBatchSize(batchSize);
            // merge rule: 5 = update/merge, 6 = plain insert.
            // Objects.equals avoids the NPE the old "ruleId == 5" unboxing had when ruleId is null.
            if (Objects.equals(5, ruleId)) {
                if (isMySql) {
                    parameter.setWriteMode("replace");
                } else {
                    // for non-MySQL targets, look up the table's primary keys to drive the merge
                    BaseDbManager dbManager =
                        getDbManager(
                            targetDsType,
                            targetDS.getUsername(),
                            targetDS.getPassword(),
                            targetDS.getDsLink());
                    List<FieldInfoDomain> fieldList = Lists.newArrayList();
                    List<String> psList = Lists.newArrayList();
                    try {
                        fieldList = dbManager.getFieldList(targetDS.getDbName(), schema, tableName);
                    } catch (SQLException e) {
                        // best-effort: merge without PKs; keep the stack trace in the log
                        log.info("获取表信息失败:{}", e.getMessage(), e);
                    }

                    if (CollectionUtils.isNotEmpty(fieldList)) {
                        psList =
                            fieldList.stream()
                                .filter(FieldInfoDomain::isPk)
                                .map(FieldInfoDomain::getFieldName)
                                .collect(Collectors.toList());
                    }
                    parameter.setMerge(true);
                    List<String> pks = Lists.newArrayList();
                    for (String field : psList) {
                        pks.add(quoteField(targetDsType, field));
                    }
                    parameter.setPk(pks);
                }
            }
            parameter.setUsername(targetDS.getUsername());
            parameter.setPassword(targetDS.getPassword());
            parameter.setConnection(writeConnections);
            writer.setParameter(parameter);
            content.get(0).setWriter(writer);
        } else if (DSType.Hive.name().equals(targetDsType)) { // hive
            DataSourceManagerDomain managerInfo = dataSourceService.getManagerInfo(targetDsId);
            String dbName = managerInfo.getDbName();
            BaseDbManager dbManager = managerInfo.getDbManager();
            List<FieldInfoDomain> fieldList = Lists.newArrayList();
            try {
                fieldList = dbManager.getFieldList(dbName, targetTable);
            } catch (SQLException throwables) {
                log.error("离线同步，获取hive字段列表异常：", throwables);
                throw new BusinessException("离线同步，获取hive字段列表异常！");
            }
            // full hive column list (names and types) required by the writer
            List<String> allColumn = Lists.newArrayList();
            List<String> allColumnType = Lists.newArrayList();
            if (CollectionUtils.isNotEmpty(fieldList)) {
                for (FieldInfoDomain f : fieldList) {
                    allColumn.add(f.getFieldName());
                    allColumnType.add(f.getFieldType());
                }
            }
            List<String> tables = Arrays.asList(targetTable);
            DataX.JobContentWriter writer = new DataX.JobContentWriter();
            writer.setName(DataxReaderAndWriter.getWriterByName(targetDsType));
            JobContentWriterParameter parameter = new JobContentWriterParameter();
            // mapped target columns
            List<String> columns = Lists.newArrayList();
            fieldMappings.forEach(f -> columns.add(f.getTargetField()));
            parameter.setColumn(columns);
            // merge rule: 1 = overwrite, otherwise insert (null-safe)
            if (Objects.equals(1, ruleId)) {
                parameter.setWriteMode("overwrite");
            } else {
                parameter.setWriteMode("insert");
            }
            String url = targetDS.getDsLink();
            parameter.setUsername(targetDS.getUsername());
            parameter.setDbName(targetDS.getDbName());
            parameter.setKeyTabPath("");
            parameter.setKrb5Conf("");
            parameter.setPassword(
                StringUtils.isBlank(targetDS.getPassword()) ? "" : targetDS.getPassword());
            parameter.setBatchSize(batchSize);
            if (!CollectionUtils.isEmpty(paritions)) {
                List<String> partitions =
                    paritions.stream()
                        .map(OfflineSyncDomain.PartitionField::getFieldName)
                        .collect(Collectors.toList());
                List<String> partitionValues =
                    paritions.stream()
                        .map(OfflineSyncDomain.PartitionField::getFieldValue)
                        .collect(Collectors.toList());
                parameter.setPartition(partitions);
                parameter.setPatitionValues(partitionValues);
            }
            parameter.setAllColumn(allColumn);
            parameter.setAllColumnType(allColumnType);

            // read hive.metastore.uris from the datasource's uploaded hive-site.xml
            String metastoreUris = "";
            String hiveSiteAddress = targetDS.getHiveConfig().getHiveSiteAddress();
            Document document = XmlUtil.readXML(new File(hiveSiteAddress));
            NodeList nodeList = document.getElementsByTagName("property");
            for (int i = 0; i < nodeList.getLength(); i++) {
                Node item = nodeList.item(i);
                Map<String, Object> propertyMap = XmlUtil.xmlToMap(item);
                String name = MapUtils.getString(propertyMap, "name");
                if ("hive.metastore.uris".equals(name)) {
                    metastoreUris = MapUtils.getString(propertyMap, "value");
                    break;
                }
            }
            if (StringUtils.isEmpty(metastoreUris)) {
                throw new BusinessException("读取数据源hive-site.xml文件hive.metastore.uris配置失败，请重新上传！");
            }
            // TODO hdfsUrl should come from configuration, not this hard-coded address
            parameter.setHdfsUrl("hdfs://10.88.36.186:8021");
            parameter.setHiveMetastoreUrl(metastoreUris);
            // connection
            JobWriterConnection jobConnection = new JobWriterConnection();
            jobConnection.setJdbcUrl(url);
            jobConnection.setTable(tables);
            writeConnections.add(jobConnection);
            parameter.setConnection(writeConnections);
            writer.setParameter(parameter);
            content.get(0).setWriter(writer);
        }
        return content;
    }

    /**
     * Quotes a single identifier (column/field name) per the target database's convention:
     * double quotes for Oracle/DM/Sap/PostgreSQL/KINGBASE8, backticks for MySQL/MySQL8,
     * unquoted otherwise. Mirrors the case-sensitivity of the original inline checks.
     */
    private static String quoteField(String targetDsType, String field) {
        if (targetDsType.equals(DSType.Oracle.name())
            || targetDsType.equalsIgnoreCase(DSType.DM.name())
            || targetDsType.equalsIgnoreCase(DSType.Sap.name())
            || targetDsType.equalsIgnoreCase(DSType.PostgreSQL.name())
            || targetDsType.equalsIgnoreCase(DSType.KINGBASE8.name())) {
            return "\"" + field + "\"";
        }
        if (targetDsType.equals(DSType.MySQL.name())
            || targetDsType.equals(DSType.MySQL.name() + "8")) {
            return "`" + field + "`";
        }
        return field;
    }

    /**
     * Writes the given shell content to a script file under the DataX home directory and runs
     * it through XXL-Job's script executor, streaming output to the current job log file.
     *
     * @param taskId task id used to name the script file
     * @param shellContent shell script body to persist and execute
     * @return 0 on success, otherwise the script's non-zero exit value
     * @throws IOException if writing or executing the script file fails
     */
    @Deprecated
    public int buildAndExecuteShell(String taskId, String shellContent) throws IOException {
        GlueTypeEnum glueType = GlueTypeEnum.GLUE_SHELL;
        String scriptPath =
            String.join(File.separator, dataXConfig.getHome(), "shell", taskId + glueType.getSuffix());
        File scriptFile = new File(scriptPath);
        // (re)create the script file fresh on every run
        File scriptDir = scriptFile.getParentFile();
        if (!scriptDir.exists()) {
            scriptDir.mkdirs();
        }
        if (scriptFile.exists()) {
            scriptFile.delete();
        }
        ScriptUtil.markScriptFile(scriptPath, shellContent);

        XxlJobContext jobContext = XxlJobContext.getXxlJobContext();
        String logFileName = jobContext.getJobLogFileName();

        // script params: 0 = job param, 1 = shard index, 2 = shard total
        String[] scriptParams = {
            Optional.ofNullable(XxlJobHelper.getJobParam()).orElse(""),
            String.valueOf(jobContext.getShardIndex()),
            String.valueOf(jobContext.getShardTotal())
        };

        XxlJobHelper.log("----------- script file:" + scriptPath + " -----------");
        int exitValue = ScriptUtil.execToFile(glueType.getCmd(), scriptPath, logFileName, scriptParams);
        if (exitValue == 0) {
            XxlJobHelper.handleSuccess();
            return 0;
        }
        XxlJobHelper.handleFail("script exit value(" + exitValue + ") is failed");
        return exitValue;
    }

    /**
     * Resolves runtime variables in the DataX job's reader where-clause and in the writer's
     * partition values, then serializes the whole job to pretty-printed JSON.
     *
     * @param dataX assembled DataX job (standard reader parameter model)
     * @return the job rendered as pretty-printed JSON
     */
    public String setVars(DataX dataX) {
        DataX.JobContent firstContent = dataX.getJob().getContent().get(0);
        JobContentWriterParameter writerParam = firstContent.getWriter().getParameter();

        // resolve variables embedded in the reader's where clause
        String where = firstContent.getReader().getParameter().getWhere();
        if (StringUtils.isNotBlank(where)) {
            String resolvedWhere =
                VarParserUtil.getWhere(
                    where,
                    firstContent.getWriter().getName(),
                    writerParam.getConnection().get(0).getJdbcUrl(),
                    writerParam.getUsername(),
                    writerParam.getPassword(),
                    writerParam.getKrb5Conf(),
                    writerParam.getKeyTabPath());
            firstContent.getReader().getParameter().setWhere(resolvedWhere);
        }

        // resolve variables embedded in the writer's partition values
        if (CollectionUtils.isNotEmpty(writerParam.getPatitionValues())) {
            List<String> resolvedValues = Lists.newArrayList();
            for (String value : writerParam.getPatitionValues()) {
                resolvedValues.add(VarParserUtil.process(value, null, null));
            }
            writerParam.setPatitionValues(resolvedValues);
        }

        return JSON.toJSONString(dataX, true);
    }

    /**
     * Resolves runtime variables for a text-reader DataX job (where clause and partition
     * values) and serializes the job to JSON.
     *
     * @param dataX assembled DataX job using the text reader parameter model
     * @return the job as pretty-printed JSON with the "textParameter" node renamed to
     *     "parameter", the name the DataX engine expects
     */
    public String setVarsText(DataX dataX) {
        DataX.JobContent firstContent = dataX.getJob().getContent().get(0);
        JobContentWriterParameter writerParam = firstContent.getWriter().getParameter();

        // resolve variables embedded in the text reader's where clause
        String where = firstContent.getReader().getTextParameter().getWhere();
        if (StringUtils.isNotBlank(where)) {
            String resolvedWhere =
                VarParserUtil.getWhere(
                    where,
                    firstContent.getWriter().getName(),
                    writerParam.getConnection().get(0).getJdbcUrl(),
                    writerParam.getUsername(),
                    writerParam.getPassword(),
                    writerParam.getKrb5Conf(),
                    writerParam.getKeyTabPath());
            firstContent.getReader().getTextParameter().setWhere(resolvedWhere);
        }

        // resolve variables embedded in the writer's partition values
        if (CollectionUtils.isNotEmpty(writerParam.getPatitionValues())) {
            List<String> resolvedValues = Lists.newArrayList();
            for (String value : writerParam.getPatitionValues()) {
                resolvedValues.add(VarParserUtil.process(value, null, null));
            }
            writerParam.setPatitionValues(resolvedValues);
        }

        // rename the serialized node to what the engine expects
        return JSON.toJSONString(dataX, true).replace("textParameter", "parameter");
    }

    /**
     * Extracts the database name from a DM (Dameng) JDBC connection by matching the
     * connection's "url" client-info property against {@code dmJdbcPattern}.
     *
     * <p>Keeps the last capture-group match, mirroring the original scan-to-end behavior.
     * Returns {@code null} when the property is absent or does not match (the original
     * threw an NPE on a missing property; the redundant find()/reset() pre-pass is gone).
     *
     * @param conn open DM connection
     * @return the extracted database name, or null if it cannot be determined
     * @throws SQLException if reading the connection's client info fails
     */
    private String getDMDbName(Connection conn) throws SQLException {
        String url = conn.getClientInfo().getProperty("url");
        if (url == null) {
            return null;
        }
        String database = null;
        Matcher matcher = dmJdbcPattern.matcher(url);
        while (matcher.find()) {
            database = matcher.group(1);
        }
        return database;
    }

    /**
     * dataX离线同步
     *
     * @param
     * @return
     */
    public DataX buildDataXTextJob(OfflineSyncDomain offlineSyncDomain) {
        // 1.初始化datax
        DataX dataX = new DataX();
        // 2.初始化 job
        DataX.Job job = new DataX.Job();
        // 2.1 初始化setting
        DataX.JobSetting setting = new DataX.JobSetting();
        DataX.JobSettingSpeed speed = new DataX.JobSettingSpeed();
        speed.setChannel(
            Optional.ofNullable(offlineSyncDomain.getChannel())
                .orElse(dataXConfig.getChannelNum()));
        setting.setSpeed(speed);
        if (offlineSyncDomain.getErrorNumLimit() != null && offlineSyncDomain.getErrorNumLimit() > 0) {
            DataX.JobSettingErrorLimit errorLimit = new DataX.JobSettingErrorLimit();
            errorLimit.setRecord(offlineSyncDomain.getErrorNumLimit());
            setting.setErrorLimit(errorLimit);
        }
        job.setSetting(setting);
        // 2.2 初始化 jobcontent
        List<DataX.JobContent> content = Lists.newArrayList();
        content.add(new DataX.JobContent());
        job.setContent(content);

        // 构建 3、reader
        if (DSType.Text.name().equals(offlineSyncDomain.getSourceDsType())) {

            String filePath = offlineSyncDomain.getFilePath();

            List<String> filePaths = new ArrayList<>();

            if (StringUtils.isNotBlank(filePath)) {
                String[] paths = filePath.split(",");

                for (String path : paths) {
                    File file = new File(path);

                    if (!file.exists()) {
                        continue;
                    }

                    if (file.isDirectory()) {
                        File[] files =
                            file.listFiles(
                                pathname -> {
                                    if (pathname.isFile()) {
                                        return true;
                                    }
                                    return false;
                                });
                        if (null != files && files.length > 0) {
                            for (File f : files) {
                                String absolutePath = f.getAbsolutePath();
                                absolutePath = absolutePath.replaceAll("\\\\", "/");
                                filePaths.add(absolutePath);
                            }
                        }
                    } else {
                        String absolutePath = file.getAbsolutePath();
                        absolutePath = absolutePath.replaceAll("\\\\", "/");
                        filePaths.add(absolutePath);
                    }
                }
            }

            List<TbTaskFieldMapEntity> tbTaskFieldMapEntities =
                tbTaskFieldMapEntityMapper.map(offlineSyncDomain.getFieldMappings());
            job =
                job.buildTextReader(
                    filePaths,
                    tbTaskFieldMapEntities,
                    offlineSyncDomain.getSkipCount(),
                    offlineSyncDomain.getEncodeFormat());
        }

        // 构建 4、writer
        String targetDsType = offlineSyncDomain.getTargetDsType();
        Integer ruleId = offlineSyncDomain.getDataReplaceRuleId();
        DataSourceDetailDomain targetDS =
            dataSourceService.getDataSourceDetailById(offlineSyncDomain.getTargetDsId(), false);
        // 设置连接
        List<JobWriterConnection> writeConnections = Lists.newArrayList();

        // 关系型数据库
        if (DBUtil.checkRDBMS(targetDsType)) {
            boolean isMySql = false;
            if (targetDsType.equals(DSType.MySQL.name())) {
                isMySql = true;
                String version = targetDS.getRdbmsConfig().getVersion();
                if (StringUtils.isNotBlank(version) && version.startsWith("8.")) {
                    targetDsType = targetDsType + "8";
                }
            }

            JobWriterConnection jobConnection = new JobWriterConnection();
            jobConnection.setJdbcUrl(targetDS.getDsLink());
            String table = offlineSyncDomain.getTargetTable();
            if (targetDsType.equals(DSType.Oracle.name())) {
                String[] names = table.split("\\.");
                table = names[0] + ".\"" + names[1] + "\"";
            } else if (targetDsType.equals(DSType.PostgreSQL.name()) || targetDsType.equals(
                DSType.KINGBASE8.name()) || targetDsType.equals(DSType.DM.name())) {
                table = "\"" + offlineSyncDomain.getSchema() + "\"." + "\"" + table + "\"";
            } else if (targetDsType.equals(DSType.Sap.name())) {
                table = "\"" + offlineSyncDomain.getSchema() + "\"." + "\"" + table + "\"";
            } else {
                if (table.contains("-")) {
                    table = "`" + table + "`";
                }
            }

            jobConnection.setTable(Lists.newArrayList(table));
            writeConnections.add(jobConnection);

            DataX.JobContentWriter writer = new DataX.JobContentWriter();
            writer.setName(DataxReaderAndWriter.getWriterByName(targetDsType));
            JobContentWriterParameter parameter = new JobContentWriterParameter();

            // TODO 执行前清空表数据 MySQL MySQL8 SQLServer PostgreSQL DB2 Sap Oracle
            if (ruleId == 1) {
                // 对于文件的datax任务，只有两种形式。1是清除 2是插入。我现在统一用前端传的dataReplaceRuleId来判断
                offlineSyncDomain.setIsTruncate(1);
            }
            boolean isTruncate =
                offlineSyncDomain.getIsTruncate() != null && offlineSyncDomain.getIsTruncate() == 1;
            if (isTruncate) {
                String truncateTable = "truncate table " + table;
                if (targetDsType.equals(DSType.DB2.name())) {
                    truncateTable += " IMMEDIATE";
                }
                // System.out.println("truncate table \"public\".\"ods_file_import_8oE4\"");
                parameter.setPreSql(Arrays.asList(truncateTable));
            }

            // 设置列
            List<String> columns = Lists.newArrayList();
            for (OfflineSyncDomain.FieldMapping field : offlineSyncDomain.getFieldMappings()) {
                if (targetDsType.equals(DSType.Oracle.name())
                    || targetDsType.equalsIgnoreCase(DSType.DM.name())) {
                    columns.add("\"" + field.getTargetField() + "\"");
                } else if (targetDsType.equals(DSType.MySQL.name())
                    || targetDsType.equals(DSType.MySQL.name() + "8")) {
                    columns.add("`" + field.getTargetField() + "`");
                } else {
                    columns.add(field.getTargetField());
                }
            }
            parameter.setColumn(columns);
            parameter.setBatchSize(
                Optional.ofNullable(offlineSyncDomain.getBatchSize())
                    .orElse(dataXConfig.getBatchSize()));

            // 设置合并 5: 更新 6：insert
            if (ruleId == 5) {
                if (isMySql) {
                    parameter.setWriteMode("replace");
                } else {
                    parameter.setMerge(true);
                    List<String> pks = Lists.newArrayList();
                    for (OfflineSyncDomain.FieldMapping field : offlineSyncDomain.getFieldMappings()) {
                        if (field.getIsTargetFieldPk().equals(1)) {
                            if (targetDsType.equals(DSType.Oracle.name())
                                || targetDsType.equalsIgnoreCase(DSType.DM.name())) {
                                pks.add("\"" + field.getTargetField() + "\"");
                            } else {
                                pks.add(field.getTargetField());
                            }
                        }
                    }
                    parameter.setPk(pks);
                }
            }
            parameter.setUsername(targetDS.getUsername());
            parameter.setPassword(targetDS.getPassword());

            parameter.setConnection(writeConnections);
            writer.setParameter(parameter);
            content.get(0).setWriter(writer);

        } else if (DSType.Hive.name().equals(targetDsType)) { // hive

            DataSourceManagerDomain managerInfo =
                dataSourceService.getManagerInfo(offlineSyncDomain.getTargetDsId());
            String dbName = managerInfo.getDbName();
            BaseDbManager dbManager = managerInfo.getDbManager();
            //      TableInfoDomain tableInfoByTableName =
            //          dbManager.getTableInfoByTableName(dbName, tableMapping.getTargetTable());
            List<FieldInfoDomain> fieldList = Lists.newArrayList();
            try {
                fieldList = dbManager.getFieldList(dbName, offlineSyncDomain.getTargetTable());
            } catch (SQLException throwables) {
                log.error("离线同步，获取hive字段列表异常：", throwables);
                throw new BusinessException("离线同步，获取hive字段列表异常！");
            }

            List<String> partitions = Lists.newArrayList();
            List<String> allColumn = Lists.newArrayList();
            List<String> allColumnType = Lists.newArrayList();

            if (CollectionUtils.isNotEmpty(fieldList)) {
                fieldList.stream()
                    .forEach(
                        f -> {
                            allColumn.add(f.getFieldName());
                            allColumnType.add(f.getFieldType());
                            if (f.getIsPartition()) {
                                partitions.add(f.getFieldName());
                            }
                        });
            }

            List<String> tables = Arrays.asList(offlineSyncDomain.getTargetTable());
            List<String> paritionValues = Lists.newArrayList();
            //      tableMapping.getParitionFields().stream().forEach(p ->
            // paritionValues.add(p.getFieldValue()));

            DataX.JobContentWriter writer = new DataX.JobContentWriter();
            writer.setName(DataxReaderAndWriter.getWriterByName(targetDsType));
            JobContentWriterParameter parameter = new JobContentWriterParameter();
            // 设置列
            List<String> columns = Lists.newArrayList();
            offlineSyncDomain.getFieldMappings().stream().forEach(f -> columns.add(f.getTargetField()));

            parameter.setColumn(columns);
            // 设置合并 1: 更新 2：insert
            if (ruleId == 1) {
                parameter.setWriteMode("overwrite");
            } else {
                parameter.setWriteMode("insert");
            }
            String url = targetDS.getDsLink();
            parameter.setUsername(targetDS.getUsername());
            parameter.setDbName(targetDS.getDbName());
            parameter.setKeyTabPath("");
            parameter.setKrb5Conf("");
            parameter.setPassword(
                StringUtils.isBlank(targetDS.getPassword()) ? "" : targetDS.getPassword());

            parameter.setBatchSize(
                Optional.ofNullable(offlineSyncDomain.getBatchSize())
                    .orElse(dataXConfig.getBatchSize()));

            //      parameter.setHiveType(tDatasourceHive.getHiveType());

            if (!org.springframework.util.CollectionUtils.isEmpty(partitions)) {
                List<String> partition = Lists.newArrayList();
                for (String field : partitions) {
                    partition.add(field);
                }
                parameter.setPartition(partition);
            }

            parameter.setAllColumn(allColumn);
            parameter.setAllColumnType(allColumnType);
            parameter.setPatitionValues(paritionValues);

            String metastoreUris = "";
            String hiveSiteAddress = targetDS.getHiveConfig().getHiveSiteAddress();
            Document document = XmlUtil.readXML(new File(hiveSiteAddress));
            NodeList nodeList = document.getElementsByTagName("property");
            for (int i = 0; i < nodeList.getLength(); i++) {
                Node item = nodeList.item(i);
                Map<String, Object> propertyMap = XmlUtil.xmlToMap(item);
                String name = MapUtils.getString(propertyMap, "name");
                if ("hive.metastore.uris".equals(name)) {
                    metastoreUris = MapUtils.getString(propertyMap, "value");
                    break;
                }
            }
            if (StringUtils.isEmpty(metastoreUris)) {
                throw new BusinessException(

                    "读取数据源hive-site.xml文件hive.metastore.uris配置失败，请重新上传！");
            }
            // TODO hdfsurl可以不设置
            parameter.setHdfsUrl("hdfs://10.88.36.186:8021");
            parameter.setHiveMetastoreUrl(metastoreUris);
            //      parameter.setNamenodes(hdfsNamenodes);
            //      parameter.setNamenodesAddrs(hdfsNamenodesaddr);
            //      parameter.setNameServices(hdfsNameservices);

            // 设置连接
            JobWriterConnection jobConnection = new JobWriterConnection();
            jobConnection.setJdbcUrl(url);
            jobConnection.setTable(tables);
            writeConnections.add(jobConnection);
            parameter.setConnection(writeConnections);

            writer.setParameter(parameter);
            content.get(0).setWriter(writer);
        }

        // 5 where 条件
        if (offlineSyncDomain.getIsInc() != null && offlineSyncDomain.getIsInc().equals(1)) {
            job.getContent().get(0).getReader().getParameter().setWhere(offlineSyncDomain.getIncFilter());
        }
        dataX.setJob(job);
        return dataX;
    }

    /**
     * Stops an offline sync run identified by its running-record id.
     *
     * <p>If the run is still tracked in the local task cache, its {@code DataXRunner} is stopped
     * and evicted. Otherwise only the persisted running record is marked as stopped.
     *
     * @param logId id of the offline sync running record
     */
    public void stopOfflineSync(Long logId) {
        DataXRunner runner = taskCache.get(logId);
        if (runner == null) {
            // Not in the local cache: just flip the persisted record to "stopped".
            OfflineSyncRunningRecordDomain record =
                OfflineSyncRunningRecordDomain.builder()
                    .endTime(new Date())
                    .taskStatus(OfflineSyncTaskStatus.stopped.getTaskStatus())
                    .build();
            record.setId(logId);
            offlineSyncRunningRecordService.updateRunningRecord(record);
            offlineSyncService.updateRunningRecord(logId);
            return;
        }
        // Found locally: stop the in-flight runner and drop it from the cache.
        runner.stop();
        taskCache.remove(logId);
    }

    /**
     * Builds a {@link BaseDbManager} for the given datasource type and JDBC credentials.
     *
     * @param dsType   datasource type name, resolved via {@link Database#from(String)}
     * @param username database user
     * @param password database password
     * @param jdbcLink JDBC connection URL
     * @return a db manager obtained from {@code DbManagerFactory}
     */
    private static BaseDbManager getDbManager(
        String dsType, String username, String password, String jdbcLink) {
        Database db = Database.from(dsType);
        return DbManagerFactory.getDbManager(db, new DbOptions(db, jdbcLink, username, password));
    }

    /**
     * Returns the IP address of the local host this service is running on.
     *
     * <p>NOTE(review): despite the name, this method neither takes nor uses a logId —
     * it always resolves the local machine's address; confirm against callers whether
     * a per-log lookup was intended.
     *
     * @return the local host address, or {@code "localhost"} if resolution fails
     */
    public String getIpInfoByLogId() {
        try {
            InetAddress localHost = InetAddress.getLocalHost();
            return localHost.getHostAddress();
        } catch (UnknownHostException e) {
            // Pass the throwable itself so the stack trace is preserved
            // (previously only e.getMessage() was logged).
            log.error("获取本地ip失败", e);
        }
        return "localhost";
    }
}
