package com.sh.data.engine.domain.integration.online.service;

import cn.hutool.core.convert.Convert;
import cn.hutool.crypto.digest.MD5;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper;
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.jayway.jsonpath.Configuration;
import com.jayway.jsonpath.Option;
import com.sh.data.engine.common.enumDefinition.DSType;
import com.sh.data.engine.common.enumDefinition.HiveSiteKeyEnum;
import com.sh.data.engine.common.enumDefinition.OfflineSyncTaskStatus;
import com.sh.data.engine.common.exception.BusinessException;
import com.sh.data.engine.common.util.*;
import com.sh.data.engine.domain.authority.service.DataAuthService;
import com.sh.data.engine.domain.base.model.PageResult;
import com.sh.data.engine.domain.base.model.UserContext;
import com.sh.data.engine.domain.base.model.enums.Database;
import com.sh.data.engine.domain.base.model.enums.DirectoryMenuEnum;
import com.sh.data.engine.domain.base.model.enums.RightEnum;
import com.sh.data.engine.domain.base.service.LogService;
import com.sh.data.engine.domain.common.service.DirectoryService;
import com.sh.data.engine.domain.dataasset.model.domain.TableInfoBizDomain;
import com.sh.data.engine.domain.dataasset.model.param.QueryTableParam;
import com.sh.data.engine.domain.dataasset.service.TableService;
import com.sh.data.engine.domain.datadev.offline.model.domain.OnlineTaskLogDomain;
import com.sh.data.engine.domain.datadev.offline.model.enums.OfflineDevJobStatusEnum;
import com.sh.data.engine.domain.integration.datasource.model.domain.*;
import com.sh.data.engine.domain.integration.datasource.service.DataSourceService;
import com.sh.data.engine.domain.integration.online.model.domain.*;
import com.sh.data.engine.domain.integration.online.model.param.OnlineTaskParam;
import com.sh.data.engine.domain.integration.online.model.param.PreviewDataForMqParam;
import com.sh.data.engine.domain.integration.online.model.param.StartOnlineKafkaTaskParam;
import com.sh.data.engine.domain.shims.DbManagerFactory;
import com.sh.data.engine.domain.shims.db.BaseDbManager;
import com.sh.data.engine.domain.shims.db.DbOptions;
import com.sh.data.engine.domain.shims.db.model.FieldInfoDomain;
import com.sh.data.engine.domain.shims.db.model.TableInfoDomain;
import com.sh.data.engine.domain.shims.hbase.manager.HBaseManager;
import com.sh.data.engine.domain.shims.hive.manager.HiveManager;
import com.sh.data.engine.domain.shims.influxdb.manager.InfluxDBManager;
import com.sh.data.engine.domain.shims.iotdb.domain.DeviceDomain;
import com.sh.data.engine.domain.shims.iotdb.manager.IoTDBSessionManager;
import com.sh.data.engine.domain.shims.kafka.util.KafkaUtil;
import com.sh.data.engine.domain.shims.mongo.manager.MongoManager;
import com.sh.data.engine.domain.shims.mqtt.util.MQTTUtil;
import com.sh.data.engine.domain.util.ConvertUtil;
import com.sh.data.engine.domain.util.LogUtil;
import com.sh.data.engine.domain.workspace.manager.service.UserService;
import com.sh.data.engine.infrastructure.config.FileStorageConfiguration;
import com.sh.data.engine.infrastructure.config.FlinkConfig;
import com.sh.data.engine.infrastructure.config.LogConfig;
import com.sh.data.engine.job.admin.core.model.XxlJobInfo;
import com.sh.data.engine.job.admin.core.route.ExecutorRouteStrategyEnum;
import com.sh.data.engine.job.admin.core.scheduler.MisfireStrategyEnum;
import com.sh.data.engine.job.admin.core.scheduler.ScheduleTypeEnum;
import com.sh.data.engine.job.admin.service.XxlJobService;
import com.sh.data.engine.job.core.biz.model.ReturnT;
import com.sh.data.engine.job.core.enums.ExecutorBlockStrategyEnum;
import com.sh.data.engine.job.core.glue.GlueTypeEnum;
import com.sh.data.engine.job.core.util.CmdUtil;
import com.sh.data.engine.repository.dao.common.entity.DirectoryEntity;
import com.sh.data.engine.repository.dao.integration.datasource.entity.DataSourceEntity;
import com.sh.data.engine.repository.dao.integration.online.FlinkDataSyncTaskMapper;
import com.sh.data.engine.repository.dao.integration.online.entity.FlinkDataSyncTaskEntity;
import com.sh.data.engine.repository.dao.integration.online.entity.FlinkDataSyncTaskEntity.FieldMapping;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.buildobjects.process.ProcBuilder;
import org.buildobjects.process.ProcResult;
import org.openjdk.nashorn.api.scripting.ScriptObjectMirror;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import javax.script.Invocable;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import static com.jayway.jsonpath.JsonPath.using;

/**
 * Service layer for real-time ("online") data synchronization tasks executed on Flink.
 *
 * <p>Responsibilities visible in this class: task CRUD ({@code insert}, {@code delete}),
 * lifecycle control ({@code publish}, {@code unPublish}, {@code reRun}), and metadata
 * lookups (databases, tables, fields) against the supported source/target systems.
 * Flink CDC sources get a companion xxl-job record; Kafka/MQTT sources are driven via
 * a separate client process and JMX.
 */
@Service
@Slf4j
public class FlinkDataSyncService
    extends ServiceImpl<FlinkDataSyncTaskMapper, FlinkDataSyncTaskEntity> {

    @Autowired
    private FlinkDataSyncTaskMapper flinkDataSyncTaskMapper;

    @Autowired
    private XxlJobService xxlJobService;

    @Autowired
    private DataSourceService dataSourceService;

    @Autowired
    private FlinkDataSyncExecService flinkDataSyncExecService;

    @Autowired
    private UserService userService;

    @Autowired
    private DirectoryService directoryService;

    @Autowired
    private LogService logService;

    @Autowired
    private DataAuthService dataAuthService;

    @Autowired
    private TableService tableService;

    @Autowired
    private FlinkConfig flinkConfig;

    @Autowired
    private FileStorageConfiguration fileStorageConfiguration;

    // Log directory of the xxl-job executor; the default matches the executor's
    // standard jobhandler path and can be overridden via configuration.
    @Value("${xxl.job.executor.logpath:/data/data_engine_file_storage/xxl-job/jobhandler}")
    private String xxlJobLogPath;

    @Autowired
    private LogConfig logConfig;

    // Installation directory of the standalone online-task client process.
    @Value("${data.engine.onlineTaskClient.clientHome:/data/onlinetask_client}")
    private String clientHome;

    // CSV file mapping real-time measurement points, consumed by the online-task client.
    @Value("${data.engine.realTimePointMappingCsvFile:/data/onlinetask_client/real_time_point_mapping.csv}")
    private String realTimePointMappingCsvFile;

    // Data source types accepted as the SOURCE side of a sync task.
    private static final List<String> SOURCE =
        Lists.newArrayList(
            DSType.MySQL.name(),
            DSType.TiDB.name(),
            DSType.PostgreSQL.name(),
            DSType.MatrixDB.name(),
            DSType.Oracle.name(),
            DSType.Mongodb.name(),
            DSType.Kafka.name(),
            DSType.MQTT.name());

    // Data source types accepted as the TARGET side of a sync task.
    private static final List<String> TARGET =
        Lists.newArrayList(
            DSType.MySQL.name(),
            DSType.TiDB.name(),
            DSType.PostgreSQL.name(),
            DSType.MatrixDB.name(),
            DSType.Kafka.name(),
            DSType.Hive.name(),
            DSType.MQTT.name(),
            DSType.IoTDB.name(),
            DSType.InfluxDB.name());

    // Target types supported when the source is a CDC (change-data-capture) source.
    // NOTE(review): not referenced in this chunk — presumably used further down; verify.
    private static final List<String> TARGET_FOR_CDC =
        Lists.newArrayList(DSType.MySQL.name(), DSType.PostgreSQL.name(), DSType.Kafka.name());

    /**
     * Lists all data sources of the given type within the current project,
     * converted to the Flink database view model.
     *
     * @param dsType data source type name (see {@code DSType})
     * @return matching data sources as {@link FlinkDatabaseDomain} instances
     */
    public List<FlinkDatabaseDomain> getDatabasesByDsType(String dsType) {
        DataSourceQueryDomain query =
            DataSourceQueryDomain.builder()
                .projectId(ServletUtils.getProjectId())
                .dsTypes(Collections.singletonList(dsType))
                .build();
        return Convert.toList(FlinkDatabaseDomain.class, dataSourceService.getDSByParams(query));
    }

    /**
     * Lists candidate data sources for one side of a sync task.
     *
     * @param direction 0 = source side, otherwise target side (required)
     * @param dsType    optional type filter; on the target side only MQTT/Kafka
     *                  are accepted as an explicit type
     * @return data sources visible to the current project, as Flink view models
     * @throws BusinessException if {@code direction} is missing, or no query could
     *                           be built for the given {@code dsType}
     */
    public List<FlinkDatabaseDomain> getDataBaseByDirection(Integer direction, String dsType) {
        Long projectId = ServletUtils.getProjectId();
        if (Objects.isNull(direction)) {
            throw new BusinessException("请指定数据源为来源还是目标类型");
        }
        DataSourceQueryDomain query = null;
        if (direction == 0) {
            query = DataSourceQueryDomain.builder().dsTypes(SOURCE).projectId(projectId).build();
        } else {
            if (StringUtils.isNotBlank(dsType)) {
                if (DSType.from(dsType).equals(DSType.MQTT) || DSType.from(dsType).equals(DSType.Kafka)) {
                    query = DataSourceQueryDomain.builder().dsTypes(TARGET).projectId(projectId).build();
                }
            } else {
                query = DataSourceQueryDomain.builder().dsTypes(TARGET).projectId(projectId).build();
            }
        }
        // Fail fast instead of passing a null query downstream: previously a target-side
        // call with a non-MQTT/Kafka dsType left query == null and caused an NPE inside
        // dataSourceService.getDSByParams.
        if (Objects.isNull(query)) {
            throw new BusinessException("不支持的数据源类型");
        }
        List<DataSourceMultiDomain> dsByParams = dataSourceService.getDSByParams(query);
        List<FlinkDatabaseDomain> domains = Lists.newArrayList();
        for (DataSourceMultiDomain ds : dsByParams) {
            FlinkDatabaseDomain flinkDatabaseDomain =
                ConvertUtil.copyProperties(ds, FlinkDatabaseDomain.class);
            flinkDatabaseDomain.setDsId(String.valueOf(ds.getId()));
            domains.add(flinkDatabaseDomain);
        }
        return domains;
    }

    /**
     * Lists tables (or collections/devices/measurements, depending on the backend)
     * available for the given data source, filtered by the caller's table-level
     * permissions when the source is platform-managed.
     *
     * @param dsId      data source id
     * @param schema    schema name (used by JDBC-style backends only)
     * @param direction 0 = source side (filter out NONE-permission tables),
     *                  1 = target side (keep only read-write tables); may be null
     * @return table descriptors sorted by table name
     * @throws BusinessException when the data source is unknown or metadata lookup fails
     */
    public List<FlinkTableDomain> getTables(Long dsId, String schema, Integer direction) {
        Long projectId = ServletUtils.getProjectId();
        String userId = UserContext.getUserId();
        DataSourceManagerDomain managerInfo = dataSourceService.getManagerInfo(dsId);
        if (Objects.isNull(managerInfo)) {
            throw new BusinessException("获取数据源信息失败，请检查后重试！");
        }
        List<TableInfoDomain> tableAndViewList = Lists.newArrayList();
        // Exactly one of these managers is expected to be non-null for a given source type.
        BaseDbManager baseDbManager = managerInfo.getDbManager();
        MongoManager mongoManager = managerInfo.getMongoManager();
        HBaseManager hBaseManager = managerInfo.getHBaseManager();
        IoTDBSessionManager ioTDBSessionManager = managerInfo.getIoTDBSessionManager();
        InfluxDBManager influxDBManager = managerInfo.getInfluxDBManager();
        String dbName = managerInfo.getDbName();
        try {
            if (!Objects.isNull(baseDbManager)) { // JDBC-style relational source
                // Historical note: SAP HANA used to be skipped here (too many tables);
                // that filter is currently disabled.
                tableAndViewList = baseDbManager.getTableAndViewList(dbName, schema);
            } else if (!Objects.isNull(influxDBManager)) {
                tableAndViewList = influxDBManager.getTableAndViewList(dbName);
            } else if (!Objects.isNull(mongoManager)
                || !Objects.isNull(hBaseManager)
                || !Objects.isNull(ioTDBSessionManager)) { // mongo or hbase or iotdb
                List<String> collectionNames = null;
                if (!Objects.isNull(mongoManager)) {
                    collectionNames = mongoManager.getCollectionList();
                } else if (!Objects.isNull(hBaseManager)) {
                    collectionNames = hBaseManager.listCollectionNames();
                } else if (!Objects.isNull(ioTDBSessionManager)) {
                    List<DeviceDomain> devices = ioTDBSessionManager.getDeviceList("");
                    collectionNames =
                        devices.stream().map(device -> device.getDevice()).collect(Collectors.toList());
                }
                // Wrap plain collection/device names in TableInfoDomain so all backends
                // share the same downstream representation.
                tableAndViewList =
                    collectionNames.stream()
                        .map(
                            c -> {
                                TableInfoDomain tableInfo = new TableInfoDomain();
                                tableInfo.setTableName(c);
                                tableInfo.setTblType(TableInfoDomain.TblType.MANAGED_TABLE);
                                return tableInfo;
                            })
                        .collect(Collectors.toList());
            }
        } catch (Exception throwables) {
            log.error("实时同步，获取数据库表异常", throwables);
            throw new BusinessException(
                "实时同步，获取数据库[" + dbName + "]表异常，错误原因:" + throwables.getMessage()
                    + ",请检查数据库连接是否正常！");
        }
        // isSystem == 1 presumably marks a platform-managed source whose tables are subject
        // to per-user data permissions — TODO confirm against DataSourceManagerDomain.
        Integer isSystem = managerInfo.getIsSystem();
        if (Objects.nonNull(isSystem) && isSystem.equals(1)) {
            Iterator<TableInfoDomain> iterator = tableAndViewList.iterator();
            List<TableInfoBizDomain> tableList =
                tableService.getTableList(projectId, new QueryTableParam());
            Map<String, Long> mapName =
                tableList.stream()
                    .collect(
                        Collectors.toMap(TableInfoBizDomain::getTableName, TableInfoBizDomain::getTblId));
            List<Long> tbInfoIds = tableList.stream().map(e -> e.getTblId()).collect(Collectors.toList());
            Map<Long, String> tblAuthMap = dataAuthService.getTblAuthMap(projectId, userId, tbInfoIds);
            while (iterator.hasNext()) {
                TableInfoDomain next = iterator.next();
                String tableName = next.getTableName();
                Long tblId = mapName.get(tableName);
                if (Objects.isNull(tblId)) {
                    // Not registered as a platform table: leave it visible unfiltered.
                    continue;
                }
                String tblAuth = tblAuthMap.get(tblId);
                if (StringUtils.isBlank(tblAuth)) {
                    continue;
                }
                // Source side: drop tables the user has no read/write permission on at all.
                if (Objects.nonNull(direction)
                    && direction == 0
                    && RightEnum.NONE.getCode().equals(tblAuth)) {
                    iterator.remove();
                }
                // Target side: drop tables without full read-write permission.
                if (Objects.nonNull(direction)
                    && direction == 1
                    && !RightEnum.RW.getCode().equals(tblAuth)) {
                    iterator.remove();
                }
            }
        }

        return tableAndViewList.stream()
            .map(this::parseToFlinkTableDomain)
            .sorted(Comparator.comparing(FlinkTableDomain::getTableName))
            .collect(Collectors.toList());
    }

    /**
     * Lists the fields/columns of a table for the given data source.
     *
     * <p>For PostgreSQL/Oracle/MatrixDB the table name may be schema-qualified
     * ("schema.table"); for IoTDB the name is a device path whose measurement points
     * are listed; for InfluxDB the measurement's fields are listed.
     *
     * @param dsId      data source id
     * @param tableName table name (optionally "schema.table"), device path, or measurement
     * @return field descriptors converted to the Flink view model
     * @throws BusinessException when the data source is unknown or metadata lookup fails
     */
    public List<FlinkTableFieldDomain> getTableFields(Long dsId, String tableName) {

        DataSourceManagerDomain managerInfo = dataSourceService.getManagerInfo(dsId);
        if (Objects.isNull(managerInfo)) {
            throw new BusinessException("获取数据源信息失败，请检查后重试！");
        }
        BaseDbManager dbManager = managerInfo.getDbManager();
        IoTDBSessionManager ioTDBSessionManager = managerInfo.getIoTDBSessionManager();
        InfluxDBManager influxDBManager = managerInfo.getInfluxDBManager();
        List<FieldInfoDomain> fieldList = null;

        if (Objects.nonNull(dbManager)) {
            String dbName = managerInfo.getDbName();
            String schema = StringUtils.EMPTY;
            if (managerInfo.getDsType().equalsIgnoreCase(DSType.PostgreSQL.name())
                || managerInfo.getDsType().equalsIgnoreCase(DSType.Oracle.name())
                || managerInfo.getDsType().equalsIgnoreCase(DSType.MatrixDB.name())) {
                // Split "schema.table" on the first dot only. Guard against a bare table
                // name: split[1] previously threw ArrayIndexOutOfBoundsException when the
                // name contained no dot.
                String[] split = tableName.split("\\.", 2);
                if (split.length == 2) {
                    schema = split[0];
                    tableName = split[1];
                }
            }

            try {
                if (StringUtils.isNotBlank(schema)) {
                    fieldList = dbManager.getFieldList(dbName, schema, tableName);
                } else {
                    fieldList = dbManager.getFieldList(dbName, tableName);
                }
            } catch (SQLException throwables) {
                log.error("获取表" + tableName + "字段信息失败", throwables);
                throw new BusinessException("获取表" + tableName + "字段信息失败");
            }
        } else if (Objects.nonNull(ioTDBSessionManager)) {
            try {
                // "device.**" selects all measurement points below the device path.
                fieldList = ioTDBSessionManager.getFiledList(String.join(".", tableName, "**"));
            } catch (Exception e) {
                log.error("获取device" + tableName + "测点信息失败", e);
                throw new BusinessException("获取device" + tableName + "测点信息失败");
            }
        } else if (Objects.nonNull(influxDBManager)) {
            String dbName = managerInfo.getDbName();
            try {
                fieldList = influxDBManager.getFieldList(dbName, tableName);
            } catch (Exception e) {
                log.error("获取表" + tableName + "字段信息失败", e);
                throw new BusinessException("获取表" + tableName + "字段信息失败");
            }
        }

        return Convert.toList(FlinkTableFieldDomain.class, fieldList);
    }

    //    @BizDataIndexAnnotation(
//        type = BizDataIndexTypeEnum.ONLINE_TASK,
//        dataIdFieldName = "id",
//        dataIdParamIndex = 0,
//        operateType = BizDataIndexAnnotation.OperateType.SAVE)

    /**
     * Creates or updates a real-time sync task. Branches on whether the id is null
     * (insert vs. update) and on the source type:
     * <ul>
     *   <li>CDC source (not Kafka/MQTT): also registers a frozen xxl-job shell job
     *       that will launch the Flink client.</li>
     *   <li>Kafka/MQTT source: stores the task directly; with default (non-JS) parsing
     *       and a non-MQ target, copies each field's jsonPath expression into its
     *       field mapping so the runtime can extract values.</li>
     *   <li>Non-null id: plain update of the existing record.</li>
     * </ul>
     *
     * @param flinkDataSyncTask task to insert (id == null) or update (id != null)
     * @throws BusinessException when the task name is already taken or xxl-job
     *                           registration fails
     */
    public void insert(FlinkDataSyncTaskDomain flinkDataSyncTask) {
        Date now = new Date();
        String userId = UserContext.getUserId();
        Long projectId = ServletUtils.getProjectId();

        if (Objects.isNull(flinkDataSyncTask.getId())
            && !flinkDataSyncTask.getSourceDsType().equalsIgnoreCase(DSType.Kafka.name())
            && !flinkDataSyncTask.getSourceDsType().equalsIgnoreCase(DSType.MQTT.name())) {
            boolean check = checkTaskName(flinkDataSyncTask.getTaskName());
            if (!check) {
                throw new BusinessException("任务名重复");
            }
            // Register the companion xxl-job (created frozen; publishing starts it later).
            XxlJobInfo xxlJobInfo = new XxlJobInfo();

            xxlJobInfo.setJobGroup(1);
            xxlJobInfo.setJobDesc("实时同步_" + flinkDataSyncTask.getTaskName() + "_");
            xxlJobInfo.setAddTime(now);
            xxlJobInfo.setUpdateTime(now);
            xxlJobInfo.setAuthor("1");
            xxlJobInfo.setScheduleType(ScheduleTypeEnum.NONE.name());

            xxlJobInfo.setTriggerStatus(0); // frozen (not scheduled)
            xxlJobInfo.setExecutorRouteStrategy(ExecutorRouteStrategyEnum.ROUND.name()); // routing strategy
            xxlJobInfo.setMisfireStrategy(MisfireStrategyEnum.DO_NOTHING.name()); // misfire strategy
            xxlJobInfo.setExecutorBlockStrategy(ExecutorBlockStrategyEnum.DISCARD_LATER.name()); // blocking strategy
            // Must be non-null: the executor builds a cmdArray from this and NPEs on null.
            xxlJobInfo.setExecutorParam(StringUtils.EMPTY);
            xxlJobInfo.setExecutorFailRetryCount(0); // retry count on failure
            xxlJobInfo.setExecutorTimeout(0); // task timeout (0 = unlimited)
            xxlJobInfo.setGlueType(GlueTypeEnum.GLUE_SHELL.name());
            xxlJobInfo.setGlueSource(flinkConfig.getCommand());
            xxlJobInfo.setGlueRemark(StringUtils.EMPTY);
            xxlJobInfo.setGlueUpdatetime(now);
            ReturnT<String> add = xxlJobService.add(xxlJobInfo);
            if (ReturnT.SUCCESS_CODE != add.getCode()) {
                log.error("添加xxlJob任务失败:{}", add.getMsg());
                throw new BusinessException(add.getMsg());
            }

            flinkDataSyncTask.setXxlJobInfoId(xxlJobInfo.getId());
            flinkDataSyncTask.setCreatorId(userId);
            flinkDataSyncTask.setUpdaterId(userId);
            flinkDataSyncTask.setProjectId(projectId);
            flinkDataSyncTask.setTaskStatus(OfflineSyncTaskStatus.torelease.getTaskStatus());
            FlinkDataSyncTaskEntity taskEntity =
                Convert.convert(FlinkDataSyncTaskEntity.class, flinkDataSyncTask);
            flinkDataSyncTaskMapper.insert(taskEntity);
        } else if (Objects.isNull(flinkDataSyncTask.getId())
            && (flinkDataSyncTask.getSourceDsType().equalsIgnoreCase(DSType.Kafka.name())
            || flinkDataSyncTask.getSourceDsType().equalsIgnoreCase(DSType.MQTT.name()))) {
            boolean check = checkTaskName(flinkDataSyncTask.getTaskName());
            if (!check) {
                throw new BusinessException("任务名重复");
            }
            flinkDataSyncTask.setCreatorId(userId);
            flinkDataSyncTask.setUpdaterId(userId);
            flinkDataSyncTask.setProjectId(projectId);
            flinkDataSyncTask.setTaskStatus(OfflineSyncTaskStatus.torelease.getTaskStatus());
            FlinkDataSyncTaskEntity convert =
                Convert.convert(FlinkDataSyncTaskEntity.class, flinkDataSyncTask);
            // Default (non-JS) parsing: persist each field's jsonPath expression inside its
            // field mapping, but only when the target is not Kafka/MQTT.
            if (convert.getUseJsFlag() == 0
                && !convert.getTargetDsType().equalsIgnoreCase(DSType.Kafka.name())
                && !convert.getTargetDsType().equalsIgnoreCase(DSType.MQTT.name())) {

                List<FlinkDataSyncTaskEntity.FieldMapping> fieldMappings = convert.getFieldMappings();
                List<FlinkDataSyncTaskEntity.FiledExpess> fieldExpressRequests =
                    convert.getFieldExpressRequests();
                // First expression wins when a field name is duplicated.
                Map<String, String> fieldExpMap =
                    fieldExpressRequests.stream()
                        .collect(
                            Collectors.toMap(
                                FlinkDataSyncTaskEntity.FiledExpess::getFieldName,
                                FlinkDataSyncTaskEntity.FiledExpess::getFieldExpress,
                                (first, second) -> first));
                for (FlinkDataSyncTaskEntity.FieldMapping fieldMapping : fieldMappings) {
                    String sourceField = fieldMapping.getSourceField();
                    if (fieldExpMap.containsKey(sourceField)) {
                        fieldMapping.setSourceFieldExpress(fieldExpMap.get(sourceField));
                    }
                }
            }
            flinkDataSyncTaskMapper.insert(convert);

        } else {
            // Existing task: update audit fields and persist.
            flinkDataSyncTask.setUpdateTime(now);
            flinkDataSyncTask.setUpdaterId(userId);
            FlinkDataSyncTaskEntity update =
                Convert.convert(FlinkDataSyncTaskEntity.class, flinkDataSyncTask);
            flinkDataSyncTaskMapper.updateById(update);
        }
    }

    /**
     * Publishes (starts) a real-time sync task.
     *
     * <p>For Flink CDC sources (anything other than Kafka/MQTT): generates the SQL file,
     * marks the task released/running, then submits the Flink job on a background thread;
     * on failure it stops any partially started jobs, writes the error to the task log,
     * and marks the run failed. For Kafka/MQTT sources it delegates to
     * {@code startOnlineKakfaTask} after rejecting already-running tasks.
     *
     * @param param publish parameters (task id, Kafka task options)
     * @throws BusinessException if the task does not exist, is already published,
     *                           or (Kafka/MQTT) is already running
     */
    @SneakyThrows
    public void publish(StartOnlineKafkaTaskParam param) {
        FlinkDataSyncTaskEntity flinkDataSyncTask =
            flinkDataSyncTaskMapper.selectById(param.getTaskId());
        // Null-check BEFORE dereferencing: the original checked Objects.nonNull only
        // after getTaskStatus() had already been called, so a missing task NPE'd here.
        if (Objects.isNull(flinkDataSyncTask)) {
            throw new BusinessException("任务不存在");
        }
        // 2 == released — presumably OfflineSyncTaskStatus.released; TODO confirm and
        // replace the magic number with the enum.
        if (flinkDataSyncTask.getTaskStatus() == 2) {
            throw new BusinessException("不能重复发布");
        }

        List<String> logLine = Lists.newArrayList();
        // Flink CDC task (source is neither Kafka nor MQTT)
        if (!flinkDataSyncTask.getSourceDsType().equalsIgnoreCase(DSType.Kafka.name())
            && !flinkDataSyncTask.getSourceDsType().equalsIgnoreCase(DSType.MQTT.name())) {
            String sqlFile = start(flinkDataSyncTask);
            // Optimistically mark released/running before async submission.
            LambdaUpdateWrapper<FlinkDataSyncTaskEntity> update =
                new UpdateWrapper<FlinkDataSyncTaskEntity>().lambda();
            update.eq(FlinkDataSyncTaskEntity::getId, param.getTaskId());
            update.set(
                FlinkDataSyncTaskEntity::getTaskStatus,
                OfflineSyncTaskStatus.released.getTaskStatus());
            update.set(FlinkDataSyncTaskEntity::getStartTime, new Date());
            update.set(FlinkDataSyncTaskEntity::getEndTime, null);
            update.set(FlinkDataSyncTaskEntity::getIsAlarmed, 0);
            update.set(
                FlinkDataSyncTaskEntity::getTaskRunStatus,
                OfflineSyncTaskStatus.running.getTaskStatus());
            this.update(update);
            Thread thread =
                new Thread(
                    () -> {
                        try {
                            FlinkUtil.StartJobParam startJobParam = new FlinkUtil.StartJobParam();
                            startJobParam.setEngineJobId(param.getTaskId());
                            startJobParam.setSqlFile(sqlFile);
                            startJobParam.setDebug(false);
                            FlinkUtil.FlinkJobResult flinkJobResult = FlinkUtil.startJob(startJobParam);
                            log.info(String.format("fuchen_test job info:%s",
                                flinkJobResult.getLogList().toString()));
                            LinkedHashSet<String> flinkJobIds = flinkJobResult.getFlinkJobIds();
                            // Record the submitted job ids, or mark the run failed when none came back.
                            LambdaUpdateWrapper<FlinkDataSyncTaskEntity> update2 =
                                new UpdateWrapper<FlinkDataSyncTaskEntity>().lambda();
                            update2.eq(FlinkDataSyncTaskEntity::getId, param.getTaskId());
                            if (CollectionUtils.isNotEmpty(flinkJobIds)) {
                                update2.set(FlinkDataSyncTaskEntity::getFlinkJobId,
                                    StringUtils.join(flinkJobIds, ","));
                                this.update(update2);

                                // Submission produced no job ids: publish did not succeed.
                            } else {
                                update2.set(
                                    FlinkDataSyncTaskEntity::getTaskRunStatus,
                                    OfflineSyncTaskStatus.failure.getTaskStatus());
                                this.update(update2);
                            }
                        } catch (Exception e) {
                            log.error(
                                "--------------------FlinkUtil.startJob异常---------------------node_id:"
                                    + param.getTaskId());
                            log.error(e.getMessage(), e);
                            logLine.add(e.getMessage());
                            // Best-effort cleanup: stop every job that may have started.
                            FlinkDataSyncTaskEntity byId = this.getById(param.getTaskId());
                            String flinkJobId = byId.getFlinkJobId();
                            if (StringUtils.isNotBlank(flinkJobId)) {
                                String[] split = flinkJobId.split(",");
                                for (String jobId : split) {
                                    try {
                                        FlinkUtil.stopJob(jobId, flinkJobId, false);
                                    } catch (Exception e1) {
                                        logLine.add(e1.getMessage());
                                        log.error("stop applicationId:{},jobId:{} error", jobId, flinkJobId);
                                    }
                                }
                            }
                            // Mark the run failed (e.g. on timeout) and close the run window.
                            LambdaUpdateWrapper<FlinkDataSyncTaskEntity> update3 =
                                new UpdateWrapper<FlinkDataSyncTaskEntity>().lambda();
                            update3.eq(FlinkDataSyncTaskEntity::getId, param.getTaskId());
                            update3.set(
                                FlinkDataSyncTaskEntity::getTaskStatus,
                                OfflineSyncTaskStatus.released.getTaskStatus());
                            update3.set(FlinkDataSyncTaskEntity::getStartTime, new Date());
                            update3.set(FlinkDataSyncTaskEntity::getEndTime, new Date());
                            update3.set(FlinkDataSyncTaskEntity::getIsAlarmed, 0);
                            update3.set(
                                FlinkDataSyncTaskEntity::getTaskRunStatus,
                                OfflineSyncTaskStatus.failure.getTaskStatus());
                            this.writeFlinkLog(param.getTaskId(), logLine);
                            this.update(update3);
                        }
                    });
            thread.start();
        } else {
            // Source is Kafka/MQTT: reject if the client is already working on it.
            Integer workingStatus = flinkDataSyncTask.getWorkingStatus();

            if (Objects.equals(workingStatus, OfflineSyncTaskStatus.running.getTaskStatus())) {

                throw new BusinessException("该任务正在运行中");
            }
            // The MD5-16 of the task id is used as a short, file-system-safe task key.
            MD5 md5 = MD5.create();
            String taskIdMD5 = md5.digestHex16(String.valueOf(flinkDataSyncTask.getId()));
            param.setTaskIdMD5(taskIdMD5);
            startOnlineKakfaTask(param);
        }
    }


    /**
     * Appends the given lines (UTF-8) to the per-task Flink log file
     * {@code <taskId>.log} under the configured Flink log directory.
     * No-op when {@code lines} is empty.
     *
     * @param taskId task identifier; must not be null
     * @param lines  log lines to append
     */
    @SneakyThrows
    public void writeFlinkLog(Long taskId, List<String> lines) {
        if (Objects.isNull(taskId)) {
            throw new IllegalArgumentException("invalid recordId : " + taskId);
        }
        if (CollectionUtils.isEmpty(lines)) {
            return;
        }
        File logFile =
            new File(flinkConfig.getFlinkLog() + IOUtils.DIR_SEPARATOR + (taskId + ".log"));
        // Serialize writers on a shared monitor so concurrent tasks do not interleave lines.
        synchronized (LogUtil.class) {
            if (!logFile.exists()) {
                logFile.createNewFile();
            }
            FileUtils.writeLines(logFile, "UTF-8", lines, true);
        }
    }

    /**
     * Re-runs a Flink CDC sync task: best-effort stop of the current Flink job,
     * then regenerates the SQL file, restarts the job, and resets the run window
     * on the task record.
     *
     * @param id task id
     */
    public void reRun(Long id) {
        FlinkDataSyncTaskEntity flinkDataSyncTask = flinkDataSyncTaskMapper.selectById(id);

        // Best-effort stop: a failure (e.g. job already gone) must not block the restart,
        // but is logged instead of being silently swallowed as before.
        try {
            stop(flinkDataSyncTask.getFlinkJobId());
        } catch (Exception e) {
            log.warn(
                "reRun: stop flink job {} failed, continuing with restart",
                flinkDataSyncTask.getFlinkJobId(),
                e);
        }

        // Regenerate the SQL file and start the job (return value — the file path — unused here).
        start(flinkDataSyncTask);

        // Reset the run window on the task record.
        flinkDataSyncTask = new FlinkDataSyncTaskEntity();
        flinkDataSyncTask.setId(id);
        flinkDataSyncTask.setStartTime(new Date());
        flinkDataSyncTask.setEndTime(null);
        flinkDataSyncTaskMapper.updateById(flinkDataSyncTask);
    }

    /**
     * Takes a task offline. For Flink CDC sources this best-effort stops the Flink
     * job(s) and resets the task to "to release"/"not running"; for Kafka/MQTT
     * sources it closes the JMX client connection and clears the JMX endpoint.
     *
     * @param id task id
     */
    public void unPublish(Long id) {
        FlinkDataSyncTaskEntity flinkDataSyncTask = flinkDataSyncTaskMapper.selectById(id);
        String sourceDsType = flinkDataSyncTask.getSourceDsType();
        if (!sourceDsType.equalsIgnoreCase(DSType.Kafka.name())
            && !sourceDsType.equalsIgnoreCase(DSType.MQTT.name())) {
            // Flink CDC task: best-effort stop; the DB status is reset below regardless,
            // so the failure is logged instead of being silently swallowed.
            if (StringUtils.isNotBlank(flinkDataSyncTask.getFlinkJobId())) {
                try {
                    stop(flinkDataSyncTask.getFlinkJobId());
                } catch (Exception e) {
                    log.warn(
                        "unPublish: stop flink job {} failed", flinkDataSyncTask.getFlinkJobId(), e);
                }
            }
            // Reset status fields on a fresh entity so only these columns are updated.
            flinkDataSyncTask = new FlinkDataSyncTaskEntity();
            flinkDataSyncTask.setId(id);
            flinkDataSyncTask.setTaskStatus(OfflineSyncTaskStatus.torelease.getTaskStatus());
            flinkDataSyncTask.setTaskRunStatus(OfflineSyncTaskStatus.unrunning.getTaskStatus());
            flinkDataSyncTask.setEndTime(new Date());
            flinkDataSyncTaskMapper.updateById(flinkDataSyncTask);
        } else {
            // Kafka/MQTT task: disconnect the JMX client and clear the endpoint.
            String jmxServer = flinkDataSyncTask.getJmxServer();
            Integer jmxPort = flinkDataSyncTask.getJmxPort();
            if (jmxPort != null) {
                JmxUtil.closeClient(jmxServer, jmxPort);
            }
            flinkDataSyncTask.setTaskStatus(OfflineSyncTaskStatus.torelease.getTaskStatus());
            flinkDataSyncTask.setTaskRunStatus(OfflineSyncTaskStatus.unrunning.getTaskStatus());
            flinkDataSyncTask.setJmxServer("");
            flinkDataSyncTask.setJmxPort(-1);
            flinkDataSyncTask.setEndTime(new Date());
            flinkDataSyncTaskMapper.updateById(flinkDataSyncTask);
        }
    }

    /**
     * Deletes a task. For Flink CDC sources this best-effort stops the Flink job(s)
     * and removes the companion xxl-job; for Kafka/MQTT sources it closes the JMX
     * client connection. The task record is always removed afterwards.
     *
     * @param id task id
     */
//    @BizDataIndexAnnotation(
//        type = BizDataIndexTypeEnum.ONLINE_TASK,
//        dataIdFieldName = "id",
//        dataIdParamIndex = 0,
//        operateType = BizDataIndexAnnotation.OperateType.DELETE)
    public void delete(Long id) {
        FlinkDataSyncTaskEntity flinkDataSyncTask = flinkDataSyncTaskMapper.selectById(id);
        String sourceDsType = flinkDataSyncTask.getSourceDsType();
        if (!sourceDsType.equalsIgnoreCase(DSType.Kafka.name())
            && !sourceDsType.equalsIgnoreCase(DSType.MQTT.name())) {
            // Flink CDC task: best-effort stop; deletion proceeds regardless, so the
            // failure is logged instead of being silently swallowed.
            if (StringUtils.isNotBlank(flinkDataSyncTask.getFlinkJobId())) {
                try {
                    stop(flinkDataSyncTask.getFlinkJobId());
                } catch (Exception e) {
                    log.warn(
                        "delete: stop flink job {} failed", flinkDataSyncTask.getFlinkJobId(), e);
                }
            }
            // Remove the companion xxl-job.
            xxlJobService.remove(flinkDataSyncTask.getXxlJobInfoId());
        } else {
            // Kafka/MQTT task: disconnect the JMX client.
            String jmxServer = flinkDataSyncTask.getJmxServer();
            Integer jmxPort = flinkDataSyncTask.getJmxPort();
            if (jmxPort != null) {
                JmxUtil.closeClient(jmxServer, jmxPort);
            }
        }
        // Remove the task record itself.
        flinkDataSyncTaskMapper.deleteById(flinkDataSyncTask.getId());
    }

    /**
     * 如何获取到 xxlJobLogId
     */

    /**
     * Generates the Flink SQL file for a task and returns its path.
     *
     * <p>Writes the task's generated statements to {@code {dir}/flink/{taskId}.sql}, replacing
     * any previous file.
     *
     * @param flinkTaskEntity task to generate SQL for
     * @return path of the written SQL file
     * @throws BusinessException when the SQL file cannot be written
     */
    public String start(FlinkDataSyncTaskEntity flinkTaskEntity) {
        try {
            List<String> flinkSqlList = generateFlinkSqlList(flinkTaskEntity);
            log.info("生成的flinksql：" + flinkSqlList.toString());
            String dir = fileStorageConfiguration.getDir();
            String flinkSqlFilePath =
                String.format("%s/%s/%s.sql", dir, "flink", flinkTaskEntity.getId());
            File flinkSqlFile = new File(flinkSqlFilePath);
            // Overwrite semantics: drop any stale file from a previous run (missing file is fine).
            flinkSqlFile.delete();
            CommonFileUtil.writeToFile(flinkSqlFile, flinkSqlList);
            return flinkSqlFilePath;
        } catch (IOException e) {
            // FIX: the cause was silently dropped; log it before surfacing the business error.
            // (Also fixed the "Fink" typo in the message.)
            log.error("生成Flink SQL文件失败", e);
            throw new BusinessException("生成Flink SQL失败");
        }
    }

    /**
     * Cancels a running Flink job via the Flink CLI ("flink cancel &lt;jobId&gt;").
     *
     * @param flinkJobLogId id of the Flink job to cancel
     * @throws Exception if the cancel command cannot be executed
     */
    private void stop(String flinkJobLogId) throws Exception {
        String cancelCommand =
            String.format("%sflink cancel %s", flinkConfig.getHome(), flinkJobLogId);
        CmdUtil.runCmd(cancelCommand);
    }

    /**
     * Replaces a task row using delete-then-insert semantics (清空再insert).
     *
     * @param flinkDataSyncTask domain object whose id identifies the row to replace
     */
//    @BizDataIndexAnnotation(
//        type = BizDataIndexTypeEnum.ONLINE_TASK,
//        dataIdFieldName = "id",
//        dataIdParamIndex = 0,
//        operateType = BizDataIndexAnnotation.OperateType.SAVE)
    public void update(FlinkDataSyncTaskDomain flinkDataSyncTask) {
        // Drop the old row first, then insert the converted domain as a fresh row.
        flinkDataSyncTaskMapper.deleteById(flinkDataSyncTask.getId());
        FlinkDataSyncTaskEntity replacement =
            Convert.convert(FlinkDataSyncTaskEntity.class, flinkDataSyncTask);
        flinkDataSyncTaskMapper.insert(replacement);
    }

  /*public List<String> generateFlinkSqlList(FlinkDataSyncTaskEntity flinkDataSyncTask) {
    List<String> list = new ArrayList<>();
    list.add(
        "create table `mongo_hive_source` ( `_id` STRING,`name` STRING,`time` TIMESTAMP,primary key (`_id`) not enforced ) WITH ( 'connector'='mongodb-cdc','hosts'='10.88.36.187:27017','username'='hufu','password'='123456','database'='hufu','collection'='mongo_hive','connection.options'='authSource=hufu');");
    list.add(
        "create table `mongo_hive_target` ( `id` STRING,`name` STRING,`time` TIMESTAMP,primary key (`id`) not enforced ) WITH ( 'connector'='upsert-kafka','topic'='mongo_hive','properties.bootstrap.servers'='10.88.36.79:9092','key.format'='json','value.format'='json','value.json.fail-on-missing-field'='false','value.json.ignore-parse-errors'='true');");
    list.add(
        "insert into `mongo_hive_target` (`id`,`name`,`time`)  select `_id`,`name`,`time`  from `mongo_hive_source`;");
    return list;
  }*/

    /**
     * Builds the three Flink SQL statements for a task: source table DDL, target table DDL,
     * and the INSERT...SELECT transform, in execution order.
     *
     * @param task sync task holding datasource ids/types, table names and field mappings
     * @return [source DDL, target DDL, transform SQL]
     * @throws BusinessException when the task has no field mappings
     */
    public List<String> generateFlinkSqlList(FlinkDataSyncTaskEntity task) {
        List<FieldMapping> fieldMappings = task.getFieldMappings();
        if (CollectionUtils.isEmpty(fieldMappings)) {
            throw new BusinessException("实时采集字段映射列表不存在，请检查后重试！");
        }
        String sourceDsType = task.getSourceDsType();
        List<TableConfigDomain.FieldInfo> sourceFields = Lists.newArrayList();
        List<TableConfigDomain.FieldInfo> targetFields = Lists.newArrayList();
        for (FieldMapping f : fieldMappings) {
            // For MongoDB sources the "_id" column is implicitly the primary key.
            // FIX: was the `cond ? true : x` anti-idiom; `||` expresses the same logic directly.
            boolean isPk =
                (sourceDsType.equalsIgnoreCase(DSType.Mongodb.name())
                        && f.getSourceField().equals("_id"))
                    || f.isSourceFieldPk();
            sourceFields.add(
                TableConfigDomain.FieldInfo.builder()
                    .name(f.getSourceField())
                    .type(f.getSourceFieldType())
                    .isPartition(f.isSourcePartition())
                    .isPk(isPk)
                    .build());
            targetFields.add(
                TableConfigDomain.FieldInfo.builder()
                    .name(f.getTargetField())
                    .type(f.getTargetFieldType())
                    .isPartition(f.isTargetPartition())
                    .isPk(isPk)
                    .build());
        }
        TableConfigDomain sourceTableConfig =
            TableConfigDomain.builder()
                .dsId(task.getSourceDsId())
                .dsType(task.getSourceDsType())
                .tableName(task.getSourceTableOrTopic())
                .isSource(Boolean.TRUE)
                .useCdc(Boolean.TRUE)
                .start(task.getStart())
                .fieldInfoList(sourceFields)
                .schema(task.getSourceSchema())
                .build();
        TableConfigDomain targetTableConfig =
            TableConfigDomain.builder()
                .dsId(task.getTargetDsId())
                .dsType(task.getTargetDsType())
                .tableName(task.getTargetTableOrTopic())
                .isSource(Boolean.FALSE)
                .fieldInfoList(targetFields)
                .schema(task.getTargetSchema())
                .build();
        String sourceTableSql = flinkDataSyncExecService.buildFlinkSqlTable(sourceTableConfig);
        String targetTableSql = flinkDataSyncExecService.buildFlinkSqlTable(targetTableConfig);
        String transformSql = flinkDataSyncExecService.buildTransformSql(task);
        return Arrays.asList(sourceTableSql, targetTableSql, transformSql);
    }

    /**
     * Concatenates the given Flink SQL statements (in order, no separator) and returns the
     * Base64 encoding of the UTF-8 bytes.
     *
     * @param flinkSqlList SQL statements to concatenate
     * @return Base64 string of the joined SQL; empty string for an empty list
     */
    public String generateFlinkSqlBase64Str(List<String> flinkSqlList) {
        // FIX(idiom): String.join replaces the manual StringBuilder loop, and encodeToString
        // replaces encode() + new String(...) — identical output, less ceremony.
        String joined = String.join("", flinkSqlList);
        return Base64.getEncoder().encodeToString(joined.getBytes(StandardCharsets.UTF_8));
    }

    /**
     * Lists the topics available on the Kafka cluster behind the given datasource.
     *
     * @param dsId datasource id
     * @return topic names reported by the broker
     */
    public Set<String> getTopics(Long dsId) {
        DataSourceDetailDomain detail = dataSourceService.getDataSourceDetailById(dsId, false);
        return KafkaUtil.kafkaTopic(detail.getDsLink());
    }

    /**
     * Pages the task list for the current project with optional filters (name like, status,
     * directory subtree, run status, publish window), then decorates each row with datasource
     * names and the creator's display name.
     *
     * @param domain query filters plus pageNum/pageSize
     * @return one page of decorated task rows
     */
    public PageResult<FlinkDataSyncTaskPageDomain> getPageList(FlinkDataSyncTaskQueryDomain domain) {
        Long projectId = ServletUtils.getProjectId();
        Long dirId = domain.getDirId();
        List<Long> ids = Lists.newArrayList();
        // FIX: was Objects.equals(0, dirId) — comparing an Integer constant with a Long is always
        // false, so dirId == 0 was never treated as "no directory filter". Compare with 0L.
        if (Objects.nonNull(dirId) && !Objects.equals(0L, dirId)) {
            List<DirectoryEntity> children =
                directoryService.getChildrenById(dirId, projectId, DirectoryMenuEnum.ONLINE);
            ids = children.stream().map(DirectoryEntity::getId).collect(Collectors.toList());
            // Include the selected directory itself (dirId is known non-null here, so the
            // original redundant re-check was removed).
            ids.add(dirId);
        }

        LambdaQueryWrapper<FlinkDataSyncTaskEntity> query =
            new QueryWrapper<FlinkDataSyncTaskEntity>().lambda();
        query
            .like(
                StringUtils.isNotBlank(domain.getTaskName()),
                FlinkDataSyncTaskEntity::getTaskName,
                domain.getTaskName())
            .eq(
                Objects.nonNull(domain.getTaskStatus()) && domain.getTaskStatus() > 0,
                FlinkDataSyncTaskEntity::getTaskStatus,
                domain.getTaskStatus())
            .eq(Objects.nonNull(projectId), FlinkDataSyncTaskEntity::getProjectId, projectId)
            .in(CollectionUtils.isNotEmpty(ids), FlinkDataSyncTaskEntity::getDirId, ids)
            .eq(
                Objects.nonNull(domain.getTaskRunStatus()),
                FlinkDataSyncTaskEntity::getTaskRunStatus,
                domain.getTaskRunStatus())
            .ge(
                Objects.nonNull(domain.getPublishDate()),
                FlinkDataSyncTaskEntity::getStartTime,
                domain.getPublishDate())
            .le(
                Objects.nonNull(domain.getEndTime()),
                FlinkDataSyncTaskEntity::getStartTime,
                domain.getEndTime())
            .orderByDesc(FlinkDataSyncTaskEntity::getUpdateTime);
        IPage<FlinkDataSyncTaskPageDomain> page =
            this.page(new Page<>(domain.getPageNum(), domain.getPageSize()), query)
                .convert(e -> Convert.convert(FlinkDataSyncTaskPageDomain.class, e));

        List<FlinkDataSyncTaskPageDomain> domainList = page.getRecords();
        if (CollectionUtils.isNotEmpty(domainList)) {
            // Collect datasource ids and creator ids for batch lookups.
            Set<Long> dsIds = new HashSet<>();
            Set<String> userIds = new HashSet<>();
            for (FlinkDataSyncTaskPageDomain e : domainList) {
                dsIds.add(e.getSourceDsId());
                dsIds.add(e.getTargetDsId());
                userIds.add(e.getCreatorId());
            }
            Map<Long, DataSourceDomain> dsDomainMap =
                dataSourceService.getDataSourceDomainByIds(dsIds).stream()
                    .collect(Collectors.toMap(DataSourceDomain::getId, d -> d, (d1, d2) -> d1));
            // Creator id -> display name.
            Map<String, String> userMap = userService.getMapByUserIds(new ArrayList<>(userIds));

            for (FlinkDataSyncTaskPageDomain d : domainList) {
                List<String> sources = Lists.newArrayList();
                List<String> targets = Lists.newArrayList();
                Long sourceDsId = d.getSourceDsId();
                // NOTE(review): per-row lookup (N+1); kept because the dsType is not available
                // on the batched DataSourceDomain view from this file.
                DataSourceEntity sourceDs = dataSourceService.getById(sourceDsId);
                // FIX: was '||', which is always true (a type can never equal both Kafka and
                // MQTT), so recordId was overwritten for every row — including Kafka/MQTT tasks
                // whose recordId is maintained by the start flow. '&&' matches the check used in
                // unPublish/delete.
                if (!sourceDs.getDsType().equalsIgnoreCase(DSType.Kafka.name())
                    && !sourceDs.getDsType().equalsIgnoreCase(DSType.MQTT.name())) {
                    d.setRecordId(String.valueOf(d.getId()));
                }
                // Ids are already Long, so the original redundant Long.valueOf calls were removed.
                DataSourceDomain sourceDomain =
                    Objects.nonNull(sourceDsId) ? dsDomainMap.get(sourceDsId) : null;
                Long targetDsId = d.getTargetDsId();
                DataSourceDomain targetDomain =
                    Objects.nonNull(targetDsId) ? dsDomainMap.get(targetDsId) : null;

                if (Objects.nonNull(sourceDomain)) {
                    sources.add(sourceDomain.getDsName() + "/" + d.getSourceTableOrTopic());
                }
                if (Objects.nonNull(targetDomain)) {
                    targets.add(targetDomain.getDsName() + "/" + d.getTargetTableOrTopic());
                }
                d.setSource(String.join(",", sources));
                d.setTarget(String.join(",", targets));
                d.setCreator(userMap.get(d.getCreatorId()));
            }
        }
        return PageResult.copyPropertiesAndConvert(page, FlinkDataSyncTaskPageDomain.class);
    }

    /**
     * Aggregates task counts for the current project: running, to-be-released, and failed.
     *
     * @return summary with the three counters
     */
    public FlinkTaskSummaryDomain getSummary() {
        Long projectId = ServletUtils.getProjectId();
        List<FlinkDataSyncTaskEntity> tasks = flinkDataSyncTaskMapper.getSummaryList(projectId);

        long running = 0L;
        long toRelease = 0L;
        long failed = 0L;
        for (FlinkDataSyncTaskEntity task : tasks) {
            if (task.getTaskRunStatus().equals(OfflineSyncTaskStatus.running.getTaskStatus())) {
                running++;
            }
            if (task.getTaskStatus().equals(OfflineSyncTaskStatus.torelease.getTaskStatus())) {
                toRelease++;
            }
            if (task.getTaskRunStatus().equals(OfflineSyncTaskStatus.failure.getTaskStatus())) {
                failed++;
            }
        }

        return FlinkTaskSummaryDomain.builder()
            .running((int) running)
            .toBeReleased((int) toRelease)
            .fail((int) failed)
            .build();
    }

    /**
     * Loads the full detail view of a task, including converted field mappings and the resolved
     * names of both datasources.
     *
     * @param id task primary key
     * @return the detail domain, or null when no task exists for the id
     */
    public FlinkDataSyncTaskDomain getDetailInfo(Long id) {
        FlinkDataSyncTaskEntity entity = this.getById(id);
        if (entity == null) {
            return null;
        }

        FlinkDataSyncTaskDomain detail = Convert.convert(FlinkDataSyncTaskDomain.class, entity);
        List<FlinkDataSyncTaskDomain.FieldMapping> convertedMappings =
            Convert.toList(FlinkDataSyncTaskDomain.FieldMapping.class, entity.getFieldMappings());
        detail.setFieldMappings(convertedMappings);

        // Resolve human-readable datasource names for both ends of the sync.
        DataSourceDetailDomain sourceDs =
            dataSourceService.getDataSourceDetailById(detail.getSourceDsId(), false);
        DataSourceDetailDomain targetDs =
            dataSourceService.getDataSourceDetailById(detail.getTargetDsId(), false);
        detail.setSourceDsName(sourceDs.getDsName());
        detail.setTargetDsName(targetDs.getDsName());
        return detail;
    }

    /**
     * Checks whether a task name is still free within the current project.
     *
     * @param taskName candidate task name
     * @return true when no existing task in the project uses the name
     */
    public boolean checkTaskName(String taskName) {
        Long projectId = ServletUtils.getProjectId();
        List<FlinkDataSyncTaskEntity> clashes =
            flinkDataSyncTaskMapper.checkTaskName(taskName, projectId);
        return CollectionUtils.isEmpty(clashes);
    }

    // --------------------------------------flink

    /**
     * Starts an online Kafka/MQTT collection task by forking a detached client JVM.
     *
     * <p>Flow: resolve the local host IP → allocate a free JMX port → reset the task row's
     * run-state columns → serialize the client parameters to a JSON handoff file → spawn
     * {@code data-engine-online-task-client.jar} with that file path as its argument. On any
     * failure the task row is marked failed; the task id is returned in every case.
     *
     * @param param task id plus optional start-point settings
     * @return the task id (which also serves as the running-record id)
     */
    public Long startOnlineKakfaTask(StartOnlineKafkaTaskParam param) {
        String localHost = null;
        try {
            InetAddress inetAddress = InetAddress.getLocalHost();
            localHost = inetAddress.getHostAddress();
        } catch (UnknownHostException e) {
            // Non-fatal: the JMX server column is simply left null.
            log.info("获取本地ip失败{}", e.getMessage());
        }
        // Allocate a JMX port that no other task row currently records.
        Integer jmxPort = getJmxPort();

        // Reset the run-state columns; the task row itself doubles as the "running record".
        FlinkDataSyncTaskEntity onlineTaskRunningRecordEntity = new FlinkDataSyncTaskEntity();
        //    OnlineTaskRunningRecordEntity onlineTaskRunningRecordEntity =
        //        new OnlineTaskRunningRecordEntity();
        Long onlineTaskRunningRecordId = param.getTaskId();
        LambdaUpdateWrapper<FlinkDataSyncTaskEntity> update =
            new UpdateWrapper<FlinkDataSyncTaskEntity>().lambda();
        update.set(FlinkDataSyncTaskEntity::getFailNum, 0);
        update.set(FlinkDataSyncTaskEntity::getCreateTime, new Date());
        update.set(FlinkDataSyncTaskEntity::getDelay, null);
        update.set(FlinkDataSyncTaskEntity::getLastSyncTime, null);
        update.set(FlinkDataSyncTaskEntity::getEndTime, null);
        update.set(FlinkDataSyncTaskEntity::getStartPointTime, param.getStartPointTime());
        update.set(FlinkDataSyncTaskEntity::getStartTime, new Date());
        update.eq(FlinkDataSyncTaskEntity::getId, param.getTaskId());
        update.set(FlinkDataSyncTaskEntity::getJmxPort, jmxPort);
        update.set(FlinkDataSyncTaskEntity::getJmxServer, localHost);
        update.set(
            FlinkDataSyncTaskEntity::getTaskStatus, OfflineSyncTaskStatus.released.getTaskStatus());
        update.set(
            FlinkDataSyncTaskEntity::getTaskRunStatus, OfflineSyncTaskStatus.running.getTaskStatus());
        update.set(FlinkDataSyncTaskEntity::getTaskIdMd5, param.getTaskIdMD5());
        update.set(FlinkDataSyncTaskEntity::getRecordId, onlineTaskRunningRecordId);
        //    onlineTaskRunningRecordMapper.insertOne(onlineTaskRunningRecordEntity);
        this.update(update);
        // String onlineTaskRunningRecordId = onlineTaskRunningRecordEntity.getId();

        // restResponseEntity.setData(onlineTaskRunningRecordId);

        // Parameter handoff file: {clientHome}/json/{uuid}.json, read by the client process.
        String dirPath = this.clientHome + File.separator + "json";
        File file = new File(dirPath + File.separator + UUID.randomUUID() + ".json");
        if (!file.getParentFile().exists()) {
            file.getParentFile().mkdirs();
        }
        String json = "";
        try {
            // Build the client parameter object (columns, connection info, partitions, ...).
            OnlineTaskParam onlineTaskParam = initParams(param, onlineTaskRunningRecordId, jmxPort);
            json = JSONObject.toJSONString(onlineTaskParam);
            // Persist so the forked client JVM can pick the parameters up by file path.
            FileUtils.writeStringToFile(file, json, "UTF-8");
        } catch (IOException e) {
            log.error(e.getMessage(), e);

            // Writing the handoff file failed: record it in the online log, mark the task failed.
            try {
                onlineTaskRunningRecordEntity.setId(param.getTaskId());
                LogUtil.writeOnlineLog(
                    onlineTaskRunningRecordEntity.getId(), Lists.newArrayList("创建执行文件失败"));
                String log = String.format("生成json：%s", JSONObject.toJSONString(json));
                LogUtil.writeOnlineLog(onlineTaskRunningRecordEntity.getId(), Lists.newArrayList(log));
            } catch (IOException e1) {
                // NOTE(review): logs the outer 'e' rather than 'e1' — likely intended e1; confirm.
                log.error(e.getMessage(), e);
            }

            onlineTaskRunningRecordEntity.setTaskRunStatus(OfflineSyncTaskStatus.failure.getTaskStatus());
            onlineTaskRunningRecordEntity.setJmxPort(null);
            onlineTaskRunningRecordEntity.setEndTime(new Date());
            flinkDataSyncTaskMapper.updateById(onlineTaskRunningRecordEntity);

            if (file.exists()) {
                // file.delete();
            }

            return onlineTaskRunningRecordId;
        }

    /* String commandTemplate =
        "nohup java -server -Xms256m -Xmx512m -Dlog.home=%s -Dlog.name=%s "
            + "-Dloader.path=%s"
            + File.separator
            + "lib"
            + File.separator
            + "* "
            + "-jar %s"
            + File.separator
            + "hufu-online-task-client.jar  "
            + " %s 1>/dev/null 2>&1 &";
    String command =
        String.format(
            commandTemplate,
            hufuProperties.getLog().getOnline(),
            onlineTaskRunningRecordId,
            hufuProperties.getOnlineTaskClient().getClientHome(),
            hufuProperties.getOnlineTaskClient().getClientHome(),
            file.getAbsolutePath());
    log.info("实时采集command：" + command); */
        // NOTE(review): Runtime.exec(String[]) does not go through a shell, so the trailing
        // "1>/dev/null", "2>&1", "&" tokens are passed to the JVM as plain program arguments and
        // are NOT interpreted as redirection/backgrounding — confirm this is intended.
        String[] command = {
            "nohup",
            "java",
            "-server",
            "-Xms256m",
            "-Xmx512m",
            "-Dlog.home=" + logConfig.getOnline(),
            "-Dlog.name=" + onlineTaskRunningRecordId,
//            "-Dloader.path=" + this.clientHome + "/lib/* ",
            "-jar",
            this.clientHome + "/data-engine-online-task-client.jar",
            file.getAbsolutePath(),
            "1>/dev/null",
            "2>&1",
            "&"
        };
        try {

            Process process = Runtime.getRuntime().exec(command);
            // Process process = Runtime.getRuntime().exec(new String[] {"/bin/bash", "-c", command});
            // Close all stdio handles immediately: the child runs detached, monitored over JMX.
            process.getInputStream().close();
            process.getErrorStream().close();
            process.getOutputStream().close();

      /* int code = process.waitFor();
      if (code != 0) {
        LogUtil.writeOnlineLog(onlineTaskRunningRecordEntity.getId(), Lists.newArrayList("启动失败"));
        onlineTaskRunningRecordEntity.setTaskRunStatus(
            OfflineSyncTaskStatus.failure.getTaskStatus());
        onlineTaskRunningRecordEntity.setJmxPort(null);
        onlineTaskRunningRecordEntity.setEndTime(new Date());
        onlineTaskRunningRecordEntity.setIsAlarmed(0);
        flinkDataSyncTaskMapper.updateById(onlineTaskRunningRecordEntity);
        if (file.exists()) {
          // file.delete();
        }
        return onlineTaskRunningRecordId;
      } */
        } catch (Exception e) {
            // Spawn failed: write to the online task log and mark the task failed.
            try {
                // NOTE(review): getId() may still be null here — setId is only called in the
                // earlier IOException branch; confirm LogUtil tolerates a null id.
                LogUtil.writeOnlineLog(
                    onlineTaskRunningRecordEntity.getId(), Lists.newArrayList(e.getMessage()));
            } catch (IOException e1) {
                log.error(e1.getMessage(), e1);
            }
            onlineTaskRunningRecordEntity.setTaskRunStatus(OfflineSyncTaskStatus.failure.getTaskStatus());
            onlineTaskRunningRecordEntity.setJmxPort(-1);
            onlineTaskRunningRecordEntity.setEndTime(new Date());
            flinkDataSyncTaskMapper.updateById(onlineTaskRunningRecordEntity);
            if (file.exists()) {
                // file.delete();
            }

            return onlineTaskRunningRecordId;
        }

        return onlineTaskRunningRecordId;
    }

    /**
     * Picks a random JMX port in [40000, 50000) that is not already recorded on any task row.
     *
     * <p>FIX: the original retried via recursion, risking a stack overflow under heavy port
     * contention; this version retries in a loop with identical selection behavior.
     * NOTE(review): this is still check-then-use — two concurrent callers can race for the
     * same port.
     *
     * @return a port value currently unused by any task row
     */
    private Integer getJmxPort() {
        Random random = new Random();
        while (true) {
            int candidate = 40000 + random.nextInt(10000);
            LambdaQueryWrapper<FlinkDataSyncTaskEntity> query =
                new QueryWrapper<FlinkDataSyncTaskEntity>().lambda();
            query.eq(FlinkDataSyncTaskEntity::getJmxPort, candidate);
            List<FlinkDataSyncTaskEntity> inUse = flinkDataSyncTaskMapper.selectList(query);
            if (CollectionUtils.isEmpty(inUse)) {
                return candidate;
            }
        }
    }

    /**
     * Assembles the {@link OnlineTaskParam} payload handed to the forked online-task client.
     *
     * <p>Loads the task row and both datasource details, then fills in: JMX/record ids, source
     * connection info (Kafka/MQTT hosts from dsLink), field mappings (from/to columns, JSONPath
     * expressions, tag columns) and — for database-style (non-Kafka/MQTT) targets — partition
     * settings plus the target table's full column list read via a DB manager.
     *
     * @param startOnlineTaskParam request carrying task id, task id MD5, optional start point
     * @param onlineTaskRunningRecordId running-record id (equals the task id)
     * @param jmxPort JMX port assigned to the client process
     * @return the fully-populated client parameter object
     */
    private OnlineTaskParam initParams(
        StartOnlineKafkaTaskParam startOnlineTaskParam,
        Long onlineTaskRunningRecordId,
        Integer jmxPort) {

        OnlineTaskParam onlineTaskParam = new OnlineTaskParam();

        onlineTaskParam.setRealTimePointMappingCsvFile(this.realTimePointMappingCsvFile);

        onlineTaskParam.setJmxPort(jmxPort);
        onlineTaskParam.setRecordId(onlineTaskRunningRecordId);

        // Kerberos unused; HDFS nameservice settings intentionally blank (old values commented).
        onlineTaskParam.setKeyTabPath(null);
        onlineTaskParam.setKrb5Conf(null);
        //    onlineTaskParam.setNameNodes(this.hdfsNamenodes);
        //    onlineTaskParam.setNameNodesAddrs(this.hdfsNamenodesaddr);
        //    onlineTaskParam.setNameServices(this.hdfsNameservices);
        onlineTaskParam.setNameNodes("");
        onlineTaskParam.setNameNodesAddrs("");
        onlineTaskParam.setNameServices("");
        // Optional consumption start point, applied only when setStartPoint == 1.
        if (startOnlineTaskParam.getSetStartPoint() != null
            && startOnlineTaskParam.getSetStartPoint().equals(1)) {
            onlineTaskParam.setOffsetTime(startOnlineTaskParam.getStartPointTime());
        }
        onlineTaskParam.setTaskId(startOnlineTaskParam.getTaskId());
        onlineTaskParam.setTaskIdMD5(startOnlineTaskParam.getTaskIdMD5());
        FlinkDataSyncTaskEntity onlineTaskEntity =
            flinkDataSyncTaskMapper.selectById(startOnlineTaskParam.getTaskId());

        // Resolve target datasource connection details (db name, credentials, link).
        // StorageEntity storage = storageService.getById(onlineTaskEntity.getTargetDsId());
        DataSourceDetailDomain dataSource =
            dataSourceService.getDataSourceDetailById(onlineTaskEntity.getTargetDsId(), false);
        String db = StringUtils.EMPTY;
        String username = StringUtils.EMPTY;
        String password = StringUtils.EMPTY;
        String urlLink = StringUtils.EMPTY;
        String hiveSiteXmlPath = StringUtils.EMPTY;
        // NOTE(review): 'server' and 'port' are declared but never read anywhere below.
        String server = StringUtils.EMPTY;
        Integer port = 0;
        if (Objects.nonNull(dataSource)) {
            if (onlineTaskEntity.getTargetDsType().equals(DSType.Hive.name())) {
                db = dataSource.getHiveConfig().getDbName();
                hiveSiteXmlPath = dataSource.getHiveConfig().getHiveSiteAddress();
            }
            // NOTE(review): this unconditionally overwrites 'db', making the Hive-specific
            // assignment above dead unless getDbName() returns the same value — confirm intended.
            db = dataSource.getDbName();
            username = dataSource.getUsername();
            password = dataSource.getPassword();
            urlLink = dataSource.getDsLink();
        }
        onlineTaskParam.setTaskName(onlineTaskEntity.getTaskName());
        onlineTaskParam.setTopic(onlineTaskEntity.getSourceTableOrTopic());
        onlineTaskParam.setDb(db);
        // NOTE(review): 'dataSource' may be null here (only guarded above) — potential NPE.
        DataSourceDetailDomain.RdbmsConfig rdbmsConfig = dataSource.getRdbmsConfig();
        if (Objects.nonNull(rdbmsConfig)) {
            onlineTaskParam.setSchema(dataSource.getRdbmsConfig().getSchema());
        }

        // Target table or topic (added 2022/4/14).
        onlineTaskParam.setTable(onlineTaskEntity.getTargetTableOrTopic());

        DataSourceDetailDomain tbDatasourceBaseEntity =
            dataSourceService.getDataSourceDetailById(onlineTaskEntity.getSourceDsId(), false);
        // Source side: Kafka or MQTT, hosts taken from the datasource link.
        if (onlineTaskEntity.getSourceDsType().equals(DSType.Kafka.name())) {
            onlineTaskParam.setType(DSType.Kafka.name());
            onlineTaskParam.setHosts(tbDatasourceBaseEntity.getDsLink());

            //      if (org.apache.commons.lang3.StringUtils.isNotBlank(
            //          tbDatasourceBaseEntity.getConnectionProperty())) {
            //        Properties properties =
            //            JSONObject.parseObject(
            //                tbDatasourceBaseEntity.getConnectionProperty(), Properties.class);
            //        onlineTaskParam.setProperties(properties);
            //      }
        } else if (onlineTaskEntity.getSourceDsType().equals(DSType.MQTT.name())) {
            onlineTaskParam.setType(DSType.MQTT.name());
            onlineTaskParam.setHosts(tbDatasourceBaseEntity.getDsLink());
        }

        // Field-name -> extraction-expression map (first value wins on duplicate names).
        List<FlinkDataSyncTaskEntity.FiledExpess> fieldExpressRequests =
            onlineTaskEntity.getFieldExpressRequests();
        Map<String, String> fieldExpMap = Maps.newHashMap();
        if (CollectionUtils.isNotEmpty(fieldExpressRequests)) {
            fieldExpMap =
                fieldExpressRequests.stream()
                    .collect(
                        Collectors.toMap(
                            data -> data.getFieldName(),
                            data -> data.getFieldExpress(),
                            (key1, key2) -> key1));
        }

        // Flatten the field mappings into parallel from/to/jsonPath lists plus tag columns.
        List<FlinkDataSyncTaskEntity.FieldMapping> fieldMappings = onlineTaskEntity.getFieldMappings();
        List<String> fromColumns = Lists.newArrayList();
        List<String> toColumns = Lists.newArrayList();
        List<String> jsonPathExps = Lists.newArrayList();
        List<String> tagColumns = Lists.newArrayList();
        if (CollectionUtils.isNotEmpty(fieldMappings)) {
            for (FlinkDataSyncTaskEntity.FieldMapping fileldMap : fieldMappings) {
                // fromColumns.add(fileldMap.getSourceFieldExpress());
                fromColumns.add(fileldMap.getSourceField());
                toColumns.add(fileldMap.getTargetField());
                jsonPathExps.add(fieldExpMap.get(fileldMap.getSourceField()));
                if (fileldMap.isTargetTag()) {
                    tagColumns.add(fileldMap.getTargetField());
                }
            }
        }

        onlineTaskParam.setFromColumn(fromColumns);
        onlineTaskParam.setToColumn(toColumns);
        onlineTaskParam.setTagColumn(tagColumns);
        // Params added 2022/4/14.
        onlineTaskParam.setJdbcUrl(urlLink);
        onlineTaskParam.setUsername(username);
        onlineTaskParam.setPassword(password);
        onlineTaskParam.setTargetType(onlineTaskEntity.getTargetDsType());
        onlineTaskParam.setUseJsFlag(onlineTaskEntity.getUseJsFlag());
        onlineTaskParam.setJsonPathExp(jsonPathExps);
        onlineTaskParam.setJsContent(onlineTaskEntity.getJsContent());
        // For kafka/mqtt targets the host lives in dsLink.
        onlineTaskParam.setTargetHosts(urlLink);
        onlineTaskParam.setTargetTopic(onlineTaskEntity.getTargetTableOrTopic());
        // Database-style targets additionally need partition settings and the full column list.
        if (!onlineTaskEntity.getTargetDsType().equals(DSType.MQTT.name())
            && !onlineTaskEntity.getTargetDsType().equals(DSType.Kafka.name())) {
            List<String> partitionColumn = Lists.newArrayList();
            List<String> partitionValues = Lists.newArrayList();

            List<FlinkDataSyncTaskEntity.ParitionFieldMap> paritionFieldMaps =
                onlineTaskEntity.getParitionFieldMaps();
            if (CollectionUtils.isNotEmpty(paritionFieldMaps)) {
                paritionFieldMaps.forEach(
                    paritionFieldMap -> {
                        partitionColumn.add(paritionFieldMap.getField());
                        partitionValues.add(paritionFieldMap.getValue());
                    });
                onlineTaskParam.setPartitionColumn(partitionColumn);
                onlineTaskParam.setPartitionValue(partitionValues);
            }

            // Read the target table's column metadata through the matching DB manager.
            DbOptions dbOptions = new DbOptions();
            dbOptions.setUrl(urlLink);
            dbOptions.setUsername(username);
            dbOptions.setPassword(password);
            dbOptions.setHiveSiteXmlPath(hiveSiteXmlPath);
            //      HiveManager hiveManager = new HiveManager(dbOptions);
            BaseDbManager dbManager =
                DbManagerFactory.getDbManager(
                    Database.from(onlineTaskEntity.getTargetDsType()), dbOptions);

            List<FieldInfoDomain> fieldList = Lists.newArrayList();
            if (Objects.nonNull(dbManager)) {
                try {
                    if (onlineTaskEntity.getTargetDsType().equalsIgnoreCase(DSType.PostgreSQL.name())) {
                        // NOTE(review): assumes "schema.table" format; split[1] throws when no dot.
                        String[] split = onlineTaskEntity.getTargetTableOrTopic().split("\\.");
                        fieldList = dbManager.getFieldList(db, split[1]);
                    } else {
                        fieldList = dbManager.getFieldList(db, onlineTaskEntity.getTargetTableOrTopic());
                    }
                } catch (SQLException e) {
                    // NOTE(review): the original cause is dropped; only the message survives.
                    throw new BusinessException(e.getMessage());
                }
            }
            // Hive-specific config: metastore URIs read from hive-site.xml.
            if (onlineTaskEntity.getTargetDsType().equals(DSType.Hive.name())) {
                HiveManager hiveManager = (HiveManager) dbManager;
                Map<String, String> metaXmlInfo = hiveManager.getMetaXmlInfo(hiveSiteXmlPath);
                String hiveMetastoreUris = metaXmlInfo.get(HiveSiteKeyEnum.METASORE_URIS.getKey());
                onlineTaskParam.setHiveMetastoreUris(hiveMetastoreUris);
                onlineTaskParam.setHiveSitePath(hiveSiteXmlPath);
                //        onlineTaskParam.setHdfsUrl(
                //            "hdfs:"
                //                + dataSource.getHdfsConfig().getServer()
                //                + ":"
                //                + dataSource.getHdfsConfig().getPort());
            } else if (onlineTaskEntity.getTargetDsType().equals(DSType.IoTDB.name())) {
                onlineTaskParam.setTargetServer(dataSource.getRdbmsConfig().getServer());
                onlineTaskParam.setTargetPort(dataSource.getRdbmsConfig().getPort());
        /* DataSourceManagerDomain managerInfo =
            dataSourceService.getManagerInfo(onlineTaskEntity.getTargetDsId());
        IoTDBSessionManager ioTDBSessionManager = managerInfo.getIoTDBSessionManager();
        try {
          fieldList =
              ioTDBSessionManager.getFiledList(
                  String.join(".", onlineTaskEntity.getTargetTableOrTopic(), "**"));
        } catch (Exception e) {
          throw new BusinessException(e.getMessage());
        } */
            }

            // Split the retrieved field metadata into parallel name/type lists.
            List<String> allColumn = Lists.newArrayList();
            List<String> allColumnType = Lists.newArrayList();
            if (CollectionUtils.isNotEmpty(fieldList)) {
                fieldList.forEach(
                    field -> {
                        allColumn.add(field.getFieldName());
                        allColumnType.add(field.getFieldType());
                    });
            }
            onlineTaskParam.setAllColumn(allColumn);
            onlineTaskParam.setAllColumnType(allColumnType);
        }

        return onlineTaskParam;
    }

    /**
     * Extracts the Flink job id from the xxl-job execution log of the given job.
     *
     * <p>Scans today's log file ({@code <xxlJobLogPath>/yyyy-MM-dd/<xxlJobId>.log}) for the
     * marker line printed by the Flink CLI ("Job has been submitted with JobID ...") and returns
     * the text after the marker. When the marker occurs several times, the last occurrence wins.
     *
     * @param xxlJobId xxl-job log id whose log file is read
     * @return the trimmed Flink job id, or an empty string when the file is missing or the
     *     marker was not found
     */
    private String getFlinkJobId(Long xxlJobId) {
        final String marker = "Job has been submitted with JobID";
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        String logPath = xxlJobLogPath + "/" + sdf.format(new Date());
        File file = new File(logPath + IOUtils.DIR_SEPARATOR + xxlJobId + ".log");
        String jobId = StringUtils.EMPTY;
        try {
            for (String line : FileUtils.readLines(file, "UTF-8")) {
                int idx = line.indexOf(marker);
                if (idx != -1) {
                    // substring is safe even when the marker ends the line
                    // (the previous split(...)[1] threw ArrayIndexOutOfBoundsException there)
                    jobId = line.substring(idx + marker.length()).trim();
                }
            }
        } catch (Exception e) {
            // Best effort: a missing or unreadable log file simply yields an empty job id.
            log.error("read flink job log for xxlJobId {} failed: {}", xxlJobId, e.getMessage());
        }
        return jobId;
    }

    /**
     * Runs {@code args} as the given OS user via {@code /usr/bin/su} and returns stdout lines.
     *
     * <p>The command runs with a 10-minute timeout. On a non-zero exit code the combined
     * diagnostics (exit code, stdout, stderr) are logged and thrown.
     *
     * @param user OS account to switch to
     * @param args command and arguments, joined with single spaces
     * @return stdout of the command split into lines
     * @throws Exception when the command exits with a non-zero code
     */
    private List<String> runCmd(String user, String... args) throws Exception {
        String cmd = "/usr/bin/su";
        // Join once and reuse (the original joined the args twice).
        String joinedArgs = Joiner.on(" ").join(args);
        log.info("join:" + joinedArgs);
        String[] cmdArgs = new String[]{"-", user, joinedArgs};
        try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
            ProcBuilder builder = new ProcBuilder(cmd).withArgs(cmdArgs).withTimeoutMillis(600000);
            builder.withOutputStream(out);
            ProcResult result = builder.run();
            if (result.getExitValue() == 0) {
                log.info("execute command {} {} success", cmd, Arrays.toString(cmdArgs));
                String outStr = out.toString(StandardCharsets.UTF_8.name());
                return Lists.newArrayList(outStr.split("\\r?\\n"));
            }
            String msg =
                String.format(
                    "execute command %s %s error, exitCode %s outStr %s errStr %s",
                    cmd,
                    Arrays.toString(cmdArgs),
                    result.getExitValue(),
                    result.getOutputString(),
                    result.getErrorString());
            log.error(msg);
            throw new Exception(msg);
        }
    }

    /** Returns every published realtime-sync task across all projects. */
    public List<FlinkDataSyncTaskEntity> getAllPublishTask() {
        return flinkDataSyncTaskMapper.getAllPublishTask();
    }

    /**
     * Returns the published realtime-sync tasks of one project, mapped to their
     * lightweight view objects.
     *
     * @param projectId project whose published tasks are listed
     * @return the tasks converted to {@link FlinkDataSyncSimpleDomain}
     */
    public List<FlinkDataSyncSimpleDomain> getAllPublishTask(Long projectId) {
        return ConvertUtil.copyProperties(
            flinkDataSyncTaskMapper.getAllPublishTask(projectId), FlinkDataSyncSimpleDomain.class);
    }

    /**
     * Previews how a raw JSON sample is parsed for a stream-processing task.
     *
     * <p>Two parse modes are supported: {@code anaysisType == 0} applies the configured
     * JsonPath expressions and prepends a header row with the field names; any other value
     * evaluates the user-supplied JavaScript {@code parseJson} function instead.
     *
     * @param param preview request carrying the sample JSON, field mappings and parse mode
     * @return preview rows; in JsonPath mode the first row holds the field names
     */
    public List<List> previewJsonDataForSp(PreviewDataForMqParam param) {
        // JsonPath configuration: missing leaves resolve to null instead of throwing.
        Configuration conf = Configuration.builder().options(Option.DEFAULT_PATH_LEAF_TO_NULL).build();
        LinkedList<List> lists;
        // Single branch on the parse mode (the original re-checked anaysisType twice).
        if (param.getAnaysisType() == 0) {
            lists = analysisJson(Lists.newArrayList(param.getJsonData()), param, conf);
            List<String> fieldNames =
                param.getFieldInfoList().stream()
                    .map(PreviewDataForMqParam.fieldInfo::getFieldName)
                    .collect(Collectors.toList());
            lists.addFirst(fieldNames);
        } else {
            lists = callJSFunction(param.getJsonContent(), param.getJsonData());
        }
        return lists;
    }

    /**
     * Pulls a few live messages from a Kafka/MQTT source and previews how they parse.
     *
     * <p>Mirrors {@link #previewJsonDataForSp(PreviewDataForMqParam)} but reads the sample
     * payloads from the message queue identified by {@code param.getDsId()} instead of taking
     * them from the request.
     *
     * @param param preview request (data source, topic, field mappings, parse mode)
     * @return preview rows; in JsonPath mode the first row holds the field names
     * @throws BusinessException when no message could be fetched or the MQTT read fails
     */
    public List<List> previewDataForMq(PreviewDataForMqParam param) {

        String mqType = param.getMqType();
        List<PreviewDataForMqParam.fieldInfo> fieldInfoList = param.getFieldInfoList();
        // JsonPath configuration: missing leaves resolve to null instead of throwing.
        Configuration conf = Configuration.builder().options(Option.DEFAULT_PATH_LEAF_TO_NULL).build();
        Long dsId = param.getDsId();
        DataSourceDetailDomain datasource = dataSourceService.getDataSourceDetailById(dsId, false);
        String dsLink = datasource.getDsLink();
        LinkedList<List> lists = Lists.newLinkedList();
        if (mqType.equalsIgnoreCase(DSType.Kafka.name())) {
            List<String> preview = KafkaUtil.preview(dsLink, param.getTopic());
            if (CollectionUtils.isEmpty(preview)) {
                throw new BusinessException("未获取到KAFKA内数据");
            }
            if (param.getAnaysisType() == 0) {
                lists = analysisJson(preview, param, conf);
            } else {
                // FIXME get(0): the first message supplies the field names the front end maps.
                lists = callJSFunction(param.getJsonContent(), preview.get(0));
            }
        } else {
            List<String> preview;
            try {
                preview = MQTTUtil.preview(dsLink, param.getTopic());
            } catch (Exception e) {
                // (The original logged the same message twice.)
                log.error(e.getMessage());
                throw new BusinessException("出现异常:" + e.getMessage());
            }
            if (CollectionUtils.isEmpty(preview)) {
                throw new BusinessException("30s内未获取到MQTT消息，请稍后重试");
            }
            if (param.getAnaysisType() == 0) {
                lists = analysisJson(preview, param, conf);
            } else {
                lists = callJSFunction(param.getJsonContent(), preview.get(0));
            }
        }
        if (param.getAnaysisType() == 0) {
            // Header row with the configured field names, consistent with previewJsonDataForSp.
            List<String> fieldNames =
                fieldInfoList.stream()
                    .map(PreviewDataForMqParam.fieldInfo::getFieldName)
                    .collect(Collectors.toList());
            lists.addFirst(fieldNames);
        }
        return lists;
    }

    /**
     * Deletes a directory node of the realtime-collection tree.
     *
     * <p>Deletion is refused when the directory itself or any of its descendants still
     * contains a realtime sync task.
     *
     * @param id directory id to delete
     * @return result of the underlying directory deletion
     * @throws BusinessException when the subtree still holds tasks
     */
    public Boolean deleteDir(Long id) {
        Long projectId = ServletUtils.getProjectId();
        List<DirectoryEntity> childrenById =
            directoryService.getChildrenById(id, projectId, DirectoryMenuEnum.ONLINE);
        List<Long> ids = childrenById.stream().map(DirectoryEntity::getId).collect(Collectors.toList());
        ids.add(id);
        // checkDirIsEmpty returns true when no task lives under any of the given directories.
        if (!flinkDataSyncTaskMapper.checkDirIsEmpty(ids)) {
            throw new BusinessException("该目录下存在实时采集任务，无法删除");
        }
        return directoryService.deleteDir(id, DirectoryMenuEnum.ONLINE);
    }

    /**
     * Incrementally reads the execution log of one realtime sync run.
     *
     * <p>For Flink-CDC sources (anything that is neither Kafka nor MQTT) the log is read from
     * the Flink standalone-session log while the job runs, or from the archived
     * {@code <flinkLog>/<recordId>.log} after a failure. Kafka/MQTT sub-task logs are read from
     * {@code <onlineLogDir>/<recordId>.log}.
     *
     * @param recordId id of the run whose log is requested
     * @param offset 1-based line offset of the previous read; {@code null} or 0 start at line 1
     * @return log lines after {@code offset}, the next offset, and whether the log is complete
     * @throws BusinessException when the run is unknown or the log file cannot be read
     */
    public OnlineTaskLogDomain getTaskLog(Long recordId, Integer offset) {

        int start = 1;
        if (offset != null && !offset.equals(0)) {
            start = offset;
        }
        OnlineTaskLogDomain response = new OnlineTaskLogDomain();
        response.setLogEnd(false);
        response.setOffset(offset);
        response.setLogContent(Lists.newArrayList());
        FlinkDataSyncTaskEntity byRecordId = flinkDataSyncTaskMapper.getByRecordId(recordId);
        // The original ternary dereferenced byRecordId in BOTH branches and would NPE for an
        // unknown recordId; fail fast with a business error instead.
        if (Objects.isNull(byRecordId)) {
            throw new BusinessException("读取日志异常");
        }
        Integer status = byRecordId.getTaskStatus();
        if (!Objects.equals(OfflineDevJobStatusEnum.RUNNING.getCode(), status)) {
            response.setLogEnd(true);
        }
        // Only the log of the single sub-task is read here.
        List<String> logs;
        String dir = logConfig.getOnline();
        // Flink CDC source (not Kafka and not MQTT): logs live in the Flink session / archive.
        if (!byRecordId.getSourceDsType().equalsIgnoreCase(DSType.Kafka.name())
            && !byRecordId.getSourceDsType().equalsIgnoreCase(DSType.MQTT.name())) {

            if (Objects.equals(OfflineDevJobStatusEnum.RUNNING.getCode(), status)) {
                response.setLogEnd(false);
                try {
                    String flinkDir = flinkConfig.getFlinkDir();
                    List<String> logFiles = getLogFiles(flinkDir, ".*standalonesession.*\\.log");
                    if (CollectionUtils.isNotEmpty(logFiles)) {
                        String s = logFiles.get(0);
                        logs = logService.read(s, offset);
                        if (CollectionUtils.isNotEmpty(logs)) {
                            response.setOffset(logs.size() + start);
                            response.setLogContent(logs);
                        }
                    }
                    return response;
                } catch (Exception e) {
                    log.error(e.getMessage());
                    throw new BusinessException("读取日志异常");
                }
            } else if (Objects.equals(OfflineDevJobStatusEnum.FAILURE.getCode(), status)) {
                // Job failed: read the archived per-record log from local disk.
                response.setLogEnd(true);
                String flinkLog =
                    flinkConfig.getFlinkLog() + IOUtils.DIR_SEPARATOR + recordId
                        + ".log";
                try {
                    logs = logService.read(flinkLog, offset);
                    response.setLogContent(logs);
                } catch (IOException e) {
                    log.error(e.getMessage());
                    throw new BusinessException("读取日志异常");
                }
                if (CollectionUtils.isNotEmpty(logs)) {
                    response.setOffset(logs.size() + start);
                }
                return response;
            }
        }

        String subTaskJobLogPath = String.format("%s/%s.log", dir, recordId);

        try {
            logs = logService.read(subTaskJobLogPath, offset);
        } catch (IOException e) {
            log.error(e.getMessage());
            throw new BusinessException("读取日志异常");
        }
        if (CollectionUtils.isNotEmpty(logs)) {
            response.setOffset(logs.size() + start);
            response.setLogContent(logs);
        }

        return response;
    }

    /**
     * Polls every running Kafka/MQTT collection task over JMX and refreshes its metrics.
     *
     * <p>On a successful probe, the delay (for MQ point types), failure count and last sync
     * time are persisted. A probe failure marks the task as failed and releases its JMX port.
     */
    public void checkTask() {
        List<FlinkDataSyncTaskEntity> mqTaskRunningList =
            flinkDataSyncTaskMapper.getMqTaskRunningList();
        mqTaskRunningList.forEach(
            mqTask -> {
                try {
                    Point point = JmxUtil.checkPoint(mqTask.getJmxPort());
                    // NOTE(review): the original compared against Kafka twice; since this job
                    // only watches Kafka/MQTT tasks, the second comparison was almost certainly
                    // meant to be MQTT — confirm against the JMX point types.
                    if (org.apache.commons.lang.StringUtils.isNotBlank(point.getType())
                        && (point.getType().equalsIgnoreCase(DSType.Kafka.name())
                        || point.getType().equalsIgnoreCase(DSType.MQTT.name()))) {
                        mqTask.setDelay(point.getDelay());
                    }
                    mqTask.setFailNum(point.getFailNum());
                    mqTask.setLastSyncTime(point.getLastSyncTime());
                    flinkDataSyncTaskMapper.updateById(mqTask);
                } catch (BusinessException e) {
                    // JMX probe failed: the task process is considered dead.
                    mqTask.setTaskRunStatus(OfflineSyncTaskStatus.failure.getTaskStatus());
                    mqTask.setJmxPort(-1);
                    mqTask.setEndTime(new Date());
                    flinkDataSyncTaskMapper.updateById(mqTask);
                }
            });
    }

    /**
     * Lists the absolute paths of the files in {@code logDirPath} whose names match {@code regex}.
     *
     * @param logDirPath directory to scan (not recursive)
     * @param regex pattern the whole file name must match
     * @return matching absolute paths; empty when nothing matches or the directory is unreadable
     */
    public List<String> getLogFiles(String logDirPath, String regex) {
        Pattern pattern = Pattern.compile(regex);
        // listFiles returns null for a missing/unreadable directory — treated as "no files".
        File[] logFiles =
            new File(logDirPath).listFiles((dir, name) -> pattern.matcher(name).matches());

        List<String> logFilePaths = new ArrayList<>();
        if (logFiles != null) {
            for (File logFile : logFiles) {
                logFilePaths.add(logFile.getAbsolutePath());
            }
        }
        return logFilePaths;
    }

    /**
     * Applies each configured JsonPath expression to (up to) the first five JSON samples.
     *
     * @param preview raw JSON documents to parse
     * @param param carries the field list whose {@code fieldExpress} JsonPath is evaluated
     * @param conf JsonPath configuration (missing leaves resolve to null)
     * @return one row per sample, one cell per field, in field-list order
     */
    public static LinkedList<List> analysisJson(
        List<String> preview, PreviewDataForMqParam param, Configuration conf) {

        // Preview at most the first five samples.
        List<String> subList = preview.size() > 5 ? preview.subList(0, 5) : preview;
        List<PreviewDataForMqParam.fieldInfo> fieldInfoList = param.getFieldInfoList();

        LinkedList<List> rows = Lists.newLinkedList();
        for (String json : subList) {
            ArrayList<Object> row = Lists.newArrayList();
            fieldInfoList.forEach(
                fieldInfo -> {
                    // Only fieldExpress is evaluated; fieldName is just the display label.
                    Object read = using(conf).parse(json).read(fieldInfo.getFieldExpress());
                    row.add(read);
                });
            rows.add(row);
        }
        return rows;
    }

    /**
     * Converts a catalog table description into the Flink-facing table domain object.
     * A null {@code tblType} stays null; otherwise the enum constant is mapped by name.
     */
    private FlinkTableDomain parseToFlinkTableDomain(TableInfoDomain from) {
        TableInfoDomain.TblType sourceType = from.getTblType();
        FlinkTableDomain.TblType mappedType =
            sourceType == null ? null : FlinkTableDomain.TblType.valueOf(sourceType.name());
        return FlinkTableDomain.builder()
            .tableName(from.getTableName())
            .schema(from.getSchema())
            .tblType(mappedType)
            .build();
    }

    /**
     * Evaluates a user-supplied JavaScript snippet that must define {@code parseJson(json)} and
     * converts its result into preview rows. The first row is treated as the field-name header;
     * surrounding quotes are stripped from each header cell.
     *
     * @param js JavaScript source defining {@code parseJson}
     * @param json raw JSON sample passed to the function
     * @return rows returned by the script, header row first
     * @throws BusinessException when the script fails or {@code parseJson} is not defined
     */
    private LinkedList<List> callJSFunction(String js, String json) {
        ScriptEngineManager scriptEngineManager = new ScriptEngineManager();
        ScriptEngine engine = scriptEngineManager.getEngineByName("javascript");
        // Evaluate the script so the parseJson function becomes invocable.
        Object res;
        try {
            engine.eval(js);
            Invocable invocable = (Invocable) engine;
            res = invocable.invokeFunction("parseJson", json);
        } catch (ScriptException e) {
            throw new BusinessException(e.getMessage());
        } catch (NoSuchMethodException e) {
            throw new BusinessException("函数名必须定义:parseJson");
        }
        LinkedList<List> lists = analysisJsObj(res);
        List fieldName = lists.get(0);
        List<String> fieldNameTrim = Lists.newArrayList();
        fieldName.forEach(
            item -> {
                // Strip single quotes first, then double quotes. The original computed the
                // single-quote trim but discarded it and trimmed " from the raw item instead.
                String withoutSingleQuotes = trimFirstAndLastChar((String) item, "\'");
                fieldNameTrim.add(trimFirstAndLastChar(withoutSingleQuotes, "\""));
            });

        lists.set(0, fieldNameTrim);
        return lists;
    }

    /**
     * Strips every leading and trailing occurrence of {@code element} from {@code str}.
     *
     * <p>Rewritten from a do/while that threw {@link StringIndexOutOfBoundsException} for inputs
     * such as {@code ""} or a bare quote character ({@code substring(1, 0)}). Occurrences in the
     * middle of the string are left untouched, matching the original behavior for valid inputs.
     *
     * @param str string to trim (must not be null)
     * @param element non-empty substring to strip from both ends
     * @return the trimmed string, possibly empty
     */
    private String trimFirstAndLastChar(String str, String element) {
        while (str.startsWith(element)) {
            str = str.substring(element.length());
        }
        while (str.endsWith(element)) {
            str = str.substring(0, str.length() - element.length());
        }
        return str;
    }

    /**
     * Converts the object returned by the JavaScript {@code parseJson} function into rows.
     *
     * <p>Each top-level property of the script object becomes one row; its nested properties
     * become the row's cells. JSON string cells get their surrounding double quotes removed.
     *
     * <p>NOTE(review): the {@code count > 5} break admits up to six rows; if a five-row preview
     * was intended (as elsewhere in this class) the limit is off by one — kept as-is.
     *
     * @param o Nashorn script object (mirror) returned by the user script
     * @return up to six rows extracted from the script object
     */
    private LinkedList<List> analysisJsObj(Object o) {
        // Plain int suffices: the counter is never captured by the per-cell lambda.
        int count = 0;
        ScriptObjectMirror sjm = (ScriptObjectMirror) o;
        LinkedList<List> result = Lists.newLinkedList();
        for (Map.Entry<String, Object> entry : sjm.entrySet()) {
            ScriptObjectMirror inner = (ScriptObjectMirror) entry.getValue();
            ArrayList<Object> node = Lists.newArrayList();
            count++;
            inner.forEach(
                (key, value) -> {
                    Object obj = JSON.toJSON(value);
                    if (obj instanceof String) {
                        String s = obj.toString();
                        // Un-quote JSON string scalars for display.
                        if (s.startsWith("\"") && s.endsWith("\"")) {
                            s = s.substring(1, s.length() - 1);
                        }
                        obj = s;
                    }
                    node.add(obj);
                });
            result.add(node);
            if (count > 5) {
                break;
            }
        }
        return result;
    }

    /**
     * Refreshes the Flink job id of every running Flink sync task by parsing its xxl-job log.
     *
     * <p>(Method name typo {@code getFlinkJObIds} is kept for caller compatibility. The original
     * also collected all record ids into an unused list — removed.)
     *
     * @throws Exception declared for signature compatibility; log parsing itself is best effort
     */
    public void getFlinkJObIds() throws Exception {
        List<FlinkDataSyncTaskEntity> flinkTaskRunList = flinkDataSyncTaskMapper.getFlinkTaskRunList();
        flinkTaskRunList.forEach(
            record -> {
                String flinkJobId = getFlinkJobId(Long.valueOf(record.getRecordId()));
                // Persist only when a new, non-blank id was parsed to avoid useless updates.
                if (StringUtils.isNotBlank(flinkJobId)
                    && !Objects.equals(flinkJobId, record.getFlinkJobId())) {

                    record.setFlinkJobId(flinkJobId.trim());
                    flinkDataSyncTaskMapper.updateById(record);
                }
            });
    }

    /**
     * Returns one page of the project's realtime sync tasks as domain objects.
     *
     * @param projectId project filter
     * @param pageSize page size
     * @param pageNum 1-based page number
     * @return the page content converted to {@link FlinkDataSyncTaskDomain}
     */
    public List<FlinkDataSyncTaskDomain> getListByPage(
        Long projectId, Integer pageSize, Integer pageNum) {

        LambdaQueryWrapper<FlinkDataSyncTaskEntity> byProject =
            new LambdaQueryWrapper<FlinkDataSyncTaskEntity>()
                .eq(FlinkDataSyncTaskEntity::getProjectId, projectId);
        Page<FlinkDataSyncTaskEntity> page = this.page(new Page<>(pageNum, pageSize), byProject);
        return ConvertUtil.copyProperties(page.getRecords(), FlinkDataSyncTaskDomain.class);
    }

    //    public static void main(String[] args) {
    //      // jsonPath 配置
    //      Configuration conf =
    //   Configuration.builder().options(Option.DEFAULT_PATH_LEAF_TO_NULL).build();
    //      List<String> subList =
    //          Lists.newArrayList(
    //              "{\"id\":1,\"name\":\"qwepoi1\",\"age\":18}",
    //              "{\"id\":2,\"name\":\"qwepoi2\",\"age\":20}",
    //              "{\"id\":3,\"name\":\"qwepoi3\",\"age\":21}",
    //              "{\"id\":4,\"name\":\"qwepoi4\",\"age\":22}",
    //              "{\"id\":5,\"name\":\"qwepoi5\"}");
    //          ArrayList<String> exps = Lists.newArrayList("$.id", "$.name", "$.age");
    //          List<String> strings = subList.subList(0, 5);
    //          List<List> rows = Lists.newArrayList();
    //          for (String json : subList) {
    //            ArrayList<Object> row = Lists.newArrayList();
    //            exps.forEach(
    //                fieldInfo -> {
    //                  String jsonExp = fieldInfo;
    //                  Object read = using(conf).parse(json).read(jsonExp);
    //                  row.add(read);
    //                });
    //            rows.add(row);
    //          }
    //          System.out.println(rows);
    //      PreviewDataForMqParam param = new PreviewDataForMqParam();
    //      PreviewDataForMqParam.fieldInfo fieldInfo = new PreviewDataForMqParam.fieldInfo();
    //      fieldInfo.setFileName("id");
    //      fieldInfo.setJsonExp("$.id");
    //      PreviewDataForMqParam.fieldInfo fieldInfo2 = new PreviewDataForMqParam.fieldInfo();
    //      fieldInfo2.setFileName("age");
    //      fieldInfo2.setJsonExp("$.age");
    //      ArrayList<PreviewDataForMqParam.fieldInfo> fields = Lists.newArrayList();
    //      fields.add(fieldInfo);
    //      fields.add(fieldInfo2);
    //      param.setFieldInfoList(fields);
    //      LinkedList<List> lists = analysisJson(subList, param, conf);
    //      System.out.println(lists);
    //    }

    /** Lists every realtime sync task belonging to the given project. */
    public List<FlinkDataSyncTaskEntity> getOnlineSyncList(Long projectId) {
        LambdaQueryWrapper<FlinkDataSyncTaskEntity> byProject =
            new LambdaQueryWrapper<FlinkDataSyncTaskEntity>()
                .eq(FlinkDataSyncTaskEntity::getProjectId, projectId);
        return this.list(byProject);
    }

    /**
     * Returns the run state of one sync task as a small JSON payload.
     *
     * <p>Status code 2 means running, 3 means not running; {@code msg} carries the
     * human-readable label. A task status of 1 is treated as "not running".
     *
     * @param id task id (assumed to exist; a missing id would NPE — unchanged behavior)
     * @return JSON with keys {@code status} and {@code msg}
     */
    public JSONObject getOnlineSyncStatus(Long id) {
        // Derive both fields from one boolean instead of re-comparing the label string.
        boolean running = this.getById(id).getTaskStatus() != 1;
        JSONObject jsonObject = new JSONObject();
        jsonObject.put("status", running ? 2 : 3);
        jsonObject.put("msg", running ? "运行中" : "未运行");
        return jsonObject;
    }
}
