package com.sh.data.engine.domain.datadev.flink.service.impl;

import cn.hutool.core.convert.Convert;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.google.common.collect.Lists;
import com.sh.data.engine.common.enumDefinition.DSType;
import com.sh.data.engine.common.enumDefinition.FlinkJobStatusEnum;
import com.sh.data.engine.common.enumDefinition.FlinkNodeTopEnum;
import com.sh.data.engine.common.enumDefinition.FlinkNodeTypeEnum;
import com.sh.data.engine.common.exception.BusinessException;
import com.sh.data.engine.common.util.FlinkUtil.FlinkCheckpointInfo;
import com.sh.data.engine.common.util.ServletUtils;
import com.sh.data.engine.domain.base.model.PageResult;
import com.sh.data.engine.domain.base.model.UserContext;
import com.sh.data.engine.domain.datadev.flink.model.domain.*;
import com.sh.data.engine.domain.datadev.flink.model.param.*;
import com.sh.data.engine.domain.datadev.flink.service.*;
import com.sh.data.engine.domain.datadev.flinkTaskAboutExexte.FlinkTaskApiService;
import com.sh.data.engine.domain.integration.datasource.model.domain.DataSourceDetailDomain;
import com.sh.data.engine.domain.integration.datasource.model.domain.DataSourceManagerDomain;
import com.sh.data.engine.domain.integration.datasource.model.domain.DataSourceMultiDomain;
import com.sh.data.engine.domain.integration.datasource.model.domain.DataSourceQueryDomain;
import com.sh.data.engine.domain.integration.datasource.service.DataSourceService;
import com.sh.data.engine.domain.integration.offline.model.domain.OfflineTaskExistTableMappingDomain;
import com.sh.data.engine.domain.shims.DbManagerFactory;
import com.sh.data.engine.domain.shims.db.BaseDbManager;
import com.sh.data.engine.domain.shims.db.model.FieldInfoDomain;
import com.sh.data.engine.domain.shims.db.model.PreviewDataDomain;
import com.sh.data.engine.domain.shims.db.model.TableInfoDomain;
import com.sh.data.engine.domain.shims.hbase.manager.HBaseManager;
import com.sh.data.engine.domain.shims.kafka.util.KafkaUtil;
import com.sh.data.engine.domain.shims.mongo.manager.MongoManager;
import com.sh.data.engine.domain.util.ConvertUtil;
import com.sh.data.engine.domain.util.LogUtil;
import com.sh.data.engine.domain.workspace.manager.model.domain.UserDomain;
import com.sh.data.engine.domain.workspace.manager.service.UserService;
import com.sh.data.engine.infrastructure.config.FlinkConfig;
import com.sh.data.engine.repository.dao.datadev.entity.FlinkJobEntity;
import com.sh.data.engine.repository.dao.datadev.entity.FlinkMenuNodeEntity;
import com.sh.data.engine.repository.dao.datadev.entity.FlinkNodeConfigEntity;
import jakarta.servlet.http.HttpServletResponse;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.time.DateFormatUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import java.io.*;
import java.nio.charset.Charset;
import java.sql.SQLException;
import java.util.*;
import java.util.stream.Collectors;

/**
 * @author jingchen.hr
 * @description:
 * @date 6/10/2022 4:14 PM
 */
@Service
@Slf4j
public class FlinkTaskV1ServiceImpl implements FlinkTaskV1Service {

    @Autowired
    private FlinkNodeConfigService flinkNodeConfigService;

    @Autowired
    private FlinkSqlTaskService flinkSqlTaskService;

    @Autowired
    private DataSourceService dataSourceService;

    @Autowired
    private FlinkNodeService flinkNodeService;

    @Autowired
    private FlinkTaskService flinkTaskService;

    @Autowired
    private UserService userService;

    @Autowired
    private FlinkFunctionService flinkFunctionService;

    @Autowired
    private FlinkResourceService flinkResourceService;

    @Autowired
    private FlinkService flinkService;

    @Autowired
    private FlinkTaskApiService flinkTaskApi;

    @Autowired
    private FlinkJobService flinkJobService;

    @Autowired
    private FlinkConfig flinkConfig;

    @Value("${flink.task.sql.filepath:/data/hufu_file_storage/flinksql}")
    private String sqlDirPath;

    /**
     * Persists the SQL script text for a Flink SQL task node.
     *
     * @param request   carries the target node id and the script content
     * @param userId    id of the operating user
     * @param projectId id of the project the node belongs to
     * @return always {@code true}; failures surface as exceptions from the delegate
     */
    @Override
    public Boolean saveSqlTaskScript(SaveScriptDomain request, String userId, Long projectId) {
        final Long nodeId = request.getNodeId();
        final String script = request.getScript();
        flinkSqlTaskService.saveScript(nodeId, script, userId, projectId);
        return Boolean.TRUE;
    }

    /**
     * Serializes the source definitions of a Flink SQL task and stores them on the node.
     *
     * <p>Only Kafka sources are materialized at the moment; entries of any other
     * datasource type are silently skipped, matching the original single-branch switch.
     * An empty/absent source list is persisted as an empty string so stale config is cleared.
     *
     * @param request   node id plus the list of source definitions
     * @param userId    id of the operating user
     * @param projectId id of the project the node belongs to
     * @return always {@code true}
     */
    @Override
    public Boolean saveSource(SaveSourceDomain request, String userId, Long projectId) {
        final List<SaveSourceDomain.Source> sourceList = request.getSources();

        if (CollectionUtils.isEmpty(sourceList)) {
            flinkSqlTaskService.saveSource(request.getNodeId(), StringUtils.EMPTY, userId, projectId);
            return true;
        }

        final List<FlinkSourceConfigDomain> collected = new LinkedList<>();
        for (SaveSourceDomain.Source item : sourceList) {
            switch (DSType.valueOf(item.getDsType())) {
                case Kafka:
                    final FlinkKafkaSourceConfigDomain kafkaConfig =
                        ConvertUtil.copyProperties(item, FlinkKafkaSourceConfigDomain.class);
                    // Hoist the nested Kafka section instead of re-reading it per setter.
                    final var kafkaPart = item.getKafkaSource();
                    kafkaConfig.setTopic(kafkaPart.getTopic());
                    kafkaConfig.setOffset(kafkaPart.getOffset());
                    kafkaConfig.setStartPointTime(kafkaPart.getStartPointTime());
                    kafkaConfig.setTimeType(kafkaPart.getTimeType());
                    kafkaConfig.setTimeField(kafkaPart.getTimeField());
                    kafkaConfig.setDelayMs(kafkaPart.getDelayMs());
                    kafkaConfig.setPartitionOffsetList(item.getPartitionOffsetList());
                    collected.add(kafkaConfig);
                    break;
                default:
                    // Non-Kafka sources are not supported here and are skipped.
                    break;
            }
        }

        flinkSqlTaskService.saveSource(
            request.getNodeId(), JSON.toJSONString(collected), userId, projectId);
        return true;
    }

    /**
     * Serializes the sink definitions of a Flink SQL task and stores them on the node.
     *
     * <p>Each sink is mapped to its type-specific persistable config
     * (Kafka / JDBC for MySQL+Oracle / HBase / Hive). An empty/absent sink list is
     * persisted as an empty string.
     *
     * @param request   node id plus the list of sink definitions
     * @param userId    id of the operating user
     * @param projectId id of the project the node belongs to
     * @return always {@code true}
     * @throws BusinessException for an HBase sink with a missing row key or empty column list
     */
    @Override
    public Boolean saveSink(SaveSinkDomain request, String userId, Long projectId) {
        final List<SaveSinkDomain.Sink> sinks = request.getSinks();

        String content = StringUtils.EMPTY;

        if (CollectionUtils.isNotEmpty(sinks)) {
            List<FlinkSinkConfigDomain> configDomains = Lists.newLinkedList();

            for (SaveSinkDomain.Sink sink : sinks) {
                final DSType dsTypeEnum = DSType.valueOf(sink.getDsType());
                switch (dsTypeEnum) {
                    case Kafka:
                        configDomains.add(buildKafkaSinkConfig(sink));
                        break;
                    case MySQL:
                    case Oracle:
                        configDomains.add(buildJdbcSinkConfig(sink));
                        break;
                    case HBase:
                        configDomains.add(buildHbaseSinkConfig(sink));
                        break;
                    case Hive:
                        configDomains.add(buildHiveSinkConfig(sink));
                        break;
                }
            }

            content = JSON.toJSONString(configDomains);
        }
        flinkSqlTaskService.saveSink(request.getNodeId(), content, userId, projectId);
        return true;
    }

    /** Maps a Kafka sink definition onto its persistable config form. */
    private FlinkKafkaSinkConfigDomain buildKafkaSinkConfig(SaveSinkDomain.Sink sink) {
        final FlinkKafkaSinkConfigDomain config =
            ConvertUtil.copyProperties(sink, FlinkKafkaSinkConfigDomain.class);
        config.setTopic(sink.getKafkaSink().getTopic());
        return config;
    }

    /** Maps a JDBC (MySQL/Oracle) sink definition onto its persistable config form. */
    private FlinkJDBCSinkConfigDomain buildJdbcSinkConfig(SaveSinkDomain.Sink sink) {
        final FlinkJDBCSinkConfigDomain config =
            ConvertUtil.copyProperties(sink, FlinkJDBCSinkConfigDomain.class);
        config.setDataMode(sink.getJdbcSink().getDataMode());
        config.setPrimaryFields(sink.getJdbcSink().getPrimaryFields());
        config.setTableName(sink.getJdbcSink().getTableName());
        return config;
    }

    /**
     * Maps an HBase sink definition onto its persistable config form.
     *
     * @throws BusinessException when the row key name/type is missing, or when no column
     *     was filled in any column family (the frontend sends an empty list in that case)
     */
    private FlinkHbaseSinkConfigDomain buildHbaseSinkConfig(SaveSinkDomain.Sink sink) {
        if (StringUtils.isBlank(sink.getRowKeyName()) || StringUtils.isBlank(sink.getRowKeyType())) {
            throw new BusinessException("未配置row key的name/type");
        }
        if (CollectionUtils.isEmpty(sink.getFieldList())) {
            throw new BusinessException("没有在列族中填写列");
        }

        final FlinkHbaseSinkConfigDomain config =
            ConvertUtil.copyProperties(sink, FlinkHbaseSinkConfigDomain.class);
        config.setDataMode(sink.getHbaseSink().getDataMode());
        config.setPrimaryFields(sink.getHbaseSink().getPrimaryFields());
        config.setTableName(sink.getHbaseSink().getTableName());
        return config;
    }

    /**
     * Maps a Hive sink definition onto its persistable config form.
     *
     * <p>Flink SQL currently writes Hive only through a catalog, so in practice only the
     * built-in Hive is usable here (historical validation for this was removed as dead code).
     */
    private FlinkHiveSinkConfigDomain buildHiveSinkConfig(SaveSinkDomain.Sink sink) {
        FlinkHiveSinkConfigDomain config =
            ConvertUtil.copyProperties(sink, FlinkHiveSinkConfigDomain.class);
        config.setDataMode(sink.getHiveSink().getDataMode());
        config.setPrimaryFields(sink.getHiveSink().getPrimaryFields());
        config.setTableName(sink.getHiveSink().getTableName());
        return config;
    }

    /**
     * Serializes the dimension-table definitions of a Flink SQL task and stores them.
     *
     * <p>Only JDBC-style dims (MySQL / Oracle / Hive) are materialized; any other
     * datasource type is skipped, matching the original switch. An empty/absent dim
     * list is persisted as an empty string.
     *
     * @param request   node id plus the list of dim definitions
     * @param userId    id of the operating user
     * @param projectId id of the project the node belongs to
     * @return always {@code true}
     */
    @Override
    public Boolean saveDim(SaveDimDomain request, String userId, Long projectId) {
        final List<SaveDimDomain.Dim> dimList = request.getDims();

        String serialized = StringUtils.EMPTY;
        if (CollectionUtils.isNotEmpty(dimList)) {
            final List<FlinkDimConfigDomain> collected = new LinkedList<>();

            for (SaveDimDomain.Dim item : dimList) {
                final DSType typeEnum = DSType.valueOf(item.getDsType());
                if (typeEnum == DSType.MySQL || typeEnum == DSType.Oracle || typeEnum == DSType.Hive) {
                    final FlinkJDBCDimConfigDomain config =
                        ConvertUtil.copyProperties(item, FlinkJDBCDimConfigDomain.class);
                    config.setTableName(item.getJdbcDimDomain().getTableName());
                    collected.add(config);
                }
            }

            serialized = JSON.toJSONString(collected);
        }
        flinkSqlTaskService.saveDim(request.getNodeId(), serialized, userId, projectId);
        return true;
    }

    /**
     * Stores the runtime environment config for a node, dispatching on the node type.
     *
     * @param request   node id plus the env config text
     * @param userId    id of the operating user
     * @param projectId id of the project the node belongs to
     * @return always {@code true}
     * @throws BusinessException when the node does not exist
     */
    @Override
    public Boolean saveEnv(SaveEnvDomain request, String userId, Long projectId) {
        final FlinkNodeDomain node = flinkNodeService.getFinkNodeById(request.getNodeId());
        if (null == node) {
            throw new BusinessException("数据不存在");
        }

        // Only SQL and JAR task nodes carry env config; other node types are no-ops.
        switch (FlinkNodeTypeEnum.getByNodeType(node.getNodeType())) {
            case TASK_SQL:
                flinkSqlTaskService.saveEnv(request.getNodeId(), request.getEnv(), userId, projectId);
                break;
            case TASK_JAR:
                flinkTaskService.saveEnv(request.getNodeId(), request.getEnv(), userId, projectId);
                break;
        }
        return true;
    }

    /**
     * Persists the schedule config for a node and recomputes its next start/stop times.
     *
     * @param request   node id plus schedule settings
     * @param userId    id of the operating user
     * @param projectId id of the project the node belongs to
     * @return always {@code true}
     * @throws BusinessException when the node does not exist
     */
    @Override
    public Boolean saveSchedule(SaveScheduleDomain request, String userId, Long projectId) {
        final Long nodeId = request.getNodeId();
        final FlinkNodeDomain node = flinkNodeService.getFinkNodeById(nodeId);
        if (Objects.isNull(node)) {
            throw new BusinessException("数据不存在");
        }

        final String content =
            JSON.toJSONString(ConvertUtil.copyProperties(request, FlinkScheduleConfigDomain.class));

        // After saving, the next scheduled start/stop window must be recalculated.
        switch (FlinkNodeTypeEnum.getByNodeType(node.getNodeType())) {
            case TASK_SQL:
                flinkSqlTaskService.saveSchedule(nodeId, content, userId, projectId);
                flinkSqlTaskService.reCalculateNextStartAndStopTime(nodeId, true);
                break;
            case TASK_JAR:
                flinkTaskService.saveSchedule(nodeId, content, userId, projectId);
                flinkTaskService.reCalculateNextStartAndStopTime(nodeId, true);
                break;
        }
        return true;
    }

    /**
     * Assembles the detail view of a Flink task node (SQL or JAR flavour).
     *
     * <p>Common node metadata (name, creator, timestamps, remark, lock flag) is always
     * filled. For SQL tasks the publish flag of the requested (or active) version is
     * added; for JAR tasks the resource jar reference, main class and publish flag.
     *
     * @param nodeId    id of the Flink node
     * @param versionId optional specific version; {@code null} selects the active version
     * @return the populated detail domain
     * @throws BusinessException when the node does not exist
     */
    @Override
    public FlinkTaskDetailDomain getTaskDetail(Long nodeId, Long versionId) {

        final FlinkNodeDomain node = flinkNodeService.getFinkNodeById(nodeId);
        if (null == node) {
            throw new BusinessException("数据不存在");
        }

        final UserDomain user = userService.getUserById(node.getCreatorId());
        final String username = Objects.nonNull(user) ? user.getUserName() : null;

        FlinkTaskDetailDomain response = new FlinkTaskDetailDomain();
        response.setNodeId(nodeId);
        response.setName(node.getName());
        response.setNodeType(node.getNodeType());
        response.setUsername(username);
        response.setCreateTime(node.getCreateTime());
        response.setUpdateTime(node.getUpdateTime());
        response.setRemark(node.getRemark());
        response.setPid(node.getPid());
        // Default the lock flag to 0 (unlocked) when it was never set.
        response.setIsLocked(Objects.nonNull(node.getIsLocked()) ? node.getIsLocked() : 0);

        final Integer nodeType = node.getNodeType();

        if (Objects.equals(FlinkNodeTypeEnum.TASK_SQL.getCode(), nodeType)) {
            final FlinkSqlTaskDomain sqlTask =
                (null == versionId)
                    ? flinkSqlTaskService.getActiveByNodeId(nodeId)
                    : flinkSqlTaskService.getById(versionId);
            if (null != sqlTask) {
                response.setIsPublished(sqlTask.getIsPublished());
            }
        }

        if (Objects.equals(FlinkNodeTypeEnum.TASK_JAR.getCode(), nodeType)) {
            final FlinkTaskDomain jarTask =
                (null == versionId)
                    ? flinkTaskService.getActiveByNodeId(nodeId)
                    : flinkTaskService.getById(versionId);
            if (null != jarTask) {
                response.setResourceNodeId(jarTask.getResourceNodeId());
                if (null != jarTask.getResourceNodeId()) {
                    FlinkNodeDomain resourceNode =
                        flinkNodeService.getFinkNodeById(jarTask.getResourceNodeId());
                    // Fix: guard against a dangling reference — the resource node may have
                    // been deleted after the task was bound to it; the previous code
                    // dereferenced it unconditionally and could NPE.
                    if (null != resourceNode) {
                        response.setResourceName(resourceNode.getName());
                    }
                }
                response.setMainClass(jarTask.getMainClass());
                response.setUpdateTime(jarTask.getUpdateTime());
                response.setIsPublished(jarTask.getIsPublished());
            }
        }

        return response;
    }

    /**
     * Returns the SQL script content of a task version.
     *
     * @param nodeId    id of the Flink node
     * @param versionId optional specific version; {@code null} selects the active version
     * @return the script text, or {@code null} when no matching version exists
     */
    @Override
    public String getSqlTaskScrpit(Long nodeId, Long versionId) {
        final FlinkSqlTaskDomain task =
            (null == versionId)
                ? flinkSqlTaskService.getActiveByNodeId(nodeId)
                : flinkSqlTaskService.getById(versionId);
        return (null == task) ? null : task.getScriptContent();
    }

    /**
     * Resolves the persisted source configuration of a task version back into the
     * API-facing source domains; Kafka sources get their nested Kafka section filled.
     *
     * @param nodeId    id of the Flink node
     * @param versionId optional specific version; {@code null} selects the active version
     * @return an empty list when the version is missing or has no source config
     */
    @Override
    public List<FlinkSourceDomain> getSourceDetail(Long nodeId, Long versionId) {
        final FlinkSqlTaskDomain task =
            (null == versionId)
                ? flinkSqlTaskService.getActiveByNodeId(nodeId)
                : flinkSqlTaskService.getById(versionId);

        if (null == task || StringUtils.isBlank(task.getSourceConfig())) {
            return Lists.newLinkedList();
        }

        final List<FlinkSourceDomain> result = Lists.newLinkedList();
        for (FlinkSourceConfigDomain config : task.getSourceConfigDomainList()) {
            final FlinkSourceDomain item =
                ConvertUtil.copyProperties(config, FlinkSourceDomain.class);
            item.setFieldList(config.getFieldList());

            switch (DSType.valueOf(config.getDsType())) {
                case Kafka:
                    final FlinkKafkaSourceConfigDomain kafkaConfig =
                        (FlinkKafkaSourceConfigDomain) config;
                    item.setKafkaSource(
                        ConvertUtil.copyProperties(kafkaConfig, KafkaSourceDomain.class));
                    break;
            }

            result.add(item);
        }
        return result;
    }

    /**
     * Resolves the persisted sink configuration of a task version back into the
     * API-facing sink domains, filling the type-specific nested section
     * (Kafka / JDBC / HBase / Hive) according to the stored datasource type.
     *
     * @param nodeId    id of the Flink node
     * @param versionId optional specific version; {@code null} selects the active version
     * @return an empty list when the version is missing or has no sink config
     */
    @Override
    public List<FlinkSinkDomain> getSinkDetail(Long nodeId, Long versionId) {
        final FlinkSqlTaskDomain task =
            (null == versionId)
                ? flinkSqlTaskService.getActiveByNodeId(nodeId)
                : flinkSqlTaskService.getById(versionId);

        if (null == task || StringUtils.isBlank(task.getSinkConfig())) {
            return Lists.newLinkedList();
        }

        final List<FlinkSinkDomain> result = Lists.newLinkedList();
        for (FlinkSinkConfigDomain config : task.getSinkConfigDomainList()) {
            final FlinkSinkDomain item =
                ConvertUtil.copyProperties(config, FlinkSinkDomain.class);
            item.setFieldList(config.getFieldList());

            switch (DSType.valueOf(config.getDsType())) {
                case Kafka:
                    item.setKafkaSink(
                        ConvertUtil.copyProperties(
                            (FlinkKafkaSinkConfigDomain) config, KafkaSinkDomain.class));
                    break;
                case Oracle:
                case MySQL:
                    item.setJdbcSink(
                        ConvertUtil.copyProperties(
                            (FlinkJDBCSinkConfigDomain) config, JDBCSinkDomain.class));
                    break;
                case HBase:
                    item.setHbaseSinkDomain(
                        ConvertUtil.copyProperties(
                            (FlinkHbaseSinkConfigDomain) config, HbaseSinkDomain.class));
                    break;
                case Hive:
                    item.setHiveSinkDomain(
                        ConvertUtil.copyProperties(
                            (FlinkHiveSinkConfigDomain) config, HiveSinkDomain.class));
                    break;
            }
            result.add(item);
        }
        return result;
    }

    /**
     * Resolves the persisted dimension-table configuration of a task version back into
     * the API-facing dim domains.
     *
     * @param nodeId    id of the Flink node
     * @param versionId optional specific version; {@code null} selects the active version
     * @return an empty list when the version is missing or has no dim config
     */
    @Override
    public List<FlinkDimDomain> getDimDetail(Long nodeId, Long versionId) {
        final FlinkSqlTaskDomain flinkSqlTaskDomain;
        if (null == versionId) {
            flinkSqlTaskDomain = flinkSqlTaskService.getActiveByNodeId(nodeId);
        } else {
            flinkSqlTaskDomain = flinkSqlTaskService.getById(versionId);
        }

        if (null == flinkSqlTaskDomain || StringUtils.isBlank(flinkSqlTaskDomain.getDimConfig())) {
            return Lists.newLinkedList();
        }

        final List<FlinkDimConfigDomain> dimConfigDomainList =
            flinkSqlTaskDomain.getDimConfigDomainList();

        List<FlinkDimDomain> responses = Lists.newLinkedList();

        for (FlinkDimConfigDomain flinkDimConfigDomain : dimConfigDomainList) {
            FlinkDimDomain response =
                ConvertUtil.copyProperties(flinkDimConfigDomain, FlinkDimDomain.class);
            response.setFieldList(flinkDimConfigDomain.getFieldList());

            final String dsType = flinkDimConfigDomain.getDsType();
            final DSType dsTypeEnum = DSType.valueOf(dsType);
            switch (dsTypeEnum) {
                case Hive:
                    // NOTE(review): saveDim persists Hive dims as FlinkJDBCDimConfigDomain,
                    // but this branch casts to FlinkHiveDimConfigDomain — verify the
                    // deserializer's concrete type for Hive entries, otherwise this cast
                    // may throw ClassCastException. Left unchanged pending confirmation.
                    FlinkHiveDimConfigDomain dimConfigDomain =
                        (FlinkHiveDimConfigDomain) flinkDimConfigDomain;
                    final JDBCDimDomain dimDomain =
                        ConvertUtil.copyProperties(dimConfigDomain, JDBCDimDomain.class);
                    response.setJdbcDimDomain(dimDomain);
                    break;
                case Oracle:
                case MySQL:
                    // MySQL/Oracle dims round-trip through the JDBC config type.
                    FlinkJDBCDimConfigDomain jdbcDimConfigDomain =
                        (FlinkJDBCDimConfigDomain) flinkDimConfigDomain;
                    final JDBCDimDomain jdbcDimDomain =
                        ConvertUtil.copyProperties(jdbcDimConfigDomain, JDBCDimDomain.class);
                    response.setJdbcDimDomain(jdbcDimDomain);
                    break;
            }

            responses.add(response);
        }

        return responses;
    }

    /**
     * Returns the environment config of a node's task version, dispatching on the node type.
     *
     * @param nodeId    id of the Flink node
     * @param versionId optional specific version; {@code null} selects the active version
     * @return the env config string, or {@code null} when the version does not exist
     */
    @Override
    public String getEnvDetail(Long nodeId, Long versionId) {
        final FlinkNodeDomain node = flinkNodeService.getFinkNodeById(nodeId);
        final FlinkNodeTypeEnum typeEnum = FlinkNodeTypeEnum.getByNodeType(node.getNodeType());

        String env = null;
        switch (typeEnum) {
            case TASK_SQL: {
                final FlinkSqlTaskDomain task =
                    (null == versionId)
                        ? flinkSqlTaskService.getActiveByNodeId(nodeId)
                        : flinkSqlTaskService.getById(versionId);
                if (null != task) {
                    env = task.getEnvConfig();
                }
                break;
            }
            case TASK_JAR: {
                final FlinkTaskDomain task =
                    (null == versionId)
                        ? flinkTaskService.getActiveByNodeId(nodeId)
                        : flinkTaskService.getById(versionId);
                if (null != task) {
                    env = task.getEnvConfig();
                }
                break;
            }
        }
        return env;
    }

    /**
     * Returns the schedule configuration of a node's task version, converted to the
     * API-facing schedule domain. Dispatches on the node type (SQL vs JAR task).
     *
     * @param nodeId    id of the Flink node
     * @param versionId optional specific version; {@code null} selects the active version
     */
    @Override
    public FlinkScheduleDomain getScheduleDetail(Long nodeId, Long versionId) {
        final FlinkNodeDomain node = flinkNodeService.getFinkNodeById(nodeId);
        final FlinkNodeTypeEnum typeEnum = FlinkNodeTypeEnum.getByNodeType(node.getNodeType());

        FlinkScheduleConfigDomain scheduleConfig = null;
        switch (typeEnum) {
            case TASK_SQL: {
                final FlinkSqlTaskDomain task =
                    (null == versionId)
                        ? flinkSqlTaskService.getActiveByNodeId(nodeId)
                        : flinkSqlTaskService.getById(versionId);
                if (null != task) {
                    scheduleConfig = task.getScheduleConfigDomain();
                }
                break;
            }
            case TASK_JAR: {
                final FlinkTaskDomain task =
                    (null == versionId)
                        ? flinkTaskService.getActiveByNodeId(nodeId)
                        : flinkTaskService.getById(versionId);
                if (null != task) {
                    scheduleConfig = task.getScheduleConfigDomain();
                }
                break;
            }
        }
        // scheduleConfig may still be null here; null handling is delegated to
        // ConvertUtil.copyProperties, exactly as before.
        return ConvertUtil.copyProperties(scheduleConfig, FlinkScheduleDomain.class);
    }

    /**
     * Pages through the historical versions of a SQL task and decorates each record
     * with the publishing user's display name (resolved in one batched lookup).
     *
     * @param id       id of the SQL task node
     * @param pageNum  1-based page number
     * @param pageSize page size
     */
    @Override
    public IPage<FlinkSqlTaskVersionDomain> getSqlTaskVersion(
        Long id, Integer pageNum, Integer pageSize) {
        final IPage<FlinkSqlTaskDomain> page =
            flinkSqlTaskService.getVersions(id, pageNum, pageSize);
        final List<String> publisherIds =
            page.getRecords().stream()
                .map(FlinkSqlTaskDomain::getPublishUserId)
                .distinct()
                .collect(Collectors.toList());
        final Map<String, String> usernameById = userService.getMapByUserIds(publisherIds);
        return page.convert(record -> getFlinkSqlTaskVersionDomain(record, usernameById));
    }

    /**
     * Converts one SQL-task version record into its API form, resolving the publisher's
     * username from the pre-fetched id-to-name map.
     */
    private FlinkSqlTaskVersionDomain getFlinkSqlTaskVersionDomain(
        FlinkSqlTaskDomain domain, Map<String, String> userMap) {
        final FlinkSqlTaskVersionDomain version =
            Convert.convert(FlinkSqlTaskVersionDomain.class, domain);
        version.setUsername(userMap.get(domain.getPublishUserId()));
        return version;
    }

    /**
     * Lists the datasource type names selectable for a given usage slot.
     *
     * <p>businessType: 0 = SQL-task source, 1 = SQL-task sink, 2 = SQL-task dim table,
     * 3 = realtime-collection source, 4 = realtime-collection sink. Any other value
     * yields an empty list.
     *
     * @param businessType the usage slot code (see above)
     * @return the enum names of the supported datasource types
     */
    @Override
    public List<String> getDsType(Integer businessType) {
        final List<DSType> supported = new ArrayList<>();
        switch (businessType) {
            case 0:
                // flink SQL task: source side
                supported.add(DSType.Kafka);
                break;
            case 1:
                // flink SQL task: sink side
                supported.addAll(Arrays.asList(DSType.Kafka, DSType.MySQL));
                break;
            case 2:
                // flink SQL task: dimension table
                supported.addAll(Arrays.asList(DSType.MySQL, DSType.Hive));
                break;
            case 3:
                // realtime collection: source side
                supported.addAll(Arrays.asList(DSType.Kafka, DSType.MySQL, DSType.Oracle));
                break;
            case 4:
                // realtime collection: sink side
                supported.addAll(Arrays.asList(DSType.Kafka, DSType.Hive));
                break;
        }
        return supported.stream().map(Enum::name).collect(Collectors.toList());
    }

    /**
     * Lists the registered datasources of one type within a project, projected onto the
     * lightweight database domain used by the Flink editor.
     *
     * @param dsType    datasource type name to filter by
     * @param projectId id of the project to search in
     * @return matching datasources (possibly empty, never {@code null})
     */
    @Override
    public List<FlinkDatabaseDomain> getDatabasesByDsType(String dsType, Long projectId) {
        final DataSourceQueryDomain query = new DataSourceQueryDomain();
        query.setDsTypes(Lists.newArrayList(dsType));
        query.setProjectId(projectId);

        final List<DataSourceMultiDomain> dataSources = dataSourceService.getDSByParams(query);

        final List<FlinkDatabaseDomain> result = new ArrayList<>();
        if (CollectionUtils.isNotEmpty(dataSources)) {
            for (DataSourceMultiDomain dataSource : dataSources) {
                final FlinkDatabaseDomain item = new FlinkDatabaseDomain();
                item.setDsId(dataSource.getId());
                item.setDsName(dataSource.getDsName());
                item.setDsType(dataSource.getDsType());
                item.setDesc(dataSource.getDsDesc());
                item.setStatus(dataSource.getStatus());
                result.add(item);
            }
        }
        return result;
    }

    /**
     * Returns the (always empty) topic/column metadata of a Kafka datasource.
     *
     * @deprecated the agile version no longer defines topic and field metadata when a
     *     Kafka datasource is created, so this endpoint can only return an empty field
     *     list. Kept for API compatibility; use {@link #getTopics(Long)} to enumerate
     *     topics from the broker.
     * @param dsId datasource id
     * @return a response whose field list is always empty
     * @throws BusinessException when the datasource does not exist
     */
    @Override
    @Deprecated
    public FlinkTopicDomain getTopicAndColumns(Long dsId) {

        final DataSourceDetailDomain dataSource =
            dataSourceService.getDataSourceDetailById(dsId, false);
        if (null == dataSource) {
            throw new BusinessException("数据不存在");
        }

        // Fix: the previous implementation read the ds type and fetched the broker's
        // topic list here, then discarded both results — a pointless network round-trip
        // (and extra failure mode) in a deprecated method. The historical field-mapping
        // logic was already commented out because the agile version stores no
        // topic/field metadata on the datasource.
        FlinkTopicDomain response = new FlinkTopicDomain();
        response.setFields(Lists.newLinkedList());
        return response;
    }

    /**
     * Lists the Kafka topics reachable through the given datasource.
     *
     * @param dsId datasource id
     * @return topic names reported by the broker
     * @throws BusinessException when the datasource does not exist
     */
    @Override
    public Set<String> getTopics(Long dsId) {
        // Resolve the datasource first; fail fast when it is unknown.
        DataSourceDetailDomain detail = dataSourceService.getDataSourceDetailById(dsId, false);
        if (detail == null) {
            throw new BusinessException("数据源不存在");
        }
        return KafkaUtil.kafkaTopic(detail.getDsLink());
    }

    /**
     * Lists the tables usable as flink sources/sinks for a datasource.
     *
     * <p>JDBC-style datasources list tables and views directly (Hive is additionally filtered to
     * partitioned tables); Mongo and HBase expose their collection names as managed tables.
     *
     * @param dsId        datasource id
     * @param projectId   project id (currently unused by this implementation)
     * @param schema      optional schema to scope the listing
     * @param isPartition partition filter flag (currently unused by this implementation)
     * @return table names wrapped as {@link FlinkTableDomain}; empty when the type is unsupported
     * @throws BusinessException when the datasource or its manager cannot be resolved, or listing fails
     */
    @Override
    public List<FlinkTableDomain> getTables(
        Long dsId, Long projectId, String schema, Integer isPartition) {
        final DataSourceDetailDomain datasourceBaseDomain =
            dataSourceService.getDataSourceDetailById(dsId, false);
        if (datasourceBaseDomain == null) {
            throw new BusinessException("数据不存在");
        }

        DataSourceManagerDomain managerInfo = dataSourceService.getManagerInfo(dsId);
        if (Objects.isNull(managerInfo)) {
            throw new BusinessException("获取数据源信息失败，请检查后重试！");
        }

        List<TableInfoDomain> tableAndViewList = Lists.newArrayList();
        BaseDbManager baseDbManager = managerInfo.getDbManager();
        MongoManager mongoManager = managerInfo.getMongoManager();
        HBaseManager hBaseManager = managerInfo.getHBaseManager();
        String dbName = managerInfo.getDbName();
        try {
            if (!Objects.isNull(baseDbManager)) {
                // JDBC-style datasource: list tables and views directly.
                tableAndViewList = baseDbManager.getTableAndViewList(dbName, schema);
                if (managerInfo.getDsType().equalsIgnoreCase(DSType.Hive.name())) {
                    // Hive sources must be partitioned tables.
                    tableAndViewList =
                        tableAndViewList.stream()
                            .filter(TableInfoDomain::isPartitionTable)
                            .collect(Collectors.toList());
                }
            } else if (!Objects.isNull(mongoManager) || !Objects.isNull(hBaseManager)) {
                // Mongo / HBase: expose collection names as managed tables.
                List<String> collectionNames =
                    !Objects.isNull(mongoManager)
                        ? mongoManager.getCollectionList()
                        : hBaseManager.listCollectionNames();
                tableAndViewList =
                    collectionNames.stream()
                        .map(
                            c -> {
                                TableInfoDomain tableInfo = new TableInfoDomain();
                                tableInfo.setTableName(c);
                                tableInfo.setTblType(TableInfoDomain.TblType.MANAGED_TABLE);
                                return tableInfo;
                            })
                        .collect(Collectors.toList());
            }
        } catch (Exception e) {
            log.error("实时计算，获取数据库表异常", e);
            throw new BusinessException(
                "实时计算，获取数据库[" + dbName + "]表异常，错误原因:" + e.getMessage() + ",请检查数据库连接是否正常！");
        }

        return tableAndViewList.stream()
            .filter(Objects::nonNull)
            .map(table -> new FlinkTableDomain(table.getTableName(), 0))
            .collect(Collectors.toList());
    }

    /**
     * Lists the field metadata of a table for the given datasource.
     *
     * <p>For schema-capable engines a {@code schema.table} name is split into its parts.
     *
     * @param dsId      datasource id
     * @param tableName table name, optionally {@code schema.table}
     * @return field name/type/partition/pk info; empty when {@code tableName} is blank
     * @throws BusinessException when the datasource is missing, the type supports no field
     *     lookup, or the metadata query fails
     */
    @Override
    public List<FlinkTableFieldDomain> getTableFields(Long dsId, String tableName) {
        final DataSourceDetailDomain datasourceBaseDomain =
            dataSourceService.getDataSourceDetailById(dsId, false);
        if (datasourceBaseDomain == null) {
            throw new BusinessException("数据不存在");
        }
        DataSourceManagerDomain managerInfo = dataSourceService.getManagerInfo(dsId);
        // Consistency fix: siblings (e.g. getTables) guard against a null manager.
        if (Objects.isNull(managerInfo)) {
            throw new BusinessException("获取数据源信息失败，请检查后重试！");
        }
        BaseDbManager dbManager = managerInfo.getDbManager();
        MongoManager mongoManager = managerInfo.getMongoManager();
        if (StringUtils.isBlank(tableName)) {
            return Collections.emptyList();
        }
        // BUGFIX: the mongo branch below was unreachable because a null dbManager used to throw
        // unconditionally; only fail when neither manager is available.
        if (Objects.isNull(dbManager) && Objects.isNull(mongoManager)) {
            throw new BusinessException("当前数据源类型不支持表字段查询，请手动输入表字段！");
        }
        try {
            String dbName = managerInfo.getDbName();
            List<FieldInfoDomain> fieldList;
            if (!Objects.isNull(dbManager)) {
                // "schema.table" only applies to engines that actually support schemas.
                String schema = null;
                if (dbManager.hasSchema() && tableName.contains(".")) {
                    String[] split = tableName.split("\\.", 2);
                    schema = split[0];
                    tableName = split[1];
                }
                if (StringUtils.isNotBlank(schema)) {
                    // Probe the table first so a missing table surfaces as a clear error.
                    dbManager.getTableInfoByTableName(dbName, schema, tableName);
                    fieldList = dbManager.getFieldList(dbName, schema, tableName);
                } else {
                    dbManager.getTableInfoByTableName(dbName, tableName);
                    fieldList = dbManager.getFieldList(dbName, tableName);
                }
            } else {
                fieldList = mongoManager.getFieldList(dbName, tableName);
            }

            // Removed dead code: two sorted copies of fieldList were built and never used.
            List<OfflineTaskExistTableMappingDomain.Field> fields =
                ConvertUtil.copyProperties(fieldList, OfflineTaskExistTableMappingDomain.Field.class);
            return fields.stream()
                .filter(Objects::nonNull)
                .map(
                    field -> {
                        FlinkTableFieldDomain flinkFieldDomain = new FlinkTableFieldDomain();
                        flinkFieldDomain.setFieldName(field.getFieldName());
                        flinkFieldDomain.setFieldType(field.getFieldType());
                        flinkFieldDomain.setIsPartition(Boolean.TRUE.equals(field.getIsPartition()) ? 1 : 0);
                        flinkFieldDomain.setIsPk(field.isPk());
                        return flinkFieldDomain;
                    })
                .collect(Collectors.toList());
        } catch (Exception e) {
            log.error("实时计算，获取数据表字段异常：", e);
            throw new BusinessException(
                "获取表[" + tableName + "]字段信息异常，错误原因:" + e.getMessage());
        }
    }

    /**
     * Lists the HBase column family names of a table.
     *
     * @param dsId      HBase datasource id
     * @param tableName HBase table name
     * @return column family names
     * @throws BusinessException when the datasource is missing or the HBase call fails
     */
    @Override
    public List<String> getHbaseColumnFamilyNames(Long dsId, String tableName) {
        final DataSourceDetailDomain detail = dataSourceService.getDataSourceDetailById(dsId, false);
        if (detail == null) {
            throw new BusinessException("数据不存在");
        }
        HBaseManager hBaseManager = DbManagerFactory.getHBaseManager(detail.getDsLink());
        try {
            return hBaseManager.getColumnFamilyNames(tableName);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
            throw new BusinessException("获取hbase column family names错误");
        }
    }

    /**
     * Rolls a node back to a given version, dispatching on the node type.
     *
     * @param nodeId    node to roll back
     * @param versionId target version
     * @return always {@code true} on success
     * @throws BusinessException when the node type cannot be resolved
     */
    @Override
    public Boolean rollbackVersion(Long nodeId, Long versionId) {

        final FlinkNodeDomain nodeDomain = flinkNodeService.getFinkNodeById(nodeId);

        final Integer nodeType = nodeDomain.getNodeType();
        final FlinkNodeTypeEnum nodeTypeEnum = FlinkNodeTypeEnum.getByNodeType(nodeType);
        // BUGFIX: switching on a null enum throws a bare NPE; fail with a clear message instead
        // if getByNodeType finds no match for the stored type code.
        if (null == nodeTypeEnum) {
            throw new BusinessException("未知的节点类型:" + nodeType);
        }

        switch (nodeTypeEnum) {
            case TASK_SQL:
                flinkSqlTaskService.rollbackVersion(nodeId, versionId);
                break;
            case TASK_JAR:
                flinkTaskService.rollbackVersion(nodeId, versionId);
                break;
            case RESOURCE:
                flinkResourceService.rollbackVersion(nodeId, versionId);
                break;
            case FUNCTION:
                flinkFunctionService.rollbackVersion(nodeId, versionId);
                break;
        }
        return true;
    }

    /** Verifies flink SQL grammar and returns a detached copy of the verification result. */
    @Override
    public FlinkCheckGrammarDomain checkGrammar(String sql) {
        return ConvertUtil.copyProperties(flinkService.verifySql(sql), FlinkCheckGrammarDomain.class);
    }

    /**
     * Saves a realtime collection task.
     *
     * @param saveCollectionTaskParam task definition (ignored)
     * @return always {@code null} — this operation is a deprecated, unimplemented stub
     * @deprecated realtime collection tasks are not handled by this service implementation
     */
    @Override
    @Deprecated
    public Boolean saveFlinkCollectionTask(SaveCollectionTaskParam saveCollectionTaskParam) {
        return null;
    }

    /**
     * Collects, for each source of a node's active SQL task, the mapping-table name together
     * with its configured field names.
     *
     * @param nodeId flink node id
     * @return one entry per configured source; empty when the task has no sources
     */
    @Override
    public List<FlinkSourceTableInfoDomain> getSqlTaskSourceTables(Long nodeId) {
        final FlinkSqlTaskDomain domain = flinkSqlTaskService.getActiveByNodeId(nodeId);
        final List<FlinkSourceConfigDomain> sources = domain.getSourceConfigDomainList();
        if (CollectionUtils.isEmpty(sources)) {
            return new ArrayList<>();
        }
        return sources.stream()
            .map(
                source ->
                    new FlinkSourceTableInfoDomain(
                        source.getMappingTableName(),
                        source.getFieldList().stream()
                            .map(FlinkFieldDomain::getField)
                            .collect(Collectors.toList())))
            .collect(Collectors.toList());
    }

    /**
     * Streams a one-line CSV template (the chosen source table's field names) to the client.
     *
     * @param nodeId    flink node whose active SQL task supplies the source config
     * @param tableName mapping table name identifying the source
     * @param response  servlet response the CSV is written to as an attachment
     * @throws BusinessException when the task/source does not exist or the temp file cannot be written
     */
    @Override
    public void downloadSqlTaskTemplate(Long nodeId, String tableName, HttpServletResponse response) {
        String fileName = tableName + ".csv";
        String tempFilePath =
            String.format("%s/flinkSqlTemplate/%s", FileUtils.getTempDirectoryPath(), fileName);

        final FlinkSqlTaskDomain domain = flinkSqlTaskService.getActiveByNodeId(nodeId);
        if (null == domain || CollectionUtils.isEmpty(domain.getSourceConfigDomainList())) {
            throw new BusinessException("数据不存在");
        }

        final Optional<FlinkSourceConfigDomain> optional =
            domain.getSourceConfigDomainList().stream()
                .filter(source -> StringUtils.equals(source.getMappingTableName(), tableName))
                .findFirst();
        if (!optional.isPresent()) {
            throw new BusinessException("数据不存在");
        }

        // Header row: the source's field names joined by commas.
        final String fieldStr =
            optional.get().getFieldList().stream()
                .map(FlinkFieldDomain::getField)
                .collect(Collectors.joining(","));

        final File file = new File(tempFilePath);
        try {
            FileUtils.write(file, fieldStr, "UTF-8");
        } catch (IOException e) {
            log.error(e.getMessage(), e);
            throw new BusinessException("写文件异常");
        }

        response.setCharacterEncoding("UTF-8");
        response.setHeader("Content-Disposition", "attachment;filename=" + fileName);
        response.setContentType("application/octet-stream");
        response.setHeader("content-type", "application/octet-stream");

        try {
            FileUtils.copyFile(file, response.getOutputStream());
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        } finally {
            // BUGFIX: delete in finally so the temp file does not leak when the copy fails.
            FileUtils.deleteQuietly(file);
        }
    }

    /**
     * Parses an uploaded CSV debug file (header line + data rows) and persists it under
     * {@code <csvDir>/debug/<nodeId>/source/<tableName>.csv} for later debug runs.
     *
     * <p>Reading stops at the first blank (or missing) line; at most 999 data rows are accepted,
     * matching the "fewer than 1000" contract of the error message.
     *
     * @param nodeId    flink node the debug data belongs to
     * @param tableName mapping table name; used as the CSV file name
     * @param file      uploaded CSV file (first line is the header and is skipped)
     * @return parsed rows, one list of column values per line
     * @throws BusinessException when the row limit is exceeded or the file cannot be stored
     */
    @Override
    public List<List<String>> uploadDebugData(Long nodeId, String tableName, MultipartFile file) {
        List<List<String>> datas = new ArrayList<>();
        List<String> lines = new ArrayList<>();

        try (InputStream inputStream = file.getInputStream();
             InputStreamReader inputStreamReader =
                 new InputStreamReader(inputStream, Charset.forName("utf-8"));
             BufferedReader reader = new BufferedReader(inputStreamReader)) {
            // First line is the header produced by downloadSqlTaskTemplate; skip it.
            reader.readLine();
            int count = 0;
            String line;
            while (StringUtils.isNotBlank(line = reader.readLine())) {
                // BUGFIX: the old per-iteration check also fired on the EOF read after exactly
                // 1000 rows; enforce the limit only when an actual data row exceeds it.
                if (++count >= 1000) {
                    throw new BusinessException("数据文件条数应小于1000条且不超过1MB");
                }
                datas.add(Arrays.asList(line.split(",")));
                lines.add(line);
            }
        } catch (IOException e) {
            log.error(e.getMessage(), e);
        }

        // Persist the rows to the node's debug source directory (legacy flow staged via HDFS).
        String dirPath =
            String.format(flinkConfig.getCsvDir() + "/debug/%s/source", nodeId);
        String filePath = String.format("%s/%s.csv", dirPath, tableName);
        final File tempFile = new File(filePath);
        if (tempFile.exists()) {
            try {
                FileUtils.forceDelete(tempFile);
            } catch (IOException e) {
                log.error(e.getMessage(), e);
            }
        }

        try {
            FileUtils.writeLines(tempFile, "UTF-8", lines);
        } catch (IOException e) {
            // BUGFIX: log the cause before rethrowing — it used to be silently dropped.
            log.error("写入调试数据文件失败:" + filePath, e);
            throw new BusinessException("文件存储异常");
        }

        return datas;
    }

    /**
     * Samples live Kafka data for one source table of a node's SQL task and appends the sample
     * to the node's debug CSV file ({@code <csvDir>/debug/<nodeId>/source/<tableName>.csv}).
     *
     * <p>Only JSON object records can be projected onto the configured field list; records that
     * are not JSON objects (or fail to parse) are skipped as best-effort sampling.
     *
     * @param nodeId    flink node whose active SQL task supplies the source config
     * @param tableName mapping table name identifying which source to sample
     * @param maxCount  requested sample size — NOTE(review): currently unused; the actual size
     *                  is whatever {@code KafkaUtil.preview} returns. Confirm intent.
     * @return sampled rows, one list of column values (in field-list order) per record
     * @throws BusinessException when the task/source is missing or the source is not Kafka
     */
    @Override
    public List<List<String>> collectOnlineData(Long nodeId, String tableName, Integer maxCount) {

        final FlinkSqlTaskDomain domain = flinkSqlTaskService.getActiveByNodeId(nodeId);

        if (null == domain || CollectionUtils.isEmpty(domain.getSourceConfigDomainList())) {
            throw new BusinessException("数据不存在");
        }

        // Locate the source whose mapping table matches the requested name.
        final Optional<FlinkSourceConfigDomain> optional =
            domain.getSourceConfigDomainList().stream()
                .filter(source -> StringUtils.equals(source.getMappingTableName(), tableName))
                .findFirst();

        if (!optional.isPresent()) {
            throw new BusinessException("数据不存在");
        }

        final FlinkSourceConfigDomain flinkSourceConfigDomain = optional.get();
        final String dsType = flinkSourceConfigDomain.getDsType();
        if (!StringUtils.equalsIgnoreCase(dsType, DSType.Kafka.name())) {
            throw new BusinessException("暂不支持" + dsType + "类型的数据源");
        }

        FlinkKafkaSourceConfigDomain kafkaSourceConfigDomain =
            (FlinkKafkaSourceConfigDomain) flinkSourceConfigDomain;

        final String topic = kafkaSourceConfigDomain.getTopic();
        final Long dsId = kafkaSourceConfigDomain.getDsId();
        final List<FlinkFieldDomain> fieldList = kafkaSourceConfigDomain.getFieldList();

        final DataSourceDetailDomain datasourceBaseDomain =
            dataSourceService.getDataSourceDetailById(dsId, false);

        final List<String> records = KafkaUtil.preview(datasourceBaseDomain.getDsLink(), topic);

        if (CollectionUtils.isEmpty(records)) {
            return Lists.newArrayList();
        }

        List<List<String>> results = Lists.newArrayList();

        // Recreate the debug CSV: remove any previous sample for this node/table first.
        String csvDir = flinkConfig.getCsvDir();
        String filePath = String.format(csvDir + "/debug/%s/source/%s.csv", nodeId, tableName);
        File tempFile = new File(filePath);
        if (tempFile.exists()) {
            try {
                FileUtils.forceDelete(tempFile);
            } catch (IOException e) {
                log.error(e.getMessage(), e);
            }
        }

        for (String record : records) {
            try {
                // Only JSON object records are supported; project each onto the field list.
                final Object object = JSON.parse(record);
                if (object instanceof JSONObject) {
                    List<String> result = Lists.newArrayList();
                    JSONObject jsonObject = (JSONObject) object;
                    for (FlinkFieldDomain flinkFieldDomain : fieldList) {
                        result.add(jsonObject.getString(flinkFieldDomain.getField()));
                    }
                    results.add(result);
                    // Append each projected row to the CSV (UTF-8, append mode).
                    FileUtils.write(tempFile, StringUtils.join(result, ",") + "\n", "UTF-8", true);
                }

            } catch (Exception ignore) {
                // Deliberately ignored: unparsable / non-JSON records are skipped.
            }
        }

        //    if (tempFile.exists()) {
        //      try {
        //        String hdfsTargetDir = String.format("/user/flink/debug/%s/source", nodeId);
        //        HdfsUtil.uploadFile("flink", filePath, hdfsTargetDir);
        //
        //      } catch (Exception e) {
        //        throw new BusinessException("文件存储异常");
        //      } finally {
        //        FileUtils.deleteQuietly(tempFile);
        //      }
        //    }

        return results;
    }

    /**
     * Starts a debug run of a node and returns the new job id together with the submitted SQL,
     * with password values masked.
     *
     * @param nodeId node to debug
     * @return job id plus (best effort) the masked SQL content
     */
    @Override
    public FlinkDebugDomain startDebug(Long nodeId) {
        StartFlinkTaskParam startParam = new StartFlinkTaskParam();
        startParam.setNodeId(nodeId);
        startParam.setExecType(0);

        FlinkDebugDomain response = new FlinkDebugDomain();
        response.setJobId(flinkTaskApi.startFlinkTask(startParam));

        // Debug submissions use exec type 0, hence the trailing "_0" in the SQL file name.
        String sqlFilePath = String.format("%s/%s/%s_%s.sql", sqlDirPath, nodeId, nodeId, 0);
        try {
            String sql = FileUtils.readFileToString(new File(sqlFilePath), "UTF-8");
            // Mask every 'password'='...' pair before echoing the SQL back to the client.
            response.setContent(sql.replaceAll("'password'='((?!').)+'", "'password'='******'"));
        } catch (IOException e) {
            // Missing SQL file is non-fatal: the job id is still returned.
            log.error(e.getMessage(), e);
        }
        return response;
    }

    /**
     * Stops a debug job. A job that no longer exists, or never got a YARN application, is
     * treated as already stopped.
     *
     * @param jobId debug job id
     * @return always {@code true} on success
     * @throws BusinessException when the stop call fails
     */
    @Override
    public Boolean stopDebug(Long jobId) {
        final FlinkJobDomain jobDomain = flinkJobService.getByFlinkJobId(jobId);
        if (null == jobDomain || StringUtils.isBlank(jobDomain.getYarnApplicationId())) {
            return true;
        }
        try {
            // Debug runs are killed outright — no savepoint is taken.
            flinkTaskApi.stopFlinkTask(jobId, FlinkJobStatusEnum.STOPPED.getCode(), false);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
            throw new BusinessException("停止调试异常");
        }
        return true;
    }

    /**
     * Builds the log view for a debug job.
     *
     * <p>While the job is still running only a timestamped status line is returned and
     * {@code logEnd=false} tells the client to keep polling; otherwise the YARN task log plus the
     * local flink log are concatenated and a terminal status line is appended.
     *
     * @param jobId flink job id
     * @return log lines with paging/end markers
     * @throws BusinessException when the job does not exist
     */
    @Override
    public FlinkJobLogDomain getLog(Long jobId) {
        final FlinkJobDomain jobDomain = flinkJobService.getByFlinkJobId(jobId);
        if (null == jobDomain) {
            throw new BusinessException("数据不存在");
        }

        String timeFormat = "yyyy-MM-dd HH:mm:ss.SSS";
        FlinkJobLogDomain response = new FlinkJobLogDomain();
        List<String> logList = Lists.newArrayList();
        final Integer taskStatus = jobDomain.getStatus();

        if (Objects.equals(FlinkJobStatusEnum.RUNNING.getCode(), taskStatus)) {
            // Running: report a single live status line; no application yet means "waiting".
            String message =
                StringUtils.isBlank(jobDomain.getYarnApplicationId()) ? "等待运行...." : "运行中....";
            logList.add(
                String.format("[%s] %s", DateFormatUtils.format(new Date(), timeFormat), message));
            response.setLogEnd(false);
            response.setOffset(0);
            response.setLogContent(logList);
            return response;
        }

        response.setLogEnd(true);
        response.setOffset(0);

        if (StringUtils.isNotBlank(jobDomain.getYarnApplicationId())) {
            GetFlinkTaskLogParam param = new GetFlinkTaskLogParam();
            param.setJobId(jobId);
            logList = flinkTaskApi.getFlinkTaskLogList(param.getJobId());
        }

        try {
            final List<String> strings = LogUtil.readFlinkLog(jobDomain.getNodeId(), 1);
            if (CollectionUtils.isNotEmpty(strings)) {
                logList.addAll(strings);
            }
        } catch (IOException e) {
            log.error(e.getMessage(), e);
        }

        // Append one terminal status line matching the job's final state.
        String statusMessage = null;
        if (Objects.equals(FlinkJobStatusEnum.SUCCESS.getCode(), taskStatus)) {
            statusMessage = "调试运行成功...";
        } else if (Objects.equals(FlinkJobStatusEnum.FAILURE.getCode(), taskStatus)) {
            statusMessage = "调试运行失败...";
        } else if (Objects.equals(FlinkJobStatusEnum.STOPPED.getCode(), taskStatus)) {
            statusMessage = "调试任务停止...";
        } else if (Objects.equals(FlinkJobStatusEnum.PAUSED.getCode(), taskStatus)) {
            statusMessage = "调试任务暂停...";
        }
        if (statusMessage != null) {
            logList.add(
                String.format(
                    "[%s] %s", DateFormatUtils.format(new Date(), timeFormat), statusMessage));
        }

        response.setLogContent(logList);
        return response;
    }

    /**
     * Pages the project's task execution records with optional sorting.
     *
     * <p>BUGFIX: the requested sort direction used to be validated but ignored — every branch
     * hard-coded {@code DESC}. It is now honored through a strict whitelist (only a literal
     * "asc" flips the direction, only known fields map to columns), so no user input is ever
     * concatenated into the ORDER BY clause.
     *
     * @param taskRecordInfoParam paging, filtering and sorting parameters
     * @return one page of task records
     */
    @Override
    public PageResult<FlinkLogRecordDomain> getTaskRecordPageInfo(
        TaskRecordInfoParam taskRecordInfoParam) {
        Integer pageNum = taskRecordInfoParam.getPageNum();
        Integer pageSize = taskRecordInfoParam.getPageSize();

        Long projectId = ServletUtils.getProjectId();

        // Default sort; overridden only for whitelisted fields/directions below.
        String orderColumn = "create_time";
        String direction = "DESC";

        String orderField = taskRecordInfoParam.getOrderField();
        String orderType = taskRecordInfoParam.getOrder();
        if (StringUtils.isNotEmpty(orderField) && StringUtils.isNotEmpty(orderType)) {
            if ("taskName".equalsIgnoreCase(orderField)) {
                orderColumn = "task_name";
            } else if ("planTime".equalsIgnoreCase(orderField)) {
                orderColumn = "plan_time";
            } else if ("startTime".equalsIgnoreCase(orderField)) {
                orderColumn = "start_time";
            } else if ("endTime".equalsIgnoreCase(orderField)) {
                orderColumn = "end_time";
            }
            if ("asc".equalsIgnoreCase(orderType)) {
                direction = "ASC";
            }
        }
        String order = String.format("order by %s %s", orderColumn, direction);

        return flinkJobService.getTaskRecordPageInfo(
            projectId,
            order,
            pageNum,
            pageSize,
            taskRecordInfoParam.getNodeName(),
            taskRecordInfoParam.getTaskStatus(),
            taskRecordInfoParam.getExecType());
    }

    /**
     * Returns the full log of one task execution record.
     *
     * @param id flink job id
     * @return log content; {@code logEnd=false} while the job is still running
     * @throws BusinessException when the record does not exist
     */
    @Override
    public FlinkLogDomain getTaskRecordDetailById(Long id) {
        FlinkLogDomain response = new FlinkLogDomain();
        response.setLogEnd(true);
        response.setOffset(0);

        final FlinkJobDomain jobDomain = flinkJobService.getByFlinkJobId(id);
        // BUGFIX: guard against a missing record (as sibling methods do) instead of an NPE.
        if (null == jobDomain) {
            throw new BusinessException("数据不存在");
        }
        final Integer taskStatus = jobDomain.getStatus();
        if (Objects.equals(FlinkJobStatusEnum.RUNNING.getCode(), taskStatus)) {
            response.setLogEnd(false);
        }

        GetFlinkTaskLogParam param = new GetFlinkTaskLogParam();
        param.setJobId(id);

        final List<String> logs = flinkTaskApi.getFlinkTaskLogList(param.getJobId());
        if (CollectionUtils.isEmpty(logs)) {
            response.setLogContent(Lists.newArrayList("暂未发现日志...."));
            return response;
        }

        response.setLogContent(logs);
        return response;
    }

    /**
     * Previews sample rows of a table for supported datasource families: Hive and relational
     * databases go through the JDBC manager, HBase through its own manager.
     *
     * @param dsId      datasource id
     * @param tableName table to preview
     * @return preview rows
     * @throws BusinessException when the datasource is missing, the type is unsupported,
     *     or the preview query fails
     */
    @Override
    public PreviewDataDomain previewData(Long dsId, String tableName) {
        DataSourceDetailDomain detail = dataSourceService.getDataSourceDetailById(dsId, false);
        DataSourceManagerDomain managerInfo = dataSourceService.getManagerInfo(dsId);
        if (detail == null) {
            throw new BusinessException("找不到对应的数据源:数据源Id " + dsId);
        }

        String dsType = detail.getDsType();
        if (DSType.Hive.name().equalsIgnoreCase(dsType) || DSType.isRdbms(dsType)) {
            try {
                return managerInfo.getDbManager().previewData(tableName);
            } catch (Exception e) {
                log.error(e.getMessage(), e);
                throw new BusinessException("预览数据出错:" + e.getMessage());
            }
        }

        if (DSType.HBase.name().equalsIgnoreCase(dsType)) {
            try {
                // HBase preview without row-key decoding.
                return managerInfo.getHBaseManager().previewData(tableName, false);
            } catch (Exception e) {
                log.error(e.getMessage(), e);
                throw new BusinessException("预览数据出错:" + e.getMessage());
            }
        }

        throw new BusinessException("不支持的数据源类型:" + detail.getDsType());
    }

    /**
     * Previews raw records from a Kafka topic.
     *
     * @param dsId  Kafka datasource id
     * @param topic topic to sample
     * @return up to 10 raw records, waiting at most 10s
     * @throws BusinessException when the datasource does not exist
     */
    @Override
    public List<String> previewKafkaData(Long dsId, String topic) {
        DataSourceDetailDomain dataSourceDetailById =
            dataSourceService.getDataSourceDetailById(dsId, false);
        // BUGFIX: fail with a clear message instead of an NPE when the datasource is gone
        // (sibling lookups apply the same guard).
        if (dataSourceDetailById == null) {
            throw new BusinessException("数据源不存在");
        }

        return KafkaUtil.preview(
            dataSourceDetailById.getDsLink(), Lists.newArrayList(topic), 10, 10000);
    }

    /**
     * Stops a flink job, optionally taking a savepoint first.
     *
     * @param id     flink job id
     * @param status {@code 1} to take a savepoint before stopping; any other value (or null) skips it
     * @return always {@code true} on success
     */
    @Override
    public Boolean stopFlinkJob(Long id, Integer status) {
        // BUGFIX: `status == 1` unboxed a possibly-null Integer (NPE); compare null-safely
        // and compute the flag once instead of twice.
        boolean needSavepoint = Integer.valueOf(1).equals(status);
        StopFlinkTaskParam param = new StopFlinkTaskParam();
        param.setJobId(id);
        param.setJobStatus(FlinkJobStatusEnum.STOPPED.getCode());
        param.setNeedSavepoint(needSavepoint);
        flinkTaskApi.stopFlinkTask(param.getJobId(), param.getJobStatus(), needSavepoint);
        return true;
    }

    /**
     * Retries a job by flagging it paused; the scheduled task picks paused jobs up again
     * automatically.
     */
    @Override
    public Boolean retryJob(Long id) {
        flinkJobService.updateJobStatus(id, FlinkJobStatusEnum.PAUSED.getCode());
        return Boolean.TRUE;
    }

    /**
     * Restarts a job's node.
     *
     * <p>Type 1 resumes from the most recent savepoint when one exists; type 2 rewrites every
     * Kafka source of the node's SQL task to start from a per-table point in time before
     * resubmitting.
     *
     * @param request job id, restart type and optional per-table start times
     * @return always {@code true} on success
     */
    @Override
    public Boolean restartJob(RestartJobDomain request) {
        final FlinkJobDomain jobDomain = flinkJobService.getByFlinkJobId(request.getId());

        StartFlinkTaskParam param = new StartFlinkTaskParam();
        param.setNodeId(jobDomain.getNodeId());
        param.setExecType(1);

        if (request.getType() == 1) {
            // Resume from the latest savepoint when one is available.
            final FlinkCheckpointInfo lastSavepoint = flinkTaskApi.getLastSavepoint(request.getId());
            if (null != lastSavepoint) {
                param.setSavePoint(lastSavepoint.getCheckpointPath());
            }
        } else if (request.getType() == 2) {
            final List<RestartJobDomain.TableTime> tableTimes = request.getTableTimes();
            if (CollectionUtils.isNotEmpty(tableTimes)) {
                // BUGFIX: Collectors.toMap throws on duplicate table names; keep the last entry.
                final Map<String, String> map =
                    tableTimes.stream()
                        .collect(
                            Collectors.toMap(
                                RestartJobDomain.TableTime::getTable,
                                RestartJobDomain.TableTime::getTime,
                                (first, second) -> second));
                final FlinkSqlTaskDomain flinkSqlTaskDomain =
                    flinkSqlTaskService.getActiveByNodeId(jobDomain.getNodeId());
                final List<FlinkSourceConfigDomain> sourceConfigDomainList =
                    flinkSqlTaskDomain.getSourceConfigDomainList();
                if (CollectionUtils.isNotEmpty(sourceConfigDomainList)) {
                    for (FlinkSourceConfigDomain flinkSourceConfigDomain : sourceConfigDomainList) {
                        if (flinkSourceConfigDomain instanceof FlinkKafkaSourceConfigDomain) {
                            FlinkKafkaSourceConfigDomain kafkaSourceConfigDomain =
                                (FlinkKafkaSourceConfigDomain) flinkSourceConfigDomain;
                            // Offset mode 2 = start from an explicit point in time.
                            kafkaSourceConfigDomain.setOffset(2);
                            kafkaSourceConfigDomain.setStartPointTime(
                                map.get(kafkaSourceConfigDomain.getMappingTableName()));
                        }
                    }

                    // Persist the rewritten source config before restarting.
                    String content = JSON.toJSONString(sourceConfigDomainList);
                    flinkSqlTaskService.saveSourceOnly(
                        jobDomain.getNodeId(), content, UserContext.getUserId());
                }
            }
        }

        flinkTaskApi.startFlinkTask(param);

        return true;
    }

    /**
     * Restarts the Flink task behind the given job from an explicit savepoint.
     *
     * @param id    Flink job id used to resolve the owning node
     * @param point savepoint path to resume from
     * @return always {@code true}; failures surface as exceptions from the task API
     */
    @Override
    public Boolean continueJob(Long id, String point) {
        final FlinkJobDomain job = flinkJobService.getByFlinkJobId(id);

        final StartFlinkTaskParam startParam = new StartFlinkTaskParam();
        startParam.setNodeId(job.getNodeId());
        // exec type 1 = restart/continue (same value the other start paths use)
        startParam.setExecType(1);
        startParam.setSavePoint(point);

        flinkTaskApi.startFlinkTask(startParam);
        return Boolean.TRUE;
    }

    /**
     * Lists all known checkpoints for the given Flink job.
     *
     * @param id Flink job id
     * @return checkpoint descriptors as reported by the task API
     */
    @Override
    public List<FlinkCheckpointInfo> getListCheckPoint(Long id) {
        return flinkTaskApi.getCheckpointList(id);
    }

    /**
     * Fetches the most recent savepoint for the given Flink job.
     *
     * @param id Flink job id
     * @return the latest savepoint info, or whatever the task API returns when none exists
     */
    @Override
    public FlinkCheckpointInfo getLastSavepoint(Long id) {
        return flinkTaskApi.getLastSavepoint(id);
    }

    /**
     * Formerly listed binlog files for a data source (real-time sync feature).
     *
     * @param dsId data source id (ignored)
     * @return an empty list — previously returned {@code null}, which is the
     *     return-null-collection anti-pattern and would NPE any caller that iterates
     * @deprecated the real-time sync lookup is no longer implemented
     */
    @Override
    @Deprecated
    public List<String> getBinlogFileByDsId(Long dsId) {
        return Collections.emptyList();
    }

    /**
     * Formerly listed Oracle schemas for a data source.
     *
     * @param dsId data source id (ignored)
     * @return an empty list — previously returned {@code null}; empty is safe for
     *     callers that iterate or use CollectionUtils.isEmpty
     * @deprecated no longer implemented
     */
    @Override
    @Deprecated
    public List<String> getOracleSchemaByDsId(Long dsId) {
        return Collections.emptyList();
    }

    /**
     * Formerly resolved table names within a project.
     *
     * @param tableName candidate table names (ignored)
     * @param projectId project id (ignored)
     * @return an empty list — previously returned {@code null}; empty is safe for
     *     callers that iterate or use CollectionUtils.isEmpty
     * @deprecated no longer implemented
     */
    @Override
    @Deprecated
    public List<String> getTablesNames(List<String> tableName, Long projectId) {
        return Collections.emptyList();
    }

    /**
     * Scans every Hive data source of the project for managed partition tables.
     *
     * <p>NOTE(review): this method is visibly unfinished — the filtered tables were
     * (and still are) discarded and {@code null} is returned, matching the original
     * contract. The FIXME below suggests the interaction is about to change so the
     * frontend passes a concrete Hive dsId.
     *
     * @param projectId project whose Hive data sources are scanned
     * @return currently always {@code null} when Hive sources exist (unimplemented),
     *     or an empty list when the project has no Hive data source
     */
    @Override
    public List<HiveTableInfoDomain> getPartitionTableList(Long projectId) {
        // FIXME 变交互 前端选择hive数据库 传入hive的dsId
        DataSourceQueryDomain query = new DataSourceQueryDomain();
        query.setProjectId(projectId);
        query.setDsTypes(Lists.newArrayList(DSType.Hive.name()));
        List<DataSourceMultiDomain> hiveSources = dataSourceService.getDSByParams(query);
        if (CollectionUtils.isEmpty(hiveSources)) {
            return Collections.emptyList();
        }
        for (DataSourceMultiDomain ds : hiveSources) {
            DataSourceManagerDomain managerInfo = dataSourceService.getManagerInfo(ds.getId());
            BaseDbManager dbManager = managerInfo.getDbManager();
            List<TableInfoDomain> tableList;
            try {
                tableList = dbManager.getTableList(ds.getDbName());
            } catch (SQLException e) {
                // Previously this exception was silently swallowed and the stream below
                // then NPE'd on the null tableList. Skip the unreachable source instead.
                continue;
            }
            if (CollectionUtils.isEmpty(tableList)) {
                continue;
            }
            // Managed partition tables only. Constant-first equals avoids an NPE when
            // getTblType() is null.
            List<TableInfoDomain> managedPartitionTables =
                tableList.stream()
                    .filter(TableInfoDomain::isPartitionTable)
                    .filter(table -> "MANAGED_TABLE".equals(table.getTblType()))
                    .collect(Collectors.toList());
            // TODO: convert managedPartitionTables to HiveTableInfoDomain and collect them;
            // the original implementation computed this list and threw it away.
        }
        return null;
    }

    /**
     * Intended to list partition values for the given table; not yet implemented.
     *
     * @param tableName Hive table name
     * @param projectId project id
     * @return currently always {@code null} — pending the interaction change noted below
     */
    @Override
    public List<String> getPartitionInfoByTblName(String tableName, Long projectId) {
        // fixme 同上
        return null;
    }

    /**
     * Builds the detail view for a single Flink job execution record.
     *
     * <p>Fix: the original also fetched a {@code FlinkNodeDomain} via
     * {@code getFinkNodeById} and never used it — that redundant lookup is removed.
     *
     * @param recordId Flink job record id
     * @return detail domain enriched with schedule type, creator name, publish time,
     *     task name and elapsed seconds (null when the job has not finished)
     */
    @Override
    public FlinkJobDetailDomain getFlinkRecordDetailByrecordId(Long recordId) {
        FlinkJobEntity record = flinkJobService.getDetailById(recordId);
        FlinkJobDetailDomain detail = Convert.convert(FlinkJobDetailDomain.class, record);

        FlinkNodeConfigDomain nodeConfig =
            flinkNodeConfigService.getNodeConfigById(record.getNodeConfigId());

        Integer effectDayType =
            Objects.nonNull(nodeConfig.getScheduleConfig())
                ? nodeConfig.getScheduleConfig().getEffectDayType()
                : null;
        detail.setScheduleType(effectDayType);
        detail.setUserName(userService.getUserById(nodeConfig.getCreatorId()).getUserName());
        detail.setPlanTime(nodeConfig.getPublishTime());
        detail.setTaskName(flinkNodeService.getById(record.getNodeId()).getName());

        // Elapsed time in whole seconds; only computable once both timestamps exist.
        Date startTime = record.getStartTime();
        Date endTime = record.getEndTime();
        Long expendTime = null;
        if (Objects.nonNull(endTime) && Objects.nonNull(startTime)) {
            expendTime = (endTime.getTime() - startTime.getTime()) / 1000;
        }
        detail.setExpendTime(expendTime);
        return detail;
    }

    /**
     * Counts the project's Flink jobs by status bucket (running / stopped-or-paused / failed).
     *
     * <p>Fix: status comparison now uses {@link Objects#equals} instead of {@code ==}.
     * If {@code getStatus()} or {@code getCode()} is a boxed {@code Integer}, {@code ==}
     * is an identity comparison and silently fails for values outside the Integer cache;
     * {@code Objects.equals} is correct for both {@code int} and {@code Integer}.
     *
     * @param projectId project whose jobs are summarized
     * @return summary with running / stopped / failed counts
     */
    @Override
    public FlinkTaskSummaryDomain getSummary(Long projectId) {
        List<FlinkJobEntity> jobs = flinkJobService.getJobsByProjectId(projectId);

        long running =
            jobs.stream()
                .filter(i -> Objects.equals(i.getStatus(), FlinkJobStatusEnum.RUNNING.getCode()))
                .count();
        long stopped =
            jobs.stream()
                .filter(
                    i ->
                        Objects.equals(i.getStatus(), FlinkJobStatusEnum.STOPPED.getCode())
                            || Objects.equals(i.getStatus(), FlinkJobStatusEnum.PAUSED.getCode()))
                .count();
        long failed =
            jobs.stream()
                .filter(i -> Objects.equals(i.getStatus(), FlinkJobStatusEnum.FAILURE.getCode()))
                .count();

        FlinkTaskSummaryDomain summary = new FlinkTaskSummaryDomain();
        summary.setRunningNum((int) running);
        summary.setStoppedNum((int) stopped);
        summary.setFailedNum((int) failed);
        return summary;
    }

    //    @BizDataIndexAnnotation(
    //        type = BizDataIndexTypeEnum.ONLINE_DEV,
    //        dataIdFieldName = "nodeId",
    //        dataIdParamIndex = 0,
    //        operateType = BizDataIndexAnnotation.OperateType.SAVE)
    /**
     * Creates or updates a Flink task menu node; for JAR tasks, also persists the
     * active node config (resource jar + main class).
     *
     * <p>Fixes: removed a dead-code branch that converted {@code param} to a
     * {@code SaveTaskNodeParam} and populated it without ever using it (it ended with
     * the no-op {@code param.setNodeId(nodeId)}); removed a duplicate
     * {@code setNodeType} call in the create branch.
     *
     * @param param node payload; {@code nodeId == null} means "create"
     * @return always {@code Boolean.TRUE}
     * @throws BusinessException when creating a node whose name already exists under
     *     the same parent
     */
    @Override
    public Boolean saveFlinkTaskNode(SaveFlinkTaskNodeParam param) {
        String userId = UserContext.getUserId();
        Long nodeId = param.getNodeId();

        // Duplicate-name check is only fatal for brand-new nodes.
        boolean duplicateName =
            flinkNodeService.hasSameMenuNode(
                null,
                param.getPid(),
                ServletUtils.getProjectId(),
                param.getName(),
                param.getNodeType());
        if (duplicateName && Objects.isNull(nodeId)) {
            throw new BusinessException("存在同名节点");
        }

        FlinkMenuNodeEntity entity = new FlinkMenuNodeEntity();
        entity.setName(StringUtils.trim(param.getName()));
        entity.setPid(param.getPid());
        entity.setUpdateTime(new Date());
        entity.setUpdaterId(userId);
        entity.setNodeType(param.getNodeType());
        entity.setRowState(1);
        entity.setRemark(param.getRemark());
        if (Objects.isNull(nodeId)) {
            // Create path: stamp creation metadata and pick up the generated id.
            entity.setCreateTime(new Date());
            entity.setProjectId(ServletUtils.getProjectId());
            entity.setCreatorId(userId);
            flinkNodeService.save(entity);
            nodeId = entity.getId();
        } else {
            entity.setId(param.getNodeId());
            flinkNodeService.updateById(entity);
        }

        if (param.getNodeType().equals(FlinkNodeTypeEnum.TASK_JAR.getCode())) {
            FlinkNodeConfigEntity config = flinkNodeConfigService.getActiveByNodeId(nodeId);
            boolean configExists = true;
            if (config == null) {
                config = new FlinkNodeConfigEntity();
                config.setEnvConfig(flinkService.getDefaultEnv());
                config.setIsActive(1);
                configExists = false;
            }
            FlinkNodeConfigEntity.JarTaskConfig jarTaskConfig =
                new FlinkNodeConfigEntity.JarTaskConfig();
            jarTaskConfig.setResourceNodeId(param.getResourceNodeId());
            jarTaskConfig.setMainClass(param.getMainClass());
            config.setNodeId(nodeId);
            // NOTE(review): createTime is refreshed even on the update path — TODO confirm
            // updateOne ignores it; otherwise the original creation time gets overwritten.
            config.setCreateTime(new Date());
            config.setUpdateTime(new Date());
            config.setCreatorId(userId);
            config.setUpdaterId(userId);
            config.setProjectId(ServletUtils.getProjectId());
            config.setNodeType(FlinkNodeTypeEnum.TASK_JAR.getCode());
            config.setJarTaskConfig(jarTaskConfig);
            if (configExists) {
                flinkNodeConfigService.updateOne(config);
            } else {
                flinkNodeConfigService.insertOne(config);
            }
        }

        return Boolean.TRUE;
    }

    /**
     * Resolves the menu-node id for a node name within a project (SP integration path).
     *
     * <p>Fix: the lookup is a pure SELECT, so it now uses a {@code LambdaQueryWrapper}
     * instead of the misused {@code LambdaUpdateWrapper}. The fully-qualified name is
     * used because only the update wrapper is imported at the top of this file.
     *
     * @param nodeName exact node name to match
     * @param projectId owning project id
     * @return id of the first match (pid=1, nodeType=1), or {@code null} when none
     */
    @Override
    public Long getNodeIdForSp(String nodeName, Long projectId) {
        List<FlinkMenuNodeEntity> matches =
            flinkNodeService.list(
                new com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper<FlinkMenuNodeEntity>()
                    .eq(FlinkMenuNodeEntity::getProjectId, projectId)
                    .eq(FlinkMenuNodeEntity::getName, nodeName)
                    .eq(FlinkMenuNodeEntity::getPid, 1)
                    .eq(FlinkMenuNodeEntity::getNodeType, 1));

        if (CollectionUtils.isNotEmpty(matches)) {
            // Multiple rows can match; mirror the original behavior of taking the first.
            return matches.get(0).getId();
        }
        return null;
    }
}
